diff --git a/.clang-format b/.clang-format new file mode 100644 index 00000000..80d3506a --- /dev/null +++ b/.clang-format @@ -0,0 +1,84 @@ +# Generated from CLion C/C++ Code Style settings +Language: Cpp +BasedOnStyle: Google +ColumnLimit: 100 +SortIncludes: true +AccessModifierOffset: -1 +AlignAfterOpenBracket: Align +AlignConsecutiveAssignments: false +AlignOperands: Align +AllowAllArgumentsOnNextLine: false +AllowAllConstructorInitializersOnNextLine: true +AllowAllParametersOfDeclarationOnNextLine: false +AllowShortBlocksOnASingleLine: Never +AllowShortCaseLabelsOnASingleLine: false +AllowShortFunctionsOnASingleLine: None +AllowShortIfStatementsOnASingleLine: Always +AllowShortLambdasOnASingleLine: All +AllowShortLoopsOnASingleLine: true +AlwaysBreakAfterReturnType: None +AlwaysBreakTemplateDeclarations: Yes +BreakBeforeBraces: Custom +BraceWrapping: + AfterCaseLabel: false + AfterClass: false + AfterEnum: false + AfterFunction: false + AfterNamespace: false + AfterUnion: false + BeforeCatch: false + BeforeElse: false + IndentBraces: false + SplitEmptyFunction: false + SplitEmptyRecord: true +BreakBeforeBinaryOperators: None +BreakBeforeTernaryOperators: true +BreakConstructorInitializers: BeforeColon +BreakInheritanceList: BeforeColon +CompactNamespaces: false +ContinuationIndentWidth: 4 +IndentCaseLabels: true +IndentPPDirectives: None +IndentWidth: 2 +KeepEmptyLinesAtTheStartOfBlocks: true +MaxEmptyLinesToKeep: 1 +NamespaceIndentation: None +ObjCSpaceAfterProperty: false +ObjCSpaceBeforeProtocolList: false +DerivePointerAlignment: false +PointerAlignment: Left +ReflowComments: false +SpaceAfterCStyleCast: false +SpaceAfterLogicalNot: false +SpaceAfterTemplateKeyword: true +SpaceBeforeAssignmentOperators: true +SpaceBeforeCpp11BracedList: false +SpaceBeforeCtorInitializerColon: true +SpaceBeforeInheritanceColon: true +SpaceBeforeParens: ControlStatements +SpaceBeforeRangeBasedForLoopColon: true +SpaceInEmptyParentheses: false +SpacesBeforeTrailingComments: 2 +SpacesInAngles: false +SpacesInCStyleCastParentheses: false +SpacesInContainerLiterals: false +SpacesInParentheses: false +SpacesInSquareBrackets: false +ConstructorInitializerAllOnOneLineOrOnePerLine: false +IncludeBlocks: Merge +TabWidth: 2 +UseTab: Never + +--- +Language: ObjC +BasedOnStyle: Google +ColumnLimit: 100 + +# Only sort headers in each include block +SortIncludes: true +IncludeBlocks: Preserve +DerivePointerAlignment: false +PointerAlignment: Left +AllowShortFunctionsOnASingleLine: None +BraceWrapping: + SplitEmptyFunction: true \ No newline at end of file diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..f2bf5a96 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,22 @@ +*.png filter=lfs diff=lfs merge=lfs -text +*.pag filter=lfs diff=lfs merge=lfs -text +*.jpg filter=lfs diff=lfs merge=lfs -text +*.webp filter=lfs diff=lfs merge=lfs -text +*.aep filter=lfs diff=lfs merge=lfs -text +*.ttf filter=lfs diff=lfs merge=lfs -text +*.ttc filter=lfs diff=lfs merge=lfs -text +*.otf filter=lfs diff=lfs merge=lfs -text +*.mp4 filter=lfs diff=lfs merge=lfs -text +*.mov filter=lfs diff=lfs merge=lfs -text +*.aac filter=lfs diff=lfs merge=lfs -text +*.mp3 filter=lfs diff=lfs merge=lfs -text +*.m4a filter=lfs diff=lfs merge=lfs -text +*.gif filter=lfs diff=lfs merge=lfs -text +*.a filter=lfs diff=lfs merge=lfs -text +*.dylib filter=lfs diff=lfs merge=lfs -text +*.dll filter=lfs diff=lfs merge=lfs -text +*.lib filter=lfs diff=lfs merge=lfs -text +*.so filter=lfs diff=lfs merge=lfs -text +*.lzma2 
filter=lfs diff=lfs merge=lfs -text +*.wasm filter=lfs diff=lfs merge=lfs -text +*.bin filter=lfs diff=lfs merge=lfs -text diff --git a/.github/ISSUE_TEMPLATE/bug_report_en.md b/.github/ISSUE_TEMPLATE/bug_report_en.md new file mode 100644 index 00000000..4ddc6ef8 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report_en.md @@ -0,0 +1,34 @@ +--- +name: Bug report +about: Create a report to help us improve TGFX +title: '' +labels: '' +assignees: 'domchen' + +--- + + + + +## Which Version of TGFX are you using? + +1.0.0 + +## What Platform are you on? + +iOS 12, Android 10, macOS 10.15.3, Chrome 87.0 ... + +## Expected Behavior + + +## Actual Behavior + + +## Code Example + +## Related File + \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..eda8bc24 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,7 @@ +--- +blank_issues_enabled: false +contact_links: + - + about: "Please use Discussions to ask usage questions or suggest new features" + name: Question & Feature Request + url: "https://github.com/libpag/tgfx/discussions/1" diff --git a/.github/workflows/autotest.yml b/.github/workflows/autotest.yml new file mode 100644 index 00000000..85003c5b --- /dev/null +++ b/.github/workflows/autotest.yml @@ -0,0 +1,116 @@ +# This is a basic workflow to help you get started with Actions + +name: autotest + +# Controls when the workflow will run +on: + # Triggers the workflow on push or pull request events but only for the master branch + pull_request: + branches: [ main ] + push: + branches: [ main ] +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + autotest: + # The CMake configure and build commands are platform agnostic and should work equally well on Windows or Mac. + # You can convert this to a matrix build if you need cross-platform coverage. 
+ # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix + runs-on: macos-latest + + steps: + - name: Get environment cache + id: cache-environment + uses: actions/cache@v2 + with: + path: | + /usr/local/Cellar/ninja + /usr/local/Cellar/icu4c + /usr/local/bin/ninja + /usr/local/Cellar/node + /usr/local/bin/node + /usr/local/Cellar/yasm + /usr/local/bin/yasm + /usr/local/bin/depsync + /usr/local/lib/node_modules/depsync + /usr/local/Cellar/gcovr + /usr/local/bin/gcovr + /usr/local/Cellar/emsdk + /usr/local/Cellar/emsdk/upstream/emscripten + /usr/local/Cellar/emsdk/node/14.18.2_64bit/bin + /usr/local/bin/em++ + /usr/local/bin/em-config + /usr/local/bin/emar + /usr/local/bin/embuilder + /usr/local/bin/emcc + /usr/local/bin/emcmake + /usr/local/bin/emconfigure + /usr/local/bin/emdump + /usr/local/bin/emdwp + /usr/local/bin/emmake + /usr/local/bin/emnm + /usr/local/bin/emrun + /usr/local/bin/emprofile + /usr/local/bin/emscons + /usr/local/bin/emsize + /usr/local/bin/emstrip + /usr/local/bin/emsymbolizer + /usr/local/bin/emcc.py + /usr/local/bin/emcmake.py + /usr/local/bin/emar.py + key: tgfx-environment-autotest-20221018 + restore-keys: | + tgfx-environment-autotest-20221018 + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + lfs: true + - name: Get thirdParty cache + id: cache-thirdParty + uses: actions/cache@v2 + with: + path: | + third_party + vendor_tools + test/baseline/.cache + key: third_party-autotest-01-${{ hashFiles('DEPS') }}-${{ hashFiles('vendor.json') }}-${{ hashFiles('test/baseline/version.json') }} + restore-keys: third_party-autotest- + - name: Run sync_deps script + run: | + chmod +x sync_deps.sh + ./sync_deps.sh + shell: bash + + - if: github.event_name == 'push' + name: Build cache(push) + run: | + node build_vendor -p mac + if [ ! 
$(which gcovr) ]; then + brew install gcovr + fi + chmod +x update_baseline.sh + ./update_baseline.sh 1 + mkdir result + cp -r test/baseline result + + - if: github.event_name == 'pull_request' + name: Run autotest script + run: | + chmod +x autotest.sh + ./autotest.sh + + - name: codecov + if: github.event_name == 'pull_request' + uses: codecov/codecov-action@v2.1.0 + with: + token: ${{ secrets.CODECOV_TOKEN }} + file: tgfx/result/coverage.xml + - name: The job has failed + if: ${{ failure() }} + uses: actions/upload-artifact@v2 + with: + name: result + path: result + - uses: actions/upload-artifact@v2 + with: + name: result + path: result diff --git a/.github/workflows/build_linux.yml b/.github/workflows/build_linux.yml new file mode 100644 index 00000000..b19d2977 --- /dev/null +++ b/.github/workflows/build_linux.yml @@ -0,0 +1,51 @@ +# This is a basic workflow to help you get started with Actions + +name: build_linux + +# Controls when the workflow will run +on: + # Triggers the workflow on push or pull request events but only for the master branch + pull_request: + branches: [ main ] + push: + branches: [ main ] +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + build_linux: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + with: + lfs: true + - name: Get thirdParty cache + id: cache-thirdParty + uses: actions/cache@v2 + with: + path: | + third_party + vendor_tools + key: third_party-linux-${{ hashFiles('DEPS') }}-${{ hashFiles('vendor.json') }} + restore-keys: third_party-linux- + + - uses: seanmiddleditch/gha-setup-ninja@master + - name: Run sync_deps script + run: | + chmod +x sync_deps.sh + ./sync_deps.sh + + - if: github.event_name == 'push' + name: Build cache(push) + run: | + node build_vendor -p linux + + - if: github.event_name == 'pull_request' + name: Run build_linux script + run: | + chmod +x build_linux.sh + ./build_linux.sh + cd linux + mkdir build + cd build + cmake ../ + make -j8 \ No newline at end of file diff --git a/.github/workflows/build_web.yml b/.github/workflows/build_web.yml new file mode 100644 index 00000000..b72d83bb --- /dev/null +++ b/.github/workflows/build_web.yml @@ -0,0 +1,92 @@ +# This is a basic workflow to help you get started with Actions + +name: build_web + +# Controls when the workflow will run +on: + # Triggers the workflow on push or pull request events but only for the master branch + pull_request: + branches: [ main ] + push: + branches: [ main ] +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + build_web: + # The CMake configure and build commands are platform agnostic and should work equally well on Windows or Mac. + # You can convert this to a matrix build if you need cross-platform coverage. 
+ # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix + runs-on: macos-latest + + steps: + - name: Get environment cache + id: cache-environment + uses: actions/cache@v2 + with: + path: | + /usr/local/Cellar/ninja + /usr/local/Cellar/icu4c + /usr/local/bin/ninja + /usr/local/Cellar/yasm + /usr/local/bin/yasm + /usr/local/bin/depsync + /usr/local/lib/node_modules/depsync + /usr/local/Cellar/gcovr + /usr/local/bin/gcovr + /usr/local/Cellar/emsdk + /usr/local/Cellar/emsdk/upstream/emscripten + /usr/local/Cellar/emsdk/node/14.18.2_64bit/bin + /usr/local/bin/em++ + /usr/local/bin/em-config + /usr/local/bin/emar + /usr/local/bin/embuilder + /usr/local/bin/emcc + /usr/local/bin/emcmake + /usr/local/bin/emconfigure + /usr/local/bin/emdump + /usr/local/bin/emdwp + /usr/local/bin/emmake + /usr/local/bin/emnm + /usr/local/bin/emrun + /usr/local/bin/emprofile + /usr/local/bin/emscons + /usr/local/bin/emsize + /usr/local/bin/emstrip + /usr/local/bin/emsymbolizer + /usr/local/bin/emcc.py + /usr/local/bin/emcmake.py + /usr/local/bin/emar.py + key: tgfx-environment-autotest-20221018 + restore-keys: | + tgfx-environment-autotest-20221018 + - uses: actions/checkout@v3 + with: + lfs: true + - name: Get thirdParty cache + id: cache-thirdParty + uses: actions/cache@v2 + with: + path: | + third_party + vendor_tools + web/node_node_modules + key: third_party-web-${{ hashFiles('DEPS') }}-${{ hashFiles('vendor.json') }} + restore-keys: third_party-web- + + - name: Run sync_deps script + run: | + chmod +x sync_deps.sh + ./sync_deps.sh + + - if: github.event_name == 'push' + name: Build cache(push) + run: | + node build_vendor -p web + if [ ! $(which gcovr) ]; then + brew install gcovr + fi + - if: github.event_name == 'pull_request' + name: Run build script + run: | + cd web/script + chmod +x build.sh + ./build.sh diff --git a/.github/workflows/build_win.yml b/.github/workflows/build_win.yml new file mode 100644 index 00000000..f9385de1 --- /dev/null +++ b/.github/workflows/build_win.yml @@ -0,0 +1,49 @@ +# This is a basic workflow to help you get started with Actions + +name: build_win + +# Controls when the workflow will run +on: + # Triggers the workflow on push or pull request events but only for the master branch + pull_request: + branches: [ main ] + push: + branches: [ main ] +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + build_win: + # The CMake configure and build commands are platform agnostic and should work equally well on Windows or Mac. + # You can convert this to a matrix build if you need cross-platform coverage. 
+ # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix + runs-on: windows-latest + + steps: + - uses: actions/checkout@v3 + with: + lfs: true + - name: Get thirdParty cache + id: cache-thirdParty + uses: actions/cache@v2 + with: + path: | + third_party + vendor_tools + key: third_party-win-${{ hashFiles('DEPS') }}-${{ hashFiles('vendor.json') }} + restore-keys: third_party-win- + + - name: Run depsync + run: | + npm install depsync -g + depsync + shell: bash + + - if: github.event_name == 'push' + name: Build cache(push) + run: | + node build_vendor -p win + + - if: github.event_name == 'pull_request' + name: Windows build + run: | + node ./vendor_tools/cmake-build tgfx -p win -a x64 -o ./win/libtgfx -v -i + shell: bash \ No newline at end of file diff --git a/.github/workflows/code_format.yml b/.github/workflows/code_format.yml new file mode 100644 index 00000000..8d323f4c --- /dev/null +++ b/.github/workflows/code_format.yml @@ -0,0 +1,25 @@ +name: code_format + +on: + pull_request: + branches: [ main ] + push: + branches: [ main ] + +env: + # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.) + BUILD_TYPE: Debug + +jobs: + code_format: + # The CMake configure and build commands are platform agnostic and should work equally well on Windows or Mac. + # You can convert this to a matrix build if you need cross-platform coverage. + # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix + runs-on: macos-latest + steps: + - uses: actions/checkout@v2 + - name: Run codeformat script + run: | + chmod +x codeformat.sh + ./codeformat.sh + shell: bash diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..bfc87c83 --- /dev/null +++ b/.gitignore @@ -0,0 +1,78 @@ +# Compiled Object files +*.slo +*.lo +*.o +*.obj + +# Precompiled Headers +*.gch +*.pch + +# Compiled Dynamic libraries +#*.dll + +# Fortran module files +*.mod +*.smod + +# Compiled Static libraries +*.lai +*.la +#*.a +#*.lib + +# Executables +*.exe +*.out +*.app + +# Gradle +captures/ +build/ + + +# Android +.gradle +.externalNativeBuild +local.properties + +# IDEA Projects +!.idea +**/.idea/* +!**/.idea/fileTemplates/ +!**/.idea/misc.xml + +/third_party +/out +cmake-build-* +node_modules +Build/ +test/out/ +DerivedData/ +xcuserdata +xcshareddata +*.xcworkspace +.DS_Store +.temp +Pods/ +Podfile.lock +Adobe After Effects Auto-Save/ +.cxx +/.sync_deps.cmake +/build.yml +.vs +win/Win32Demo/x64 +win/x64 +win/build +win/libtgfx +win/Debug/ +win/Release/ +win/Win32Demo/Debug/ +win/Win32Demo/Release/ +QTCMAKE.cfg +test/baseline/.cache +/result + + +vendor_tools/ +.vscode \ No newline at end of file diff --git a/.idea/fileTemplates/includes/TGFX File Header.h b/.idea/fileTemplates/includes/TGFX File Header.h new file mode 100644 index 00000000..9a045aca --- /dev/null +++ b/.idea/fileTemplates/includes/TGFX File Header.h @@ -0,0 +1,17 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/.idea/fileTemplates/internal/C Header File.h b/.idea/fileTemplates/internal/C Header File.h new file mode 100644 index 00000000..d7f0678b --- /dev/null +++ b/.idea/fileTemplates/internal/C Header File.h @@ -0,0 +1,7 @@ +#parse("TGFX File Header.h") + +#pragma once + +namespace tgfx { + +} // namespace tgfx diff --git a/.idea/fileTemplates/internal/C Source File.c b/.idea/fileTemplates/internal/C Source File.c new file mode 100644 index 00000000..6b8866ad --- /dev/null +++ b/.idea/fileTemplates/internal/C Source File.c @@ -0,0 +1,4 @@ +#parse("TGFX File Header.h") +#if (${HEADER_FILENAME}) +#[[#include]]# "${HEADER_FILENAME}" +#end diff --git a/.idea/fileTemplates/internal/C++ Class Header.h b/.idea/fileTemplates/internal/C++ Class Header.h new file mode 100644 index 00000000..e1c28ff0 --- /dev/null +++ b/.idea/fileTemplates/internal/C++ Class Header.h @@ -0,0 +1,9 @@ +#parse("TGFX File Header.h") + +#pragma once + +namespace tgfx { +class ${NAME} { + public: +}; +} // namespace tgfx diff --git a/.idea/fileTemplates/internal/C++ Class.cc b/.idea/fileTemplates/internal/C++ Class.cc new file mode 100644 index 00000000..6df4f00d --- /dev/null +++ b/.idea/fileTemplates/internal/C++ Class.cc @@ -0,0 +1,7 @@ +#parse("TGFX File Header.h") + +#[[#include]]# "${HEADER_FILENAME}" + +namespace tgfx { + +} // namespace tgfx diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 00000000..0213d840 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 00000000..91e60cd7 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,344 @@ +cmake_minimum_required(VERSION 3.1) +project(TGFX) + +#set(CMAKE_VERBOSE_MAKEFILE ON) +include(./vendor_tools/vendor.cmake) + +# Options for building tgfx +option(TGFX_USE_OPENGL "Allow use of OpenGL as GPU backend" ON) +option(TGFX_USE_QT "Allow build with QT frameworks." 
OFF) +option(TGFX_USE_SWIFTSHADER "Allow build with SwiftShader library" OFF) + +if (APPLE OR WEB) + option(TGFX_USE_FREETYPE "Allow use of embedded freetype library" OFF) +else () + option(TGFX_USE_FREETYPE "Allow use of embedded freetype library" ON) +endif () + +if (IOS OR WEB) + option(TGFX_USE_WEBP_DECODE "Enable embedded WEBP decoding support" ON) +elseif (NOT ANDROID) + option(TGFX_USE_PNG_DECODE "Enable embedded PNG decoding support" ON) + option(TGFX_USE_PNG_ENCODE "Enable embedded PNG encoding support" ON) + option(TGFX_USE_JPEG_DECODE "Enable embedded JPEG decoding support" ON) + option(TGFX_USE_JPEG_ENCODE "Enable embedded JPEG encoding support" ON) + option(TGFX_USE_WEBP_DECODE "Enable embedded WEBP decoding support" ON) + option(TGFX_USE_WEBP_ENCODE "Enable embedded WEBP encoding support" ON) +endif () + +message("TGFX_USE_OPENGL: ${TGFX_USE_OPENGL}") +message("TGFX_USE_QT: ${TGFX_USE_QT}") +message("TGFX_USE_SWIFTSHADER: ${TGFX_USE_SWIFTSHADER}") +message("TGFX_USE_FREETYPE: ${TGFX_USE_FREETYPE}") +message("TGFX_USE_PNG_DECODE: ${TGFX_USE_PNG_DECODE}") +message("TGFX_USE_PNG_ENCODE: ${TGFX_USE_PNG_ENCODE}") +message("TGFX_USE_JPEG_DECODE: ${TGFX_USE_JPEG_DECODE}") +message("TGFX_USE_JPEG_ENCODE: ${TGFX_USE_JPEG_ENCODE}") +message("TGFX_USE_WEBP_DECODE: ${TGFX_USE_WEBP_DECODE}") +message("TGFX_USE_WEBP_ENCODE: ${TGFX_USE_WEBP_ENCODE}") + +set(CMAKE_CXX_STANDARD 17) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +cmake_policy(SET CMP0063 NEW) +set(CMAKE_CXX_VISIBILITY_PRESET hidden) + +if (CMAKE_CXX_COMPILER_ID MATCHES "Clang") + add_definitions(-Werror -Wall -Wextra -Weffc++ -pedantic -Werror=return-type) +endif () + +if (MSVC) + add_compile_options("/utf-8") + add_compile_options(/w44251 /w44275) + add_definitions(-DNOMINMAX -D_USE_MATH_DEFINES) +endif (MSVC) + +# Sets flags +if (CMAKE_BUILD_TYPE STREQUAL "Debug") + add_definitions(-DDEBUG) + #set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address -fno-omit-frame-pointer -g -O1") +endif () + +# collects tgfx common files. 
+set(TGFX_INCLUDES ./ include src) +file(GLOB_RECURSE TGFX_FILES + src/core/*.* + src/effects/*.* + src/images/*.* + src/shaders/*.* + src/shapes/*.* + src/utils/*.* + src/gpu/*.*) + +file(GLOB TGFX_PLATFORM_COMMON_FILES + src/codecs/*.* + src/vectors/*.* + src/platform/*.*) +list(APPEND TGFX_FILES ${TGFX_PLATFORM_COMMON_FILES}) + +if (TGFX_USE_QT) + set(TGFX_USE_SWIFTSHADER OFF) +endif () + +if (TGFX_USE_QT OR TGFX_USE_SWIFTSHADER) + set(TGFX_USE_OPENGL ON) +endif () + +if (TGFX_USE_FREETYPE) + # Freetype needs libpng + set(TGFX_USE_PNG_DECODE ON) +endif () + +if (TGFX_USE_PNG_DECODE) + add_definitions(-DTGFX_USE_PNG_DECODE) + set(TGFX_USE_PNG ON) +endif () + +if (TGFX_USE_PNG_ENCODE) + add_definitions(-DTGFX_USE_PNG_ENCODE) + set(TGFX_USE_PNG ON) +endif () + +if (TGFX_USE_JPEG_DECODE) + add_definitions(-DTGFX_USE_JPEG_DECODE) + set(TGFX_USE_JPEG ON) +endif () + +if (TGFX_USE_JPEG_ENCODE) + add_definitions(-DTGFX_USE_JPEG_ENCODE) + set(TGFX_USE_JPEG ON) +endif () + +if (TGFX_USE_WEBP_DECODE) + add_definitions(-DTGFX_USE_WEBP_DECODE) + set(TGFX_USE_WEBP ON) +endif () + +if (TGFX_USE_WEBP_ENCODE) + add_definitions(-DTGFX_USE_WEBP_ENCODE) + set(TGFX_USE_WEBP ON) +endif () + +if (TGFX_USE_JPEG) + list(APPEND TGFX_STATIC_VENDORS libjpeg-turbo) + file(GLOB_RECURSE GFX_PLATFORM_FILES src/codecs/jpeg/*.*) + list(APPEND TGFX_FILES ${GFX_PLATFORM_FILES}) + list(APPEND TGFX_INCLUDES third_party/out/libjpeg-turbo/${INCLUDE_ENTRY}) +endif () + +if (TGFX_USE_WEBP) + list(APPEND TGFX_STATIC_VENDORS libwebp) + file(GLOB_RECURSE GFX_PLATFORM_FILES src/codecs/webp/*.*) + list(APPEND TGFX_FILES ${GFX_PLATFORM_FILES}) + list(APPEND TGFX_INCLUDES third_party/libwebp/src) +endif () + +if (TGFX_USE_PNG) + set(TGFX_USE_ZLIB ON) + list(APPEND TGFX_STATIC_VENDORS libpng) + file(GLOB_RECURSE GFX_PLATFORM_FILES src/codecs/png/*.*) + list(APPEND TGFX_FILES ${GFX_PLATFORM_FILES}) + list(APPEND TGFX_INCLUDES third_party/out/libpng/${INCLUDE_ENTRY}) +endif () + +if (TGFX_USE_ZLIB) + list(APPEND TGFX_STATIC_VENDORS zlib) +endif () + +if (TGFX_USE_FREETYPE) + add_definitions(-DTGFX_USE_FREETYPE) + list(APPEND TGFX_STATIC_VENDORS freetype) + file(GLOB_RECURSE GFX_PLATFORM_FILES src/vectors/freetype/*.*) + list(APPEND TGFX_FILES ${GFX_PLATFORM_FILES}) + list(APPEND TGFX_INCLUDES third_party/freetype/include) +elseif (WEB) + add_definitions(-DTGFX_BUILD_FOR_WEB) + file(GLOB_RECURSE GFX_PLATFORM_FILES src/vectors/web/*.*) + list(APPEND TGFX_FILES ${GFX_PLATFORM_FILES}) +elseif (APPLE) + # Uses CoreGraphics instead. 
+ add_definitions(-DTGFX_USE_CORE_GRAPHICS) + file(GLOB_RECURSE GFX_PLATFORM_FILES src/vectors/coregraphics/*.*) + list(APPEND TGFX_FILES ${GFX_PLATFORM_FILES}) +endif () + +if (TGFX_USE_OPENGL) + file(GLOB GFX_PLATFORM_FILES src/opengl/*.*) + list(APPEND TGFX_FILES ${GFX_PLATFORM_FILES}) + file(GLOB_RECURSE GFX_PROCESSOR_FILES src/opengl/processors/*.*) + list(APPEND TGFX_FILES ${GFX_PROCESSOR_FILES}) +elseif (ANDROID) + file(GLOB_RECURSE GFX_PLATFORM_FILES src/vulkan/*.*) + list(APPEND TGFX_FILES ${GFX_PLATFORM_FILES}) +elseif (APPLE) + file(GLOB_RECURSE GFX_PLATFORM_FILES src/metal/*.*) + list(APPEND TGFX_FILES ${GFX_PLATFORM_FILES}) + find_library(Metal_LIBS Metal REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${Metal_LIBS}) +endif () + +file(GLOB_RECURSE EGL_PLATFORM_FILES src/opengl/egl/*.*) + +if (TGFX_USE_QT) + # need to set the CMAKE_PREFIX_PATH to local QT installation path, for example : + # set(CMAKE_PREFIX_PATH /Users/username/Qt5.13.0/5.13.0/clang_64/lib/cmake) + find_package(Qt5OpenGL REQUIRED) + find_package(Qt5Gui REQUIRED) + find_package(Qt5Quick REQUIRED) + list(APPEND GPU_PLATFORM_SHARED_LIBS ${Qt5OpenGL_LIBRARIES} ${Qt5Gui_LIBRARIES} ${Qt5Quick_LIBRARIES}) + list(APPEND GPU_PLATFORM_INCLUDES ${Qt5OpenGL_INCLUDE_DIRS} ${Qt5Gui_INCLUDE_DIRS} ${Qt5Quick_INCLUDE_DIRS}) + file(GLOB_RECURSE GPU_PLATFORM_FILES src/opengl/qt/*.*) + if (MACOS) + file(GLOB CGL_PLATFORM_FILES src/opengl/cgl/CGLHardwareTexture.mm) + list(APPEND GPU_PLATFORM_FILES ${CGL_PLATFORM_FILES}) + add_definitions(-DGL_SILENCE_DEPRECATION) + find_library(OpenGL_LIBS OpenGL REQUIRED) + list(APPEND GPU_PLATFORM_SHARED_LIBS ${OpenGL_LIBS}) + endif () +elseif (TGFX_USE_SWIFTSHADER) + list(APPEND GPU_PLATFORM_INCLUDES vendor/swiftshader/include) + file(GLOB SWIFTSHADER_LIBS vendor/swiftshader/${LIBRARY_ENTRY}/*${CMAKE_SHARED_LIBRARY_SUFFIX}) + list(APPEND GPU_PLATFORM_SHARED_LIBS ${SWIFTSHADER_LIBS}) + list(APPEND GPU_PLATFORM_FILES ${EGL_PLATFORM_FILES}) +else () + set(TGFX_USE_NATIVE_GL ON) +endif () + +list(APPEND TGFX_STATIC_VENDORS pathkit) +list(APPEND TGFX_INCLUDES third_party/pathkit) +list(APPEND TGFX_STATIC_VENDORS skcms) +list(APPEND TGFX_INCLUDES third_party/skcms) + +if (WEB) + file(GLOB_RECURSE PLATFORM_FILES src/platform/web/*.*) + list(APPEND TGFX_PLATFORM_FILES ${PLATFORM_FILES}) + if (TGFX_USE_NATIVE_GL) + file(GLOB_RECURSE GPU_PLATFORM_FILES src/opengl/webgl/*.*) + endif () +elseif (IOS) + # finds all required platform libraries. 
+ find_library(UIKit_LIBS UIKit REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${UIKit_LIBS}) + find_library(Foundation_LIBS Foundation REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${Foundation_LIBS}) + find_library(QuartzCore_LIBS QuartzCore REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${QuartzCore_LIBS}) + find_library(CoreGraphics_LIBS CoreGraphics REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${CoreGraphics_LIBS}) + find_library(CoreText_LIBS CoreText REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${CoreText_LIBS}) + find_library(CoreMedia_LIBS CoreMedia REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${CoreMedia_LIBS}) + find_library(CoreMedia_LIBS CoreMedia REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${CoreMedia_LIBS}) + find_library(ImageIO_LIBS ImageIO REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${ImageIO_LIBS}) + find_library(CORE_VIDEO CoreVideo) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${CORE_VIDEO}) + find_library(ICONV_LIBRARIES NAMES iconv libiconv libiconv-2 c) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${ICONV_LIBRARIES}) + + file(GLOB_RECURSE PLATFORM_FILES src/platform/apple/*.*) + list(APPEND TGFX_PLATFORM_FILES ${PLATFORM_FILES}) + + if (TGFX_USE_NATIVE_GL) + add_definitions(-DGL_SILENCE_DEPRECATION) + find_library(OpenGLES_LIBS OpenGLES REQUIRED) + list(APPEND GPU_PLATFORM_SHARED_LIBS ${OpenGLES_LIBS}) + file(GLOB_RECURSE GPU_PLATFORM_FILES src/opengl/eagl/*.*) + endif () +elseif (MACOS) + # finds all required platform libraries. + find_library(APPLICATION_SERVICES_FRAMEWORK ApplicationServices REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${APPLICATION_SERVICES_FRAMEWORK}) + find_library(QUARTZ_CORE QuartzCore REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${QUARTZ_CORE}) + find_library(COCOA Cocoa REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${COCOA}) + find_library(FOUNDATION Foundation REQUIRED) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${FOUNDATION}) + find_library(ICONV_LIBRARIES NAMES iconv libiconv libiconv-2 c) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${ICONV_LIBRARIES}) + find_library(CORE_MEDIA CoreMedia) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${CORE_MEDIA}) + + file(GLOB_RECURSE PLATFORM_FILES src/platform/apple/*.*) + list(APPEND TGFX_PLATFORM_FILES ${PLATFORM_FILES}) + + if (TGFX_USE_NATIVE_GL) + add_definitions(-DGL_SILENCE_DEPRECATION) + find_library(OpenGL_LIBS OpenGL REQUIRED) + list(APPEND GPU_PLATFORM_SHARED_LIBS ${OpenGL_LIBS}) + file(GLOB_RECURSE GPU_PLATFORM_FILES src/opengl/cgl/*.*) + endif () + +elseif (ANDROID) + + find_library(LOG_LIB log) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${LOG_LIB}) + find_library(ANDROID_LIB android) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${ANDROID_LIB}) + find_library(JNIGRAPHICS_LIB jnigraphics) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${JNIGRAPHICS_LIB}) + + file(GLOB_RECURSE PLATFORM_FILES src/platform/android/*.*) + list(APPEND TGFX_PLATFORM_FILES ${PLATFORM_FILES}) + + if (TGFX_USE_NATIVE_GL) + find_library(GLESV2_LIB GLESv2) + list(APPEND GPU_PLATFORM_SHARED_LIBS ${GLESV2_LIB}) + find_library(EGL_LIB EGL) + list(APPEND GPU_PLATFORM_SHARED_LIBS ${EGL_LIB}) + file(GLOB_RECURSE GPU_PLATFORM_FILES src/opengl/egl/*.*) + endif () + + # optimizes the output size + add_compile_options(-ffunction-sections -fdata-sections -Os -fno-exceptions -fno-rtti) + +elseif (WIN32) + find_library(Bcrypt_LIB Bcrypt) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${Bcrypt_LIB}) + find_library(ws2_32_LIB ws2_32) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${ws2_32_LIB}) + find_library(DWrite_LIB DWrite) + 
list(APPEND TGFX_PLATFORM_STATIC_LIBS ${DWrite_LIB}) + + file(GLOB_RECURSE PLATFORM_FILES src/platform/mock/*.*) + list(APPEND TGFX_PLATFORM_FILES ${PLATFORM_FILES}) + + if (TGFX_USE_NATIVE_GL) + file(GLOB ANGLE_LIBS vendor/angle/${PLATFORM}/${ARCH}/*${CMAKE_STATIC_LIBRARY_SUFFIX}) + list(APPEND GPU_PLATFORM_STATIC_LIBS ${ANGLE_LIBS}) + list(APPEND GPU_PLATFORM_INCLUDES vendor/angle/include) + file(GLOB_RECURSE GPU_PLATFORM_FILES src/opengl/egl/*.*) + endif () +elseif (CMAKE_HOST_SYSTEM_NAME MATCHES "Linux") + find_package(Threads) + list(APPEND TGFX_PLATFORM_SHARED_LIBS ${CMAKE_THREAD_LIBS_INIT}) + list(APPEND TGFX_PLATFORM_SHARED_LIBS dl) + add_compile_options(-fPIC -pthread) + + file(GLOB_RECURSE PLATFORM_FILES src/platform/mock/*.*) + list(APPEND TGFX_PLATFORM_FILES ${PLATFORM_FILES}) + + if (TGFX_USE_NATIVE_GL) + find_library(GLESV2_LIB GLESv2) + list(APPEND GPU_PLATFORM_SHARED_LIBS ${GLESV2_LIB}) + find_library(EGL_LIB EGL) + list(APPEND GPU_PLATFORM_SHARED_LIBS ${EGL_LIB}) + file(GLOB_RECURSE GPU_PLATFORM_FILES src/opengl/egl/*.*) + endif () +endif () + +add_vendor_target(TGFX STATIC_VENDORS ${TGFX_STATIC_VENDORS} SHARED_VENDORS ${TGFX_SHARED_VENDORS}) +list(APPEND TGFX_PLATFORM_STATIC_LIBS ${TGFX_VENDOR_STATIC_LIBRARIES}) +list(APPEND TGFX_PLATFORM_SHARED_LIBS ${TGFX_VENDOR_SHARED_LIBRARIES}) + +get_directory_property(HasParent PARENT_DIRECTORY) +if (HasParent) + set(TGFX_PLATFORM_STATIC_LIBS ${TGFX_PLATFORM_STATIC_LIBS} PARENT_SCOPE) +endif () + +add_library(tgfx STATIC ${TGFX_VENDOR_TARGET} ${PLATFORM_VENDOR_TARGETS} ${TGFX_FILES} ${TGFX_PLATFORM_FILES} ${GPU_PLATFORM_FILES}) +target_include_directories(tgfx PUBLIC include PRIVATE ${TGFX_INCLUDES} ${GPU_PLATFORM_INCLUDES}) +merge_libraries_into(tgfx ${TGFX_STATIC_LIBS} ${TGFX_PLATFORM_STATIC_LIBS}) +target_link_libraries(tgfx ${TGFX_PLATFORM_SHARED_LIBS} ${GPU_PLATFORM_SHARED_LIBS}) \ No newline at end of file diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 00000000..ef4ddca0 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,12 @@ +# Default owners: +* @domchen @lvpengwei @luckyzhliu + +*.js @domchen @lvpengwei @luckyzhliu +*.ts @domchen @lvpengwei @luckyzhliu +*.sh @kevingpqi123 @domchen @lvpengwei @luckyzhliu +/* @kevingpqi123 @domchen @lvpengwei @luckyzhliu +/web/ @domchen @lvpengwei @luckyzhliu +/.github/ @kevingpqi123 @domchen @lvpengwei @luckyzhliu +/test/ @kevingpqi123 @domchen @lvpengwei @luckyzhliu +/linux/ @kevingpqi123 @domchen @lvpengwei @luckyzhliu +/win/ @FusionXu @kevingpqi123 @domchen @lvpengwei @luckyzhliu diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..1353ef58 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,50 @@ +# Contributing to tgfx +You are welcome to [report issues](https://github.com/Tencent/tgfx/issues) or open [pull requests](https://github.com/Tencent/tgfx/pulls). It's recommended to read the following Contributing Guide before contributing. + +## Issues +We use GitHub Issues to track public bugs and feature requests. + +### Search Known Issues First +Please search the existing issues to see if any similar issue or feature request has already been filed. You should make sure your issue isn't a duplicate. + +### Reporting New Issues +When you open an issue, the more information the better: a detailed description, a screenshot or video of the problem, logcat or xlog output, or a code block that reproduces the crash. + +## Pull Requests +We warmly welcome your pull requests to make tgfx better. + +### Branch Management +There are two main branches here: + +1. `main` branch. + 1. 
It is our active development branch. After full testing, we periodically create a pre-release branch from the main branch. + 2. **We recommend submitting bugfix or feature PRs against the `main` branch.** +2. `release/{version}` branch. + 1. This is our stable release branch, which is fully tested and already used in many apps. + +Normal bugfixes and feature requests should be submitted to the `main` branch. + + +### Make Pull Requests +The code team monitors all pull requests and runs code checks and tests on them. After all tests pass, we will accept the PR. + +Before submitting a pull request, please make sure the following are done: + +1. Fork the repo and create your branch from `main`. +2. Update the code or documentation if you have changed APIs. +3. Add the copyright notice to the top of any new files you've added. +4. Make sure your code passes the lint and code style checks. +5. Test your code thoroughly. +6. Now you can submit your pull request on the `main` branch. + +## Code Style Guide +Use [Code Style for C/C++](http://zh-google-styleguide.readthedocs.io/en/latest/google-cpp-styleguide/). + +It is recommended to configure the provided clang-format file in your IDE. + +* 2 spaces for indentation rather than tabs + + +## License +By contributing to tgfx, you agree that your contributions will be licensed +under its [BSD 3-Clause License](./LICENSE.txt). \ No newline at end of file diff --git a/CPPLINT.cfg b/CPPLINT.cfg new file mode 100644 index 00000000..88729376 --- /dev/null +++ b/CPPLINT.cfg @@ -0,0 +1,9 @@ +linelength=100 +root=src +exclude_files=mac +exclude_files=android +exclude_files=ios +exclude_files=qt +exclude_files=vendor +exclude_files=test +exclude_files=.idea \ No newline at end of file diff --git a/DEPS b/DEPS new file mode 100644 index 00000000..8d9d4714 --- /dev/null +++ b/DEPS @@ -0,0 +1,81 @@ +{ + "version": "1.3.1", + "vars": { + "PAG_GROUP": "https://github.com/libpag" + }, + "repos": { + "common": [ + { + "url": "${PAG_GROUP}/vendor_tools.git", + "commit": "5593395a4a4328bc7e5ff11e5e90fc20ac61983c", + "dir": "vendor_tools" + }, + { + "url": "${PAG_GROUP}/pathkit.git", + "commit": "f0c4736442a8e640e7f7978b6b9ed322148245bb", + "dir": "third_party/pathkit" + }, + { + "url": "${PAG_GROUP}/skcms.git", + "commit": "26af768d918c8f9a693ce6ee1a0fc6611559d29e", + "dir": "third_party/skcms" + }, + { + "url": "${PAG_GROUP}/swiftshader.git", + "commit": "040ee43bfeba9b6b532dbd8222df737aac2d55b1", + "dir": "third_party/swiftshader" + }, + { + "url": "https://github.com/madler/zlib.git", + "commit": "cacf7f1d4e3d44d871b605da3b647f07d718623f", + "dir": "third_party/zlib" + }, + { + "url": "https://github.com/glennrp/libpng.git", + "commit": "a40189cf881e9f0db80511c382292a5604c3c3d1", + "dir": "third_party/libpng" + }, + { + "url": "https://github.com/webmproject/libwebp.git", + "commit": "9ce5843dbabcfd3f7c39ec7ceba9cbeb213cbfdf", + "dir": "third_party/libwebp" + }, + { + "url": "https://github.com/libjpeg-turbo/libjpeg-turbo.git", + "commit": "0a9b9721782d3a60a5c16c8c9a7abf3d4b1ecd42", + "dir": "third_party/libjpeg-turbo" + }, + { + "url": "https://github.com/freetype/freetype.git", + "commit": "86bc8a95056c97a810986434a3f268cbe67f2902", + "dir": "third_party/freetype" + }, + { + "url": "https://github.com/google/googletest.git", + "commit": "e2239ee6043f73722e7aa812a459f54a28552929", + "dir": "third_party/googletest" + }, + { + "url": "https://github.com/nlohmann/json.git", + "commit": "fec56a1a16c6e1c1b1f4e116a20e79398282626c", + "dir": "third_party/json" + } + ] + }, + "actions": { 
"common": [ + { + "command": "depsync --clean", + "dir": "third_party" + }, + { + "command": "git lfs prune", + "dir": "./" + }, + { + "command": "git lfs pull", + "dir": "./" + } + ] + } +} diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 00000000..e67fff0e --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,647 @@ +Tencent is pleased to support the open source community by making tgfx available. + +Copyright (C) 2022 THL A29 Limited, a Tencent company. All rights reserved. The below software in this distribution may have been modified by THL A29 Limited ("Tencent Modifications"). All Tencent Modifications are Copyright (C) 2022 THL A29 Limited. + +tgfx is licensed under the BSD 3-Clause License except for the third-party components listed below. + + +Terms of the BSD 3-Clause License: +-------------------------------------------------------------------- +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +Other dependencies and licenses: + + +Open Source Software Licensed under the The FreeType Project LICENSE and Other Licenses of the Third-Party Components therein: +-------------------------------------------------------------------- +1. freetype +Copyright (C) 2006-2022 by +David Turner, Robert Wilhelm, and Werner Lemberg. + + +Terms of the The FreeType Project LICENSE: +-------------------------------------------------------------------- +The FreeType Project LICENSE + ---------------------------- + 2006-Jan-27 + Copyright 1996-2002, 2006 by + David Turner, Robert Wilhelm, and Werner Lemberg +Introduction +============ + The FreeType Project is distributed in several archive packages; + some of them may contain, in addition to the FreeType font engine, + various tools and contributions which rely on, or relate to, the + FreeType Project. + This license applies to all files found in such packages, and + which do not fall under their own explicit license. The license + affects thus the FreeType font engine, the test programs, + documentation and makefiles, at the very least. 
+ This license was inspired by the BSD, Artistic, and IJG + (Independent JPEG Group) licenses, which all encourage inclusion + and use of free software in commercial and freeware products + alike. As a consequence, its main points are that: + o We don't promise that this software works. However, we will be + interested in any kind of bug reports. (`as is' distribution) + o You can use this software for whatever you want, in parts or + full form, without having to pay us. (`royalty-free' usage) + o You may not pretend that you wrote this software. If you use + it, or only parts of it, in a program, you must acknowledge + somewhere in your documentation that you have used the + FreeType code. (`credits') + We specifically permit and encourage the inclusion of this + software, with or without modifications, in commercial products. + We disclaim all warranties covering The FreeType Project and + assume no liability related to The FreeType Project. + Finally, many people asked us for a preferred form for a + credit/disclaimer to use in compliance with this license. We thus + encourage you to use the following text: + """ + Portions of this software are copyright © The FreeType + Project (www.freetype.org). All rights reserved. + """ + Please replace with the value from the FreeType version you + actually use. +Legal Terms +=========== +0. Definitions +-------------- + Throughout this license, the terms `package', `FreeType Project', + and `FreeType archive' refer to the set of files originally + distributed by the authors (David Turner, Robert Wilhelm, and + Werner Lemberg) as the `FreeType Project', be they named as alpha, + beta or final release. + `You' refers to the licensee, or person using the project, where + `using' is a generic term including compiling the project's source + code as well as linking it to form a `program' or `executable'. + This program is referred to as `a program using the FreeType + engine'. + This license applies to all files distributed in the original + FreeType Project, including all source code, binaries and + documentation, unless otherwise stated in the file in its + original, unmodified form as distributed in the original archive. + If you are unsure whether or not a particular file is covered by + this license, you must contact us to verify this. + The FreeType Project is copyright (C) 1996-2000 by David Turner, + Robert Wilhelm, and Werner Lemberg. All rights reserved except as + specified below. +1. No Warranty +-------------- + THE FREETYPE PROJECT IS PROVIDED `AS IS' WITHOUT WARRANTY OF ANY + KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. IN NO EVENT WILL ANY OF THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY DAMAGES CAUSED BY THE USE OR THE INABILITY TO + USE, OF THE FREETYPE PROJECT. +2. Redistribution +----------------- + This license grants a worldwide, royalty-free, perpetual and + irrevocable right and license to use, execute, perform, compile, + display, copy, create derivative works of, distribute and + sublicense the FreeType Project (in both source and object code + forms) and derivative works thereof for any purpose; and to + authorize others to exercise some or all of the rights granted + herein, subject to the following conditions: + o Redistribution of source code must retain this license file + (`FTL.TXT') unaltered; any additions, deletions or changes to + the original files must be clearly indicated in accompanying + documentation. 
The copyright notices of the unaltered, + original files must be preserved in all copies of source + files. + o Redistribution in binary form must provide a disclaimer that + states that the software is based in part of the work of the + FreeType Team, in the distribution documentation. We also + encourage you to put an URL to the FreeType web page in your + documentation, though this isn't mandatory. + These conditions apply to any software derived from or based on + the FreeType Project, not just the unmodified files. If you use + our work, you must acknowledge us. However, no fee need be paid + to us. +3. Advertising +-------------- + Neither the FreeType authors and contributors nor you shall use + the name of the other for commercial, advertising, or promotional + purposes without specific prior written permission. + We suggest, but do not require, that you use one or more of the + following phrases to refer to this software in your documentation + or advertising materials: `FreeType Project', `FreeType Engine', + `FreeType library', or `FreeType Distribution'. + As you have not signed this license, you are not required to + accept it. However, as the FreeType Project is copyrighted + material, only this license, or another one contracted with the + authors, grants you the right to use, distribute, and modify it. + Therefore, by using, distributing, or modifying the FreeType + Project, you indicate that you understand and accept all the terms + of this license. +4. Contacts +----------- + There are two mailing lists related to FreeType: + o freetype@nongnu.org + Discusses general use and applications of FreeType, as well as + future and wanted additions to the library and distribution. + If you are looking for support, start in this list if you + haven't found anything to help you in the documentation. + o freetype-devel@nongnu.org + Discusses bugs, as well as engine internals, design issues, + specific licenses, porting, etc. + Our home page can be found at + https://www.freetype.org +--- end of FTL.TXT --- + + + +Other Licenses of the Third-Party Components therein can be obtained from: +https://skia.googlesource.com/third_party/freetype2.git/+/refs/heads/master/LICENSE.TXT + + +Open Source Software Licensed under the IJG License and Other Licenses of the Third-Party Components therein: +-------------------------------------------------------------------- +1. libjpeg-turbo +Copyright (C) 1991-1996, Thomas G. Lane. +Copyright (C) 2017, D. R. Commander. + + +Terms of the IJG License: +-------------------------------------------------------------------- +The Independent JPEG Group's JPEG software +========================================== + +This distribution contains a release of the Independent JPEG Group's free JPEG +software. You are welcome to redistribute this software and to use it for any +purpose, subject to the conditions under LEGAL ISSUES, below. + +This software is the work of Tom Lane, Guido Vollbeding, Philip Gladstone, +Bill Allombert, Jim Boucher, Lee Crocker, Bob Friesenhahn, Ben Jackson, +Julian Minguillon, Luis Ortiz, George Phillips, Davide Rossi, Ge' Weijers, +and other members of the Independent JPEG Group. + +IJG is not affiliated with the ISO/IEC JTC1/SC29/WG1 standards committee +(also known as JPEG, together with ITU-T SG16). + + +DOCUMENTATION ROADMAP +===================== + +This file contains the following sections: + +OVERVIEW General description of JPEG and the IJG software. +LEGAL ISSUES Copyright, lack of warranty, terms of distribution. 
+REFERENCES Where to learn more about JPEG. +ARCHIVE LOCATIONS Where to find newer versions of this software. +FILE FORMAT WARS Software *not* to get. +TO DO Plans for future IJG releases. + +Other documentation files in the distribution are: + +User documentation: + usage.txt Usage instructions for cjpeg, djpeg, jpegtran, + rdjpgcom, and wrjpgcom. + *.1 Unix-style man pages for programs (same info as usage.txt). + wizard.txt Advanced usage instructions for JPEG wizards only. + change.log Version-to-version change highlights. +Programmer and internal documentation: + libjpeg.txt How to use the JPEG library in your own programs. + example.txt Sample code for calling the JPEG library. + structure.txt Overview of the JPEG library's internal structure. + coderules.txt Coding style rules --- please read if you contribute code. + +Please read at least usage.txt. Some information can also be found in the JPEG +FAQ (Frequently Asked Questions) article. See ARCHIVE LOCATIONS below to find +out where to obtain the FAQ article. + +If you want to understand how the JPEG code works, we suggest reading one or +more of the REFERENCES, then looking at the documentation files (in roughly +the order listed) before diving into the code. + + +OVERVIEW +======== + +This package contains C software to implement JPEG image encoding, decoding, +and transcoding. JPEG (pronounced "jay-peg") is a standardized compression +method for full-color and grayscale images. JPEG's strong suit is compressing +photographic images or other types of images that have smooth color and +brightness transitions between neighboring pixels. Images with sharp lines or +other abrupt features may not compress well with JPEG, and a higher JPEG +quality may have to be used to avoid visible compression artifacts with such +images. + +JPEG is lossy, meaning that the output pixels are not necessarily identical to +the input pixels. However, on photographic content and other "smooth" images, +very good compression ratios can be obtained with no visible compression +artifacts, and extremely high compression ratios are possible if you are +willing to sacrifice image quality (by reducing the "quality" setting in the +compressor.) + +This software implements JPEG baseline, extended-sequential, and progressive +compression processes. Provision is made for supporting all variants of these +processes, although some uncommon parameter settings aren't implemented yet. +We have made no provision for supporting the hierarchical or lossless +processes defined in the standard. + +We provide a set of library routines for reading and writing JPEG image files, +plus two sample applications "cjpeg" and "djpeg", which use the library to +perform conversion between JPEG and some other popular image file formats. +The library is intended to be reused in other applications. + +In order to support file conversion and viewing software, we have included +considerable functionality beyond the bare JPEG coding/decoding capability; +for example, the color quantization modules are not strictly part of JPEG +decoding, but they are essential for output to colormapped file formats or +colormapped displays. These extra functions can be compiled out of the +library if not required for a particular application. + +We have also included "jpegtran", a utility for lossless transcoding between +different JPEG processes, and "rdjpgcom" and "wrjpgcom", two simple +applications for inserting and extracting textual comments in JFIF files. 
+ +The emphasis in designing this software has been on achieving portability and +flexibility, while also making it fast enough to be useful. In particular, +the software is not intended to be read as a tutorial on JPEG. (See the +REFERENCES section for introductory material.) Rather, it is intended to +be reliable, portable, industrial-strength code. We do not claim to have +achieved that goal in every aspect of the software, but we strive for it. + +We welcome the use of this software as a component of commercial products. +No royalty is required, but we do ask for an acknowledgement in product +documentation, as described under LEGAL ISSUES. + + +LEGAL ISSUES +============ + +In plain English: + +1. We don't promise that this software works. (But if you find any bugs, + please let us know!) +2. You can use this software for whatever you want. You don't have to pay us. +3. You may not pretend that you wrote this software. If you use it in a + program, you must acknowledge somewhere in your documentation that + you've used the IJG code. + +In legalese: + +The authors make NO WARRANTY or representation, either express or implied, +with respect to this software, its quality, accuracy, merchantability, or +fitness for a particular purpose. This software is provided "AS IS", and you, +its user, assume the entire risk as to its quality and accuracy. + +This software is copyright (C) 1991-2016, Thomas G. Lane, Guido Vollbeding. +All Rights Reserved except as specified below. + +Permission is hereby granted to use, copy, modify, and distribute this +software (or portions thereof) for any purpose, without fee, subject to these +conditions: +(1) If any part of the source code for this software is distributed, then this +README file must be included, with this copyright and no-warranty notice +unaltered; and any additions, deletions, or changes to the original files +must be clearly indicated in accompanying documentation. +(2) If only executable code is distributed, then the accompanying +documentation must state that "this software is based in part on the work of +the Independent JPEG Group". +(3) Permission for use of this software is granted only if the user accepts +full responsibility for any undesirable consequences; the authors accept +NO LIABILITY for damages of any kind. + +These conditions apply to any software derived from or based on the IJG code, +not just to the unmodified library. If you use our work, you ought to +acknowledge us. + +Permission is NOT granted for the use of any IJG author's name or company name +in advertising or publicity relating to this software or products derived from +it. This software may be referred to only as "the Independent JPEG Group's +software". + +We specifically permit and encourage the use of this software as the basis of +commercial products, provided that all warranty or liability claims are +assumed by the product vendor. + + +The IJG distribution formerly included code to read and write GIF files. +To avoid entanglement with the Unisys LZW patent (now expired), GIF reading +support has been removed altogether, and the GIF writer has been simplified +to produce "uncompressed GIFs". This technique does not use the LZW +algorithm; the resulting GIF files are larger than usual, but are readable +by all standard GIF decoders. + +We are required to state that + "The Graphics Interchange Format(c) is the Copyright property of + CompuServe Incorporated. GIF(sm) is a Service Mark property of + CompuServe Incorporated." 
+ + +REFERENCES +========== + +We recommend reading one or more of these references before trying to +understand the innards of the JPEG software. + +The best short technical introduction to the JPEG compression algorithm is + Wallace, Gregory K. "The JPEG Still Picture Compression Standard", + Communications of the ACM, April 1991 (vol. 34 no. 4), pp. 30-44. +(Adjacent articles in that issue discuss MPEG motion picture compression, +applications of JPEG, and related topics.) If you don't have the CACM issue +handy, a PDF file containing a revised version of Wallace's article is +available at http://www.ijg.org/files/Wallace.JPEG.pdf. The file (actually +a preprint for an article that appeared in IEEE Trans. Consumer Electronics) +omits the sample images that appeared in CACM, but it includes corrections +and some added material. Note: the Wallace article is copyright ACM and IEEE, +and it may not be used for commercial purposes. + +A somewhat less technical, more leisurely introduction to JPEG can be found in +"The Data Compression Book" by Mark Nelson and Jean-loup Gailly, published by +M&T Books (New York), 2nd ed. 1996, ISBN 1-55851-434-1. This book provides +good explanations and example C code for a multitude of compression methods +including JPEG. It is an excellent source if you are comfortable reading C +code but don't know much about data compression in general. The book's JPEG +sample code is far from industrial-strength, but when you are ready to look +at a full implementation, you've got one here... + +The best currently available description of JPEG is the textbook "JPEG Still +Image Data Compression Standard" by William B. Pennebaker and Joan L. +Mitchell, published by Van Nostrand Reinhold, 1993, ISBN 0-442-01272-1. +Price US$59.95, 638 pp. The book includes the complete text of the ISO JPEG +standards (DIS 10918-1 and draft DIS 10918-2). + +The original JPEG standard is divided into two parts, Part 1 being the actual +specification, while Part 2 covers compliance testing methods. Part 1 is +titled "Digital Compression and Coding of Continuous-tone Still Images, +Part 1: Requirements and guidelines" and has document numbers ISO/IEC IS +10918-1, ITU-T T.81. Part 2 is titled "Digital Compression and Coding of +Continuous-tone Still Images, Part 2: Compliance testing" and has document +numbers ISO/IEC IS 10918-2, ITU-T T.83. + +The JPEG standard does not specify all details of an interchangeable file +format. For the omitted details, we follow the "JFIF" conventions, revision +1.02. JFIF version 1 has been adopted as ISO/IEC 10918-5 (05/2013) and +Recommendation ITU-T T.871 (05/2011): Information technology - Digital +compression and coding of continuous-tone still images: JPEG File Interchange +Format (JFIF). It is available as a free download in PDF file format from +https://www.iso.org/standard/54989.html and http://www.itu.int/rec/T-REC-T.871. +A PDF file of the older JFIF 1.02 specification is available at +http://www.w3.org/Graphics/JPEG/jfif3.pdf. + +The TIFF 6.0 file format specification can be obtained by FTP from +ftp://ftp.sgi.com/graphics/tiff/TIFF6.ps.gz. The JPEG incorporation scheme +found in the TIFF 6.0 spec of 3-June-92 has a number of serious problems. +IJG does not recommend use of the TIFF 6.0 design (TIFF Compression tag 6). +Instead, we recommend the JPEG design proposed by TIFF Technical Note #2 +(Compression tag 7). Copies of this Note can be obtained from +http://www.ijg.org/files/. 
It is expected that the next revision +of the TIFF spec will replace the 6.0 JPEG design with the Note's design. +Although IJG's own code does not support TIFF/JPEG, the free libtiff library +uses our library to implement TIFF/JPEG per the Note. + + +ARCHIVE LOCATIONS +================= + +The "official" archive site for this software is www.ijg.org. +The most recent released version can always be found there in +directory "files". + +The JPEG FAQ (Frequently Asked Questions) article is a source of some +general information about JPEG. +It is available on the World Wide Web at http://www.faqs.org/faqs/jpeg-faq/ +and other news.answers archive sites, including the official news.answers +archive at rtfm.mit.edu: ftp://rtfm.mit.edu/pub/usenet/news.answers/jpeg-faq/. +If you don't have Web or FTP access, send e-mail to mail-server@rtfm.mit.edu +with body + send usenet/news.answers/jpeg-faq/part1 + send usenet/news.answers/jpeg-faq/part2 + + +FILE FORMAT COMPATIBILITY +========================= + +This software implements ITU T.81 | ISO/IEC 10918 with some extensions from +ITU T.871 | ISO/IEC 10918-5 (JPEG File Interchange Format-- see REFERENCES). +Informally, the term "JPEG image" or "JPEG file" most often refers to JFIF or +a subset thereof, but there are other formats containing the name "JPEG" that +are incompatible with the DCT-based JPEG standard or with JFIF (for instance, +JPEG 2000 and JPEG XR). This software therefore does not support these +formats. Indeed, one of the original reasons for developing this free software +was to help force convergence on a common, interoperable format standard for +JPEG files. + +JFIF is a minimal or "low end" representation. TIFF/JPEG (TIFF revision 6.0 as +modified by TIFF Technical Note #2) can be used for "high end" applications +that need to record a lot of additional data about an image. + + +TO DO +===== + +Please send bug reports, offers of help, etc. to jpeg-info@jpegclub.org. + + + +Other Licenses of the Third-Party Components therein can be obtained from: +https://github.com/libjpeg-turbo/libjpeg-turbo/blob/2.0.0/LICENSE.md + + +Open Source Software Licensed under the libpng license: +-------------------------------------------------------------------- +1. libpng +Copyright (c) 2000-2002, 2004, 2006-2017 Glenn Randers-Pehrson + + +Terms of the libpng license: +-------------------------------------------------------------------- +This copy of the libpng notices is provided for your convenience. In case of +any discrepancy between this copy and the notices in the file png.h that is +included in the libpng distribution, the latter shall prevail. + +COPYRIGHT NOTICE, DISCLAIMER, and LICENSE: + +If you modify libpng you may insert additional notices immediately following +this sentence. + +This code is released under the libpng license. + +libpng versions 1.0.7, July 1, 2000 through 1.6.33, September 28, 2017 are +Copyright (c) 2000-2002, 2004, 2006-2017 Glenn Randers-Pehrson, are +derived from libpng-1.0.6, and are distributed according to the same +disclaimer and license as libpng-1.0.6 with the following individuals +added to the list of Contributing Authors: + + Simon-Pierre Cadieux + Eric S. Raymond + Mans Rullgard + Cosmin Truta + Gilles Vollant + James Yu + Mandar Sahastrabuddhe + Google Inc. + Vadim Barkov + +and with the following additions to the disclaimer: + + There is no warranty against interference with your enjoyment of the + library or against infringement. 
There is no warranty that our + efforts or the library will fulfill any of your particular purposes + or needs. This library is provided with all faults, and the entire + risk of satisfactory quality, performance, accuracy, and effort is with + the user. + +Some files in the "contrib" directory and some configure-generated +files that are distributed with libpng have other copyright owners and +are released under other open source licenses. + +libpng versions 0.97, January 1998, through 1.0.6, March 20, 2000, are +Copyright (c) 1998-2000 Glenn Randers-Pehrson, are derived from +libpng-0.96, and are distributed according to the same disclaimer and +license as libpng-0.96, with the following individuals added to the list +of Contributing Authors: + + Tom Lane + Glenn Randers-Pehrson + Willem van Schaik + +libpng versions 0.89, June 1996, through 0.96, May 1997, are +Copyright (c) 1996-1997 Andreas Dilger, are derived from libpng-0.88, +and are distributed according to the same disclaimer and license as +libpng-0.88, with the following individuals added to the list of +Contributing Authors: + + John Bowler + Kevin Bracey + Sam Bushell + Magnus Holmgren + Greg Roelofs + Tom Tanner + +Some files in the "scripts" directory have other copyright owners +but are released under this license. + +libpng versions 0.5, May 1995, through 0.88, January 1996, are +Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc. + +For the purposes of this copyright and license, "Contributing Authors" +is defined as the following set of individuals: + + Andreas Dilger + Dave Martindale + Guy Eric Schalnat + Paul Schmidt + Tim Wegner + +The PNG Reference Library is supplied "AS IS". The Contributing Authors +and Group 42, Inc. disclaim all warranties, expressed or implied, +including, without limitation, the warranties of merchantability and of +fitness for any purpose. The Contributing Authors and Group 42, Inc. +assume no liability for direct, indirect, incidental, special, exemplary, +or consequential damages, which may result from the use of the PNG +Reference Library, even if advised of the possibility of such damage. + +Permission is hereby granted to use, copy, modify, and distribute this +source code, or portions hereof, for any purpose, without fee, subject +to the following restrictions: + + 1. The origin of this source code must not be misrepresented. + + 2. Altered versions must be plainly marked as such and must not + be misrepresented as being the original source. + + 3. This Copyright notice may not be removed or altered from any + source or altered source distribution. + +The Contributing Authors and Group 42, Inc. specifically permit, without +fee, and encourage the use of this source code as a component to +supporting the PNG file format in commercial products. If you use this +source code in a product, acknowledgment is not required but would be +appreciated. + +END OF COPYRIGHT NOTICE, DISCLAIMER, and LICENSE. + +TRADEMARK: + +The name "libpng" has not been registered by the Copyright owner +as a trademark in any jurisdiction. However, because libpng has +been distributed and maintained world-wide, continually since 1995, +the Copyright owner claims "common-law trademark protection" in any +jurisdiction where common-law trademark is recognized. + +OSI CERTIFICATION: + +Libpng is OSI Certified Open Source Software. OSI Certified Open Source is +a certification mark of the Open Source Initiative. OSI has not addressed +the additional disclaimers inserted at version 1.0.7. 
+ +EXPORT CONTROL: + +The Copyright owner believes that the Export Control Classification +Number (ECCN) for libpng is EAR99, which means not subject to export +controls or International Traffic in Arms Regulations (ITAR) because +it is open source, publicly available software, that does not contain +any encryption software. See the EAR, paragraphs 734.3(b)(3) and +734.7(b). + +Glenn Randers-Pehrson +glennrp at users.sourceforge.net +September 28, 2017 + + + +Open Source Software Licensed under the BSD 3-Clause License: +The below software in this distribution may have been modified by THL A29 Limited ("Tencent Modifications"). All Tencent Modifications are Copyright (C) 2022 THL A29 Limited. +-------------------------------------------------------------------- +1. libwebp +Copyright (c) 2010, Google Inc. All rights reserved. + +2. pathKit +Copyright (c) 2011 Google Inc. All rights reserved. + +3. skcms +Copyright 2018 Google Inc. + + +Terms of the BSD 3-Clause License: +-------------------------------------------------------------------- +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +Open Source Software Licensed under the Zlib License: +-------------------------------------------------------------------- +1. zlib +Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler + + +Terms of the Zlib License: +-------------------------------------------------------------------- + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. 
This notice may not be removed or altered from any source distribution. + Jean-loup Gailly Mark Adler + jloup@gzip.org madler@alumni.caltech.edu + The data format used by the zlib library is described by RFCs (Request for + Comments) 1950 to 1952 in the files http://tools.ietf.org/html/rfc1950 + (zlib format), rfc1951 (deflate format) and rfc1952 (gzip format). \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 00000000..93a0f7f7 --- /dev/null +++ b/README.md @@ -0,0 +1,124 @@ +TGFX Logo + +[![license](https://img.shields.io/badge/license-BSD--3--Clause-blue)](https://github.com/libpag/tgfx/blob/master/LICENSE.txt) +[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg)](https://github.com/libpag/tgfx/pulls) +[![codecov](https://codecov.io/gh/libpag/tgfx/branch/main/graph/badge.svg)](https://codecov.io/gh/libpag/tgfx) +[![Actions Status](https://github.com/libpag/tgfx/workflows/autotest/badge.svg?branch=main)](https://github.com/libpag/tgfx/actions) +[![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/libpag/tgfx)](https://github.com/libpag/tgfx/releases) + +## Introduction + +TGFX (Tencent Graphics) is a lightweight 2D graphics library designed for rendering texts, +geometries, and images. It provides high-performance APIs that work across a variety of hardware and +software platforms. TGFX was initially created as a core component of the [PAG](https://pag.art) +project and has since become the default graphics engine for the [libpag](https://github.com/Tencent/libpag) +library, starting from version 4.0. Its main objective is to offer a compelling alternative to the +Skia graphics library while maintaining a smaller binary size. Over time, it has found its way into +many other products, such as [Tencent Docs](https://docs.qq.com) and various video-editing apps. + +## Platform Support + +| Vector Backend | GPU Backend | Target Platforms | Status | +|:--------------:|:--------------:|:----------------------------:|:-------------:| +| FreeType | OpenGL | All | complete | +| CoreGraphics | OpenGL | iOS, macOS | complete | +| Canvas2D | WebGL | Web | complete | +| CoreGraphics | Metal | iOS, macOS | in progress | +| FreeType | Vulkan | Android, Linux | planned | + + +## Branch Management + +- The `main` branch is our active developing branch which contains the latest features and bugfixes. +- The branches under `release/` are our stable milestone branches which are fully tested. We will + periodically cut a `release/{version}` branch from the `main` branch. After one `release/{version}` + branch is cut, only high-priority fixes are checked into it. + +## System Requirements + +- iOS 9.0 or later +- Android 4.4 or later +- macOS 10.13 or later +- Windows 7.0 or later +- Chrome 69.0 or later (Web) +- Safari 11.3 or later (Web) + +## Build Prerequisites + +- Xcode 11.0+ +- GCC 7.0+ +- CMake 3.10.2+ +- Visual Studio 2019 +- NDK 19.2.5345600 (**Please use this exact version of NDK, other versions may fail.**) + +## Dependency Management + +TGFX uses [depsync](https://github.com/domchen/depsync) tool to manage third-party dependencies. + +**For macOS platform:** + +Run the script in the root of the project: + +``` +./sync_deps.sh +``` + +This script will automatically install the necessary tools and synchronize all third-party repositories. + +**For other platforms:** + +First, make sure you have installed the latest version of node.js (You may need to restart your +computer after this step). 
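+You can quickly verify that node.js and npm are available before continuing (the exact version
+numbers printed will vary by machine):
+
+```
+node --version
+npm --version
+```
+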
+Then run the following command to install the depsync tool:
+
+```
+npm install -g depsync
+```
+
+Then run `depsync` in the root directory of the project:
+
+```
+depsync
+```
+
+A Git account and password may be required during synchronization. Please make sure you have enabled the
+`git-credential-store` so that `CMakeLists.txt` can trigger the synchronization automatically next time.
+
+
+## Build & Run
+
+We recommend using the CLion IDE on the macOS platform for development. After the synchronization, you
+can open the project with CLion and build the TGFX library.
+
+**For macOS platform:**
+
+No extra CLion configuration is required.
+
+**For Windows platform:**
+
+Please follow these steps to configure the CLion environment correctly:
+
+- Make sure you have installed at least the **[Desktop development with C++]** and **[Universal Windows Platform development]** components for VS2019.
+- Open the **File->Settings** panel, and go to **Build, Execution, Deployment->ToolChains**, then set the toolchain of CLion to **Visual Studio** with **amd64 (Recommended)** or **x86** architecture.
+
+**Note: If anything goes wrong during the CMake build, please update the CMake command-line tool to the latest
+version and try again.**
+
+
+## Support Us
+
+If you find TGFX helpful, please give us a **Star**. We sincerely appreciate your support :)
+
+
+## License
+
+TGFX is licensed under the [BSD-3-Clause License](./LICENSE.txt).
+
+[![Star History Chart](https://api.star-history.com/svg?repos=libpag/tgfx&type=Date)](https://star-history.com/#libpag/tgfx&Date)
+
+## Contribution
+
+If you have any ideas or suggestions to improve TGFX, you are welcome to open
+a [discussion](https://github.com/libpag/tgfx/discussions/new/choose)
+/ [issue](https://github.com/libpag/tgfx/issues/new/choose)
+/ [pull request](https://github.com/libpag/tgfx/pulls). Before opening a pull request or issue,
+please make sure to read the [Contributing Guide](./CONTRIBUTING.md). diff --git a/autotest.sh b/autotest.sh new file mode 100755 index 00000000..a0c8a619 --- /dev/null +++ b/autotest.sh @@ -0,0 +1,77 @@ +#!/usr/bin/env bash
+
+function make_dir() {
+  rm -rf $1
+  mkdir -p $1
+}
+echo "shell log - autotest start"
+if [[ $(uname) == 'Darwin' ]]; then
+  MAC_REQUIRED_TOOLS="gcovr"
+  for TOOL in ${MAC_REQUIRED_TOOLS[@]}; do
+    if [ ! $(which $TOOL) ]; then
+      if [ ! $(which brew) ]; then
+        echo "Homebrew not found. Trying to install..."
+        /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" ||
+          exit 1
+      fi
+      echo "$TOOL not found. Trying to install..."
+      brew install $TOOL || exit 1
+    fi
+  done
+fi
+
+echo $(pwd)
+
+COMPILE_RESULT=true
+
+WORKSPACE=$(pwd)
+
+cd $WORKSPACE
+
+make_dir result
+make_dir build
+
+./update_baseline.sh 1
+cp -r $WORKSPACE/test/baseline $WORKSPACE/result
+
+cd build
+
+# Configure a Debug build with coverage instrumentation and SwiftShader enabled.
+cmake -DCMAKE_CXX_FLAGS="-fprofile-arcs -ftest-coverage -g -O0" -DTGFX_USE_SWIFTSHADER=ON -DCMAKE_BUILD_TYPE=Debug ../
+if test $? -eq 0; then
+  echo "~~~~~~~~~~~~~~~~~~~CMakeLists OK~~~~~~~~~~~~~~~~~~"
+else
+  echo "~~~~~~~~~~~~~~~~~~~CMakeLists error~~~~~~~~~~~~~~~~~~"
+  exit 1
+fi
+
+cmake --build . --target TGFXFullTest -- -j 12
+if test $? -eq 0; then
+  echo "~~~~~~~~~~~~~~~~~~~TGFXFullTest make succeeded~~~~~~~~~~~~~~~~~~"
+else
+  echo "~~~~~~~~~~~~~~~~~~~TGFXFullTest make error~~~~~~~~~~~~~~~~~~"
+  exit 1
+fi
+
+# Run the full test suite and write the results as JSON.
+./TGFXFullTest --gtest_output=json:TGFXFullTest.json
+
+if test $? -eq 0; then
+  echo "~~~~~~~~~~~~~~~~~~~TGFXFullTest succeeded~~~~~~~~~~~~~~~~~~"
+else
+  echo "~~~~~~~~~~~~~~~~~~~TGFXFullTest Failed~~~~~~~~~~~~~~~~~~"
+  COMPILE_RESULT=false
+fi
+
+cp -a $WORKSPACE/build/*.json $WORKSPACE/result/
+
+cd ..
+
+# Generate HTML and XML coverage reports, excluding the test and vendor sources.
+gcovr -r . -e='test/*.*' -e='vendor/*.*' --html -o ./result/coverage.html
+gcovr -r . -e='test/*.*' -e='vendor/*.*' --xml-pretty -o ./result/coverage.xml
+
+rm -rf build
+
+cp -r $WORKSPACE/test/out $WORKSPACE/result
+
+if [ "$COMPILE_RESULT" == false ]; then
+  exit 1
+fi diff --git a/build_vendor b/build_vendor new file mode 100755 index 00000000..dcf1bdd2 --- /dev/null +++ b/build_vendor @@ -0,0 +1,3 @@ +#!/usr/bin/env node
+// run 'node build_vendor -h' to print help message
+require("./vendor_tools/build"); \ No newline at end of file diff --git a/codeformat.sh b/codeformat.sh new file mode 100755 index 00000000..cb8cc075 --- /dev/null +++ b/codeformat.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash
+if [[ $(uname) == 'Darwin' ]]; then
+  MAC_REQUIRED_TOOLS="python3"
+  for TOOL in ${MAC_REQUIRED_TOOLS[@]}; do
+    if [ ! $(which $TOOL) ]; then
+      if [ ! $(which brew) ]; then
+        echo "Homebrew not found. Trying to install..."
+        /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" ||
+          exit 1
+      fi
+      echo "$TOOL not found. Trying to install..."
+      brew install $TOOL || exit 1
+    fi
+  done
+  clangformat=`clang-format --version`
+  if [[ $clangformat =~ "14." ]]
+  then
+    echo "----$clangformat----"
+  else
+    echo "----install clang-format----"
+    pip3 install clang-format==14
+  fi
+fi
+
+echo "----begin to scan code format----"
+find include/ -iname '*.h' -print0 | xargs -0 clang-format -i
+# shellcheck disable=SC2038
+find src -name "*.cpp" -print -o -name "*.h" -print -o -name "*.mm" -print -o -name "*.m" -print | xargs clang-format -i
+# shellcheck disable=SC2038
+find test \( -path test/framework/lzma \) -prune -o -name "*.cpp" -print -o -name "*.h" -print | xargs clang-format -i
+
+git diff
+result=`git diff`
+if [[ $result =~ "diff" ]]
+then
+  echo "----Failed to pass coding specification----"
+  exit 1
+else
+  echo "----Pass coding specification----"
+fi
+
+echo "----Code format scan complete----"
+
diff --git a/include/tgfx/core/AlphaType.h b/include/tgfx/core/AlphaType.h new file mode 100644 index 00000000..f7aaf56e --- /dev/null +++ b/include/tgfx/core/AlphaType.h @@ -0,0 +1,44 @@ +/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+
+#pragma once
+
+namespace tgfx {
+/**
+ * Describes how to interpret the alpha component of a pixel.
+ */
+enum class AlphaType {
+  /**
+   * uninitialized.
+   */
+  Unknown,
+  /**
+   * pixel is opaque.
+ */ + Opaque, + /** + * pixel components are premultiplied by alpha. + */ + Premultiplied, + /** + * pixel components are independent of alpha. + */ + Unpremultiplied, +}; +} // namespace tgfx diff --git a/include/tgfx/core/Bitmap.h b/include/tgfx/core/Bitmap.h new file mode 100644 index 00000000..b8ba84ac --- /dev/null +++ b/include/tgfx/core/Bitmap.h @@ -0,0 +1,237 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Color.h" +#include "tgfx/core/Data.h" +#include "tgfx/core/EncodedFormat.h" +#include "tgfx/core/ImageBuffer.h" +#include "tgfx/core/ImageInfo.h" +#include "tgfx/platform/HardwareBuffer.h" + +namespace tgfx { +class PixelRef; + +/** + * Bitmap describes a two-dimensional raster pixel array. Bitmap points to PixelRef, which describes + * the physical array of pixels and is optimized for creating textures. If the pixel array is + * primarily read-only, use Image for better performance. Declaring Bitmap const prevents altering + * ImageInfo: the Bitmap height, width, and so on cannot change. It does not affect PixelRef: a + * caller may write its pixels. Bitmap is not thread safe. + */ +class Bitmap { + public: + /** + * Creates an empty Bitmap without pixels, and with an empty ImageInfo. + */ + Bitmap() = default; + + /** + * Creates a new Bitmap and tries to allocate its pixels by the specified width, height, and + * native color type. If the alphaOnly is true, sets ImageInfo to ColorType::ALPHA_8. If the + * tryHardware is true and there is hardware buffer support on the current platform, a hardware + * backed PixelRef is allocated. Otherwise, a raster PixelRef is allocated. The isEmpty() method + * of the Bitmap will return true if allocation fails. + */ + Bitmap(int width, int height, bool alphaOnly = false, bool tryHardware = true); + + /** + * Copies settings from src to returned Bitmap. Shares pixels if src has pixels allocated, so both + * bitmaps reference the same pixels. + */ + Bitmap(const Bitmap& src); + + /** + * Copies settings from src to returned Bitmap. Moves ownership of src pixels to Bitmap. + */ + Bitmap(Bitmap&& src); + + /** + * Creates a new Bitmap from the platform-specific hardware buffer. For example, the hardware + * buffer could be an AHardwareBuffer on the android platform or a CVPixelBufferRef on the apple + * platform. The Bitmap takes a reference to the hardwareBuffer. + */ + explicit Bitmap(HardwareBufferRef hardwareBuffer); + + /** + * Copies settings from src to returned Bitmap. Shares pixels if src has pixels allocated, so both + * bitmaps reference the same pixels. 
+ */ + Bitmap& operator=(const Bitmap& src); + + /** + * Copies settings from src to returned Bitmap. Moves ownership of src pixels to Bitmap. + */ + Bitmap& operator=(Bitmap&& src); + + /** + * Sets ImageInfo to width, height, and the native color type; and allocates pixel memory. If + * the alphaOnly is true, sets ImageInfo to ColorType::ALPHA_8. If the tryHardware is true and + * there is hardware buffer support on the current platform, a hardware-backed PixelRef is + * allocated. Otherwise, a raster PixelRef is allocated. Returns true if the PixelRef is + * allocated successfully. + */ + bool allocPixels(int width, int height, bool alphaOnly = false, bool tryHardware = true); + + /** + * Locks and returns the writable pixels, the base address corresponding to the pixel origin. + */ + void* lockPixels(); + + /** + * Locks and returns the read-only pixels, the base address corresponding to the pixel origin. + */ + const void* lockPixels() const; + + /** + * Call this to balance a successful call to lockPixels(). + */ + void unlockPixels() const; + + /** + * Return true if the Bitmap describes an empty area of pixels. + */ + bool isEmpty() const { + return _info.isEmpty(); + } + + /** + * Returns an ImageInfo describing the width, height, color type, alpha type, and row bytes of the + * pixels. + */ + const ImageInfo& info() const { + return _info; + } + + /** + * Returns the width of the pixels. + */ + int width() const { + return _info.width(); + } + + /** + * Returns the height of the pixels. + */ + int height() const { + return _info.height(); + } + + /** + * Returns the ColorType of the pixels. + */ + ColorType colorType() const { + return _info.colorType(); + } + + /** + * Returns the AlphaType of the pixels. + */ + AlphaType alphaType() const { + return _info.alphaType(); + } + + /** + * Returns true if pixels represent transparency only. If true, each pixel is packed in 8 bits as + * defined by ColorType::ALPHA_8. + */ + bool isAlphaOnly() const { + return _info.isAlphaOnly(); + } + + /** + * Returns the rowBytes of the pixels. + */ + size_t rowBytes() const { + return _info.rowBytes(); + } + + /** + * Returns the byte size of the pixels. + */ + size_t byteSize() const { + return _info.byteSize(); + } + + /** + * Returns true if the Bitmap is backed by a platform-specified hardware buffer. A hardware-backed + * Bitmap allows sharing buffers across CPU and GPU, which can be used to speed up the texture + * uploading. + */ + bool isHardwareBacked() const; + + /** + * Retrieves the backing hardware buffer. This method does not acquire any additional reference to + * the returned hardware buffer. Returns nullptr if the Bitmap is not backed by a hardware buffer. + */ + HardwareBufferRef getHardwareBuffer() const; + + /** + * Encodes the pixels in Bitmap into a binary image format. + * @param format One of: EncodedFormat::JPEG, EncodedFormat::PNG, EncodedFormat::WEBP + * @param quality A platform and format specific metric trading off size and encoding error. When + * used, quality equaling 100 encodes with the least error. quality may be ignored by the encoder. + * @return Returns nullptr if encoding fails, or if the format is not supported. + */ + std::shared_ptr encode(EncodedFormat format = EncodedFormat::PNG, int quality = 100) const; + + /** + * Returns pixel at (x, y) as unpremultiplied color. Some color precision may be lost in the + * conversion to unpremultiplied color; original pixel data may have additional precision. 
Returns + * a transparent color if the point (x, y) is not contained by bounds. + */ + Color getColor(int x, int y) const; + + /** + * Copies a rect of pixels to dstPixels with specified ImageInfo. Copy starts at (srcX, srcY), and + * does not exceed Bitmap (width(), height()). Pixels are copied only if pixel conversion is + * possible. Returns true if pixels are copied to dstPixels. + */ + bool readPixels(const ImageInfo& dstInfo, void* dstPixels, int srcX = 0, int srcY = 0) const; + + /** + * Copies a rect of pixels from src. Copy starts at (dstX, dstY), and does not exceed + * Bitmap (width(), height()). Pixels are copied only if pixel conversion is possible and the + * bitmap is constructed from writable pixels. Returns true if src pixels are copied to Bitmap. + */ + bool writePixels(const ImageInfo& srcInfo, const void* srcPixels, int dstX = 0, int dstY = 0); + + /** + * Replaces all pixel values with transparent colors. + */ + void clear(); + + /** + * Returns an ImageBuffer object capturing the pixels in the Bitmap. Subsequent writing of the + * Bitmap will not be captured. Instead, the Bitmap will copy its pixels to a new memory buffer if + * there is a subsequent writing call to the Bitmap while the returned ImageBuffer is still alive. + * If the Bitmap is modified frequently, create an ImageReader from the Bitmap instead, which + * allows you to continuously read the latest content from the Bitmap with minimal memory copying. + * Returns nullptr if the Bitmap is empty. + */ + std::shared_ptr makeBuffer() const; + + private: + ImageInfo _info = {}; + std::shared_ptr pixelRef = nullptr; + + friend class Pixmap; + friend class ImageReader; +}; +} // namespace tgfx diff --git a/include/tgfx/core/BlendMode.h b/include/tgfx/core/BlendMode.h new file mode 100644 index 00000000..1a572554 --- /dev/null +++ b/include/tgfx/core/BlendMode.h @@ -0,0 +1,143 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * Defines constant values for visual blend mode effects. + */ +enum class BlendMode { + /** + * Replaces destination with zero: fully transparent. + */ + Clear, + /** + * Replaces destination. + */ + Src, + /** + * Preserves destination. + */ + Dst, + /** + * Source over destination. + */ + SrcOver, + /** + * Destination over source. + */ + DstOver, + /** + * Source trimmed inside destination. + */ + SrcIn, + /** + * Destination trimmed by source. + */ + DstIn, + /** + * Source trimmed outside destination. + */ + SrcOut, + /** + * Destination trimmed outside source. + */ + DstOut, + /** + * Source inside destination blended with destination. 
+ */ + SrcATop, + /** + * Destination inside source blended with source. + */ + DstATop, + /** + * Each of source and destination trimmed outside the other. + */ + Xor, + /** + * Sum of colors. + */ + Plus, + /** + * Product of premultiplied colors; darkens destination. + */ + Modulate, + /** + * Multiply inverse of pixels, inverting result; brightens destination. + */ + Screen, + /** + * Multiply or screen, depending on destination. + */ + Overlay, + /** + * Darker of source and destination. + */ + Darken, + /** + * Lighter of source and destination. + */ + Lighten, + /** + * Brighten destination to reflect source. + */ + ColorDodge, + /** + * Darken destination to reflect source. + */ + ColorBurn, + /** + * Multiply or screen, depending on source. + */ + HardLight, + /** + * Lighten or darken, depending on source. + */ + SoftLight, + /** + * Subtract darker from lighter with higher contrast. + */ + Difference, + /** + * Subtract darker from lighter with lower contrast. + */ + Exclusion, + /** + * Multiply source with destination, darkening image. + */ + Multiply, + /** + * Hue of source with saturation and luminosity of destination. + */ + Hue, + /** + * Saturation of source with hue and luminosity of destination. + */ + Saturation, + /** + * Hue and saturation of source with luminosity of destination. + */ + Color, + /** + * Luminosity of source with hue and saturation of destination. + */ + Luminosity +}; +} // namespace tgfx diff --git a/include/tgfx/core/Canvas.h b/include/tgfx/core/Canvas.h new file mode 100644 index 00000000..3e60f0b3 --- /dev/null +++ b/include/tgfx/core/Canvas.h @@ -0,0 +1,237 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/BlendMode.h" +#include "tgfx/core/Font.h" +#include "tgfx/core/Image.h" +#include "tgfx/core/Paint.h" +#include "tgfx/core/Path.h" +#include "tgfx/core/SamplingOptions.h" +#include "tgfx/core/Shape.h" +#include "tgfx/core/TextBlob.h" + +namespace tgfx { +class Surface; +class SurfaceOptions; +class Texture; +struct CanvasState; +class SurfaceDrawContext; +class GpuPaint; + +/** + * Canvas provides an interface for drawing, and how the drawing is clipped and transformed. Canvas + * contains a stack of opacity, blend mode, matrix and clip values. Each Canvas draw call transforms + * the geometry of the object by the concatenation of all matrix values in the stack. The + * transformed geometry is clipped by the intersection of all of clip values in the stack. + */ +class Canvas { + public: + explicit Canvas(Surface* surface); + + ~Canvas(); + + /** + * Retrieves the context associated with this Surface. 
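+   * (That is, the Context of the Surface this Canvas draws into.)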
+   */
+  Context* getContext() const;
+
+  /**
+   * Returns the Surface this canvas draws into.
+   */
+  Surface* getSurface() const;
+
+  /**
+   * Returns the SurfaceOptions associated with the Canvas. Returns nullptr if the Canvas is not
+   * created from a Surface.
+   */
+  const SurfaceOptions* surfaceOptions() const;
+
+  /**
+   * Saves alpha, blend mode, matrix and clip. Calling restore() discards changes to them, restoring
+   * them to their state when save() was called. Saved Canvas state is put on a stack; multiple
+   * calls to save() should be balanced by an equal number of calls to restore().
+   */
+  void save();
+
+  /**
+   * Removes changes to alpha, blend mode, matrix and clips since Canvas state was last saved. The
+   * state is removed from the stack. Does nothing if the stack is empty.
+   */
+  void restore();
+
+  /**
+   * Returns the current total matrix.
+   */
+  Matrix getMatrix() const;
+
+  /**
+   * Replaces transformation with specified matrix. Unlike concat(), any prior matrix state is
+   * overwritten.
+   * @param matrix matrix to copy, replacing existing Matrix
+   */
+  void setMatrix(const Matrix& matrix);
+
+  /**
+   * Sets Matrix to the identity matrix. Any prior matrix state is overwritten.
+   */
+  void resetMatrix();
+
+  /**
+   * Replaces the current Matrix with matrix premultiplied with the existing one. This has the
+   * effect of transforming the drawn geometry by matrix, before transforming the result with the
+   * existing Matrix.
+   */
+  void concat(const Matrix& matrix);
+
+  /**
+   * Returns the current global alpha.
+   */
+  float getAlpha() const;
+
+  /**
+   * Replaces the global alpha with specified newAlpha.
+   */
+  void setAlpha(float newAlpha);
+
+  /**
+   * Returns the current global blend mode.
+   */
+  BlendMode getBlendMode() const;
+
+  /**
+   * Replaces the global blend mode with specified new blend mode.
+   */
+  void setBlendMode(BlendMode blendMode);
+
+  /**
+   * Returns the current total clip.
+   */
+  Path getTotalClip() const;
+
+  /**
+   * Replaces clip with the intersection of clip and path. The path is transformed by Matrix before
+   * it is combined with clip.
+   */
+  void clipPath(const Path& path);
+
+  /**
+   * Fills clip with color. This has the effect of replacing all pixels contained by clip with
+   * color.
+   */
+  void clear(const Color& color = Color::Transparent());
+
+  /**
+   * Draws a rectangle with the specified paint, using the current alpha, blend mode, clip and
+   * matrix.
+   */
+  void drawRect(const Rect& rect, const Paint& paint);
+
+  /**
+   * Draws a path using the current clip, matrix and specified paint.
+   */
+  void drawPath(const Path& path, const Paint& paint);
+
+  /**
+   * Draws a shape using the current clip, matrix and specified paint.
+   */
+  void drawShape(std::shared_ptr<Shape> shape, const Paint& paint);
+
+  /**
+   * Draws an image, with its top-left corner at (left, top), using current clip, matrix and
+   * optional paint. If image->hasMipmaps() is true, uses FilterMode::Linear and MipMapMode::Linear
+   * as the sampling options. Otherwise, uses FilterMode::Linear and MipMapMode::None as the
+   * sampling options.
+   */
+  void drawImage(std::shared_ptr<Image> image, float left, float top, const Paint* paint = nullptr);
+
+  /**
+   * Draws an Image, with its top-left corner at (0, 0), using current alpha, clip and matrix
+   * premultiplied with the existing Matrix. If image->hasMipmaps() is true, uses FilterMode::Linear
+   * and MipMapMode::Linear as the sampling options. Otherwise, uses FilterMode::Linear and
+   * MipMapMode::None as the sampling options.
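+   * As an illustration only, assuming an Image named image and a Matrix named matrix are in
+   * scope, drawing through this overload behaves roughly like concatenating the matrix first and
+   * then drawing at the origin:
+   *   canvas->save();
+   *   canvas->concat(matrix);
+   *   canvas->drawImage(image);
+   *   canvas->restore();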
+ */ + void drawImage(std::shared_ptr image, const Matrix& matrix, const Paint* paint = nullptr); + + /** + * Draws an image, with its top-left corner at (0, 0), using current clip, matrix and optional + * paint. If image->hasMipmaps() is true, uses FilterMode::Linear and MipMapMode::Linear as the + * sampling options. Otherwise, uses FilterMode::Linear and MipMapMode::None as the sampling + * options. + */ + void drawImage(std::shared_ptr image, const Paint* paint = nullptr); + + /** + * Draws an image, with its top-left corner at (0, 0), using current clip, matrix, sampling + * options and optional paint. + */ + void drawImage(std::shared_ptr image, SamplingOptions sampling, + const Paint* paint = nullptr); + + /** + * Draw array of glyphs with specified font, using current alpha, blend mode, clip and Matrix. + */ + void drawGlyphs(const GlyphID glyphIDs[], const Point positions[], size_t glyphCount, + const Font& font, const Paint& paint); + + // TODO(pengweilv): Support blend mode, atlas as source, colors as destination, colors can be + // nullptr. + void drawAtlas(std::shared_ptr atlas, const Matrix matrix[], const Rect tex[], + const Color colors[], size_t count, SamplingOptions sampling = SamplingOptions()); + + /** + * Triggers the immediate execution of all pending draw operations. + */ + void flush(); + + private: + std::shared_ptr getClipTexture(); + + std::pair, bool> getClipRect(); + + std::unique_ptr getClipMask(const Rect& deviceBounds, Rect* scissorRect); + + Rect clipLocalBounds(Rect localBounds); + + void drawImage(std::shared_ptr image, SamplingOptions sampling, const Paint& paint); + + void drawMask(const Rect& bounds, std::shared_ptr mask, GpuPaint paint); + + void drawColorGlyphs(const GlyphID glyphIDs[], const Point positions[], size_t glyphCount, + const Font& font, const Paint& paint); + + void drawMaskGlyphs(TextBlob* textBlob, const Paint& paint); + + void fillPath(const Path& path, const Paint& paint); + + bool drawAsClear(const Path& path, const GpuPaint& paint); + + void draw(std::unique_ptr op, GpuPaint paint, bool aa = false); + + bool nothingToDraw(const Paint& paint) const; + + Surface* surface = nullptr; + std::shared_ptr _clipSurface = nullptr; + uint32_t clipID = 0; + std::shared_ptr state = nullptr; + SurfaceDrawContext* drawContext = nullptr; + std::vector> savedStateList = {}; +}; +} // namespace tgfx diff --git a/include/tgfx/core/Color.h b/include/tgfx/core/Color.h new file mode 100644 index 00000000..e631a08b --- /dev/null +++ b/include/tgfx/core/Color.h @@ -0,0 +1,143 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include + +namespace tgfx { + +/** + * RGBA color value, holding four floating point components. Color components are always in a known + * order. + */ +struct Color { + /** + * Red component. + */ + float red; + + /** + * Green component. + */ + float green; + + /** + * Blue component. + */ + float blue; + + /** + * Alpha component. + */ + float alpha; + + /** + * Returns a fully transparent Color. + */ + static const Color& Transparent(); + + /** + * Returns a fully opaque black Color. + */ + static const Color& Black(); + + /** + * Returns a fully opaque white Color. + */ + static const Color& White(); + + /** + * Returns color value from 8-bit component values. + */ + static Color FromRGBA(uint8_t r, uint8_t g, uint8_t b, uint8_t a = 255); + + /** + * Compares Color with other, and returns true if all components are equal. + */ + bool operator==(const Color& other) const { + return alpha == other.alpha && red == other.red && green == other.green && blue == other.blue; + } + + /** + * Compares Color with other, and returns true if not all components are equal. + */ + bool operator!=(const Color& other) const { + return !(*this == other); + } + + /** + * Returns a pointer to components of Color, for array access. + */ + const float* array() const { + return &red; + } + + /** + * Returns a pointer to components of Color, for array access. + */ + float* array() { + return &red; + } + + /** + * Returns one component. + * @param index one of: 0 (r), 1 (g), 2 (b), 3 (a) + * @return value corresponding to index. + */ + float operator[](int index) const; + + /** + * Returns one component. + * @param index one of: 0 (r), 1 (g), 2 (b), 3 (a) + * @return value corresponding to index. + */ + float& operator[](int index); + + /** + * Returns true if all channels are in [0, 1]. + **/ + bool isValid() const; + + /** + * Returns true if Color is an opaque color. + */ + bool isOpaque() const; + + /** + * Returns a Color with alpha set to 1.0. + */ + Color makeOpaque() const { + return {red, green, blue, 1.0f}; + } + + /** + * Returns a Color premultiplied by alpha. + */ + Color premultiply() const { + return {red * alpha, green * alpha, blue * alpha, alpha}; + } + + /** + * Returns a Color unpremultiplied by alpha. + */ + Color unpremultiply() const; +}; +} // namespace tgfx diff --git a/include/tgfx/core/ColorFilter.h b/include/tgfx/core/ColorFilter.h new file mode 100644 index 00000000..bdffb491 --- /dev/null +++ b/include/tgfx/core/ColorFilter.h @@ -0,0 +1,45 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include "tgfx/core/BlendMode.h" +#include "tgfx/core/Color.h" + +namespace tgfx { +class FragmentProcessor; + +class ColorFilter { + public: + static std::shared_ptr MakeLumaColorFilter(); + + static std::shared_ptr Blend(Color color, BlendMode mode); + + static std::shared_ptr Matrix(const std::array& rowMajor); + + virtual ~ColorFilter() = default; + + virtual std::unique_ptr asFragmentProcessor() const = 0; + + virtual bool isAlphaUnchanged() const { + return false; + } +}; +} // namespace tgfx diff --git a/include/tgfx/core/ColorType.h b/include/tgfx/core/ColorType.h new file mode 100644 index 00000000..9fd6cfd0 --- /dev/null +++ b/include/tgfx/core/ColorType.h @@ -0,0 +1,72 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * Describes how pixel bits encode color. + */ +enum class ColorType { + /** + * uninitialized. + */ + Unknown, + /** + * Each pixel is stored as a single translucency (alpha) channel. This is very useful for + * storing masks efficiently, for instance. No color information is stored. With this + * configuration, each pixel requires 1 byte of memory. + */ + ALPHA_8, + /** + * Each pixel is stored on 4 bytes. Each channel (RGB and alpha for translucency) is stored with 8 + * bits of precision (256 possible values). The channel order is: red, green, blue, alpha. + */ + RGBA_8888, + /** + * Each pixel is stored on 4 bytes. Each channel (RGB and alpha for translucency) is stored with 8 + * bits of precision (256 possible values). The channel order is: blue, green, red, alpha. + */ + BGRA_8888, + /** + * Each pixel is stored on 2 bytes, and only the RGB channels are encoded: red is stored with 5 + * bits of precision (32 possible values), green is stored with 6 bits of precision (64 possible + * values), and blue is stored with 5 bits of precision. + */ + RGB_565, + /** + * Each pixel is stored as a single grayscale level. No color information is stored. With this + * configuration, each pixel requires 1 byte of memory. + */ + Gray_8, + /** + * Each pixel is stored on 8 bytes. Each channel (RGB and alpha for translucency) is stored as a + * half-precision floating point value. This configuration is particularly suited for wide-gamut + * and HDR content. + */ + RGBA_F16, + /** + * Each pixel is stored on 4 bytes. Each RGB channel is stored with 10 bits of precision (1024 + * possible values). There is an additional alpha channel that is stored with 2 bits of precision + * (4 possible values). 
This configuration is suited for wide-gamut and HDR content which does not + * require alpha blending, such that the memory cost is the same as RGBA_8888 while enabling + * higher color precision. + */ + RGBA_1010102 +}; +} // namespace tgfx diff --git a/include/tgfx/core/Data.h b/include/tgfx/core/Data.h new file mode 100644 index 00000000..29c142f4 --- /dev/null +++ b/include/tgfx/core/Data.h @@ -0,0 +1,118 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include + +namespace tgfx { +/** + * Data holds an immutable data buffer. Not only is the Data immutable, but the actual pointer + * that is returned by data() or bytes() is guaranteed to always be the same for the life of this + * instance. + */ +class Data { + public: + /** + * Creates a Data object from the specified file path. + */ + static std::shared_ptr MakeFromFile(const std::string& filePath); + + /** + * Creates a Data object by copying the specified data. + */ + static std::shared_ptr MakeWithCopy(const void* data, size_t length); + + /** + * Call this when the data parameter is already const, suitable for const globals. The caller must + * ensure the data parameter will always be the same and alive for the lifetime of the returned + * Data. + */ + static std::shared_ptr MakeWithoutCopy(const void* data, size_t length); + + /** + * Function that, if provided, will be called when the Data goes out of scope, allowing for + * custom allocation/freeing of the data's contents. + */ + typedef void (*ReleaseProc)(const void* data, void* context); + + /** + * A ReleaseProc using delete to release data. + */ + static void DeleteProc(const void* data, void* context); + + /** + * A ReleaseProc using free() to release data. + */ + static void FreeProc(const void* data, void* context); + + /** + * Creates a Data object, taking ownership of the specified data, and using the releaseProc to + * free it. The releaseProc may be nullptr. + */ + static std::shared_ptr MakeAdopted(const void* data, size_t length, + ReleaseProc releaseProc = DeleteProc, + void* context = nullptr); + + /** + * Creates a new empty Data object. + */ + static std::shared_ptr MakeEmpty(); + + ~Data(); + + /** + * Returns the memory address of the data. + */ + const void* data() const { + return _data; + } + + /** + * Returns the read-only memory address of the data, but in this case it is cast to uint8_t*, to + * make it easy to add an offset to it. + */ + const uint8_t* bytes() const { + return reinterpret_cast(_data); + } + + /** + * Returns the byte size. + */ + size_t size() const { + return _size; + } + + /** + * Returns true if the Data is empty. 
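+   * For example, a Data returned by MakeEmpty() is expected to report empty() == true.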
+ */ + bool empty() const { + return _size == 0; + } + + private: + const void* _data = nullptr; + size_t _size = 0; + ReleaseProc releaseProc = nullptr; + void* releaseContext = nullptr; + + Data(const void* data, size_t length, ReleaseProc releaseProc, void* context); +}; + +} // namespace tgfx diff --git a/include/tgfx/core/EncodedFormat.h b/include/tgfx/core/EncodedFormat.h new file mode 100644 index 00000000..a8c854d6 --- /dev/null +++ b/include/tgfx/core/EncodedFormat.h @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { + +/** + * Describes the known formats a Pixmap can be encoded into. + */ +enum class EncodedFormat { JPEG, PNG, WEBP }; + +} // namespace tgfx diff --git a/include/tgfx/core/EncodedOrigin.h b/include/tgfx/core/EncodedOrigin.h new file mode 100644 index 00000000..d68c3ff2 --- /dev/null +++ b/include/tgfx/core/EncodedOrigin.h @@ -0,0 +1,68 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Matrix.h" + +namespace tgfx { +/** + * These values match the orientation www.exif.org/Exif2-2.PDF. 
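+ * (These are the same 1-8 values used by the EXIF Orientation tag; TopLeft (1) means no
+ * transform is needed.)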
+ */ +enum class EncodedOrigin { + /** + * Default + */ + TopLeft = 1, + /** + * Reflected across y-axis + */ + TopRight = 2, + /** + * Rotated 180 + */ + BottomRight = 3, + /** + * Reflected across x-axis + */ + BottomLeft = 4, + /** + * Reflected across x-axis, Rotated 90 CCW + */ + LeftTop = 5, + /** + * Rotated 90 CW + */ + RightTop = 6, + /** + * Reflected across x-axis, Rotated 90 CW + */ + RightBottom = 7, + /** + * Rotated 90 CCW + */ + LeftBottom = 8 +}; + +/** + * Given an EncodedOrigin and the width and height of the source data, returns a matrix that + * transforms the source rectangle [0, 0, w, h] to a correctly oriented destination rectangle, with + * the upper left corner still at [0, 0]. + */ +Matrix EncodedOriginToMatrix(EncodedOrigin origin, int width, int height); +} // namespace tgfx diff --git a/include/tgfx/core/Font.h b/include/tgfx/core/Font.h new file mode 100644 index 00000000..7da41658 --- /dev/null +++ b/include/tgfx/core/Font.h @@ -0,0 +1,158 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Typeface.h" + +namespace tgfx { +/** + * Font controls options applied when drawing and measuring text. + */ +class Font { + public: + /** + * Constructs Font with default values. + */ + Font(); + + /** + * Constructs Font with default values with Typeface and size in points. + */ + explicit Font(std::shared_ptr typeface, float size = 12.0f); + + /** + * Returns a new font with the same attributes of this font, but with the specified size. + */ + Font makeWithSize(float size) const; + + /** + * Returns a typeface reference if set, or the default typeface reference, which is never nullptr. + */ + std::shared_ptr getTypeface() const { + return typeface; + } + + /** + * Sets a new Typeface to this Font. + */ + void setTypeface(std::shared_ptr newTypeface); + + /** + * Returns the point size of this font. + */ + float getSize() const { + return size; + } + + /** + * Sets text size in points. Has no effect if textSize is not greater than or equal to zero. + */ + void setSize(float newSize); + + /** + * Returns true if bold is approximated by increasing the stroke width when drawing glyphs. + */ + bool isFauxBold() const { + return fauxBold; + } + + /** + * Increases stroke width when drawing glyphs to approximate a bold typeface. + */ + void setFauxBold(bool value) { + fauxBold = value; + } + + /** + * Returns true if italic is approximated by adding skewX value of a canvas's matrix when + * drawing glyphs. + */ + bool isFauxItalic() const { + return fauxItalic; + } + + /** + * Adds skewX value of a canvas's matrix when drawing glyphs to approximate a italic typeface. 
+ */ + void setFauxItalic(bool value) { + fauxItalic = value; + } + + /** + * Returns the FontMetrics associated with this font. + */ + FontMetrics getMetrics() const { + return typeface->getMetrics(size); + } + + /** + * Returns the glyph ID corresponds to the specified glyph name. The glyph name must be in utf-8 + * encoding. Returns 0 if the glyph name is not associated with this typeface. + */ + GlyphID getGlyphID(const std::string& name) const { + return typeface->getGlyphID(name); + } + + /** + * Returns the bounding box of the specified glyph. + */ + Rect getBounds(GlyphID glyphID) const { + return typeface->getBounds(glyphID, size, fauxBold, fauxItalic); + } + + /** + * Returns the advance for specified glyph. + * @param glyphID The id of specified glyph. + * @param verticalText The intended drawing orientation of the glyph. + */ + float getAdvance(GlyphID glyphID, bool verticalText = false) const { + return typeface->getAdvance(glyphID, size, fauxBold, fauxItalic, verticalText); + } + + /** + * Creates a path corresponding to glyph outline. If glyph has an outline, copies outline to path + * and returns true. If glyph is described by a bitmap, returns false and ignores path parameter. + */ + bool getPath(GlyphID glyphID, Path* path) const { + return typeface->getPath(glyphID, size, fauxBold, fauxItalic, path); + } + + /** + * Creates an image buffer capturing the content of the specified glyph. The returned matrix + * should apply to the glyph image when drawing. + */ + std::shared_ptr getGlyphImage(GlyphID glyphID, Matrix* matrix) const { + return typeface->getGlyphImage(glyphID, size, fauxBold, fauxItalic, matrix); + } + + /** + * Calculates the offset from the default (horizontal) origin to the vertical origin for specified + * glyph. + */ + Point getVerticalOffset(GlyphID glyphID) const { + return typeface->getVerticalOffset(glyphID, size, fauxBold, fauxItalic); + } + + private: + std::shared_ptr typeface = nullptr; + float size = 12.0f; + bool fauxBold = false; + bool fauxItalic = false; +}; +} // namespace tgfx \ No newline at end of file diff --git a/include/tgfx/core/FontMetrics.h b/include/tgfx/core/FontMetrics.h new file mode 100644 index 00000000..6f39fc2d --- /dev/null +++ b/include/tgfx/core/FontMetrics.h @@ -0,0 +1,82 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +class FontMetrics { + public: + /** + * Extent above baseline. + */ + float top = 0; + /** + * Distance to reserve above baseline. + */ + float ascent = 0; + /** + * Distance to reserve below baseline. + */ + float descent = 0; + /** + * Extent below baseline. 
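// A sketch of turning these metrics into a line height (illustrative; assumes `font` is a
// tgfx::Font and the usual Skia-style sign convention, where ascent is negative and descent
// positive):
auto metrics = font.getMetrics();
float lineHeight = -metrics.ascent + metrics.descent + metrics.leading;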
+ */ + float bottom = 0; + /** + * Distance to add between lines. + */ + float leading = 0; + /** + * Minimum x. + */ + float xMin = 0; + /** + * Maximum x. + */ + float xMax = 0; + /** + * Height of lower-case 'x'. + */ + float xHeight = 0; + /** + * Height of an upper-case letter. + */ + float capHeight = 0; + /** + * Underline thickness. + */ + float underlineThickness = 0; + /** + * Underline position relative to baseline. + */ + float underlinePosition = 0; +}; + +struct GlyphMetrics { + float width = 0; + float height = 0; + + // The offset from the glyphs' origin on the baseline to the top left of the glyph mask. + float top = 0; + float left = 0; + + // The advance for this glyph. + float advanceX = 0; + float advanceY = 0; +}; +} // namespace tgfx diff --git a/include/tgfx/core/Image.h b/include/tgfx/core/Image.h new file mode 100644 index 00000000..be5454aa --- /dev/null +++ b/include/tgfx/core/Image.h @@ -0,0 +1,276 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Data.h" +#include "tgfx/core/EncodedOrigin.h" +#include "tgfx/core/ImageGenerator.h" +#include "tgfx/core/ImageInfo.h" +#include "tgfx/core/Pixmap.h" +#include "tgfx/core/SamplingOptions.h" +#include "tgfx/core/TileMode.h" +#include "tgfx/gpu/Backend.h" +#include "tgfx/gpu/ImageOrigin.h" +#include "tgfx/platform/HardwareBuffer.h" +#include "tgfx/platform/NativeImage.h" + +namespace tgfx { +class ImageSource; +class Context; +class TextureProxy; +class FragmentProcessor; + +/** + * Image describes a two-dimensional array of pixels to draw. The pixels may be decoded in an + * ImageBuffer, encoded in a Picture or compressed data stream, or located in GPU memory as a GPU + * texture. The Image class is safe across threads and cannot be modified after it is created. The + * width and height of an Image are always greater than zero. Creating an Image with zero width or + * height returns nullptr. The corresponding GPU cache is immediately marked as expired if all + * Images with the same ImageSource are released, which becomes recyclable and will be purged at + * some point in the future. + */ +class Image { + public: + /** + * Creates an Image from the file path. An Image is returned if the format of the image file is + * recognized and supported. Recognized formats vary by platform. + */ + static std::shared_ptr MakeFromFile(const std::string& filePath); + + /** + * Creates an Image from the encoded data. An Image is returned if the format of the encoded data + * is recognized and supported. Recognized formats vary by platform. 
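// A sketch of the two decoding entry points above (illustrative; the file path is a
// placeholder, and `encodedData` is assumed to be a std::shared_ptr<Data> holding the same
// bytes already loaded into memory):
auto image = tgfx::Image::MakeFromFile("assets/example.png");
if (image == nullptr) {
  image = tgfx::Image::MakeFromEncoded(encodedData);
}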
+ */ + static std::shared_ptr MakeFromEncoded(std::shared_ptr encodedData); + + /** + * Creates an Image from the platform-specific NativeImage. For example, the NativeImage could be + * a jobject that represents a java Bitmap on the android platform or a CGImageRef on the apple + * platform. The returned Image object takes a reference to the nativeImage. Returns nullptr if + * the nativeImage is nullptr or the current platform has no NativeImage support. + */ + static std::shared_ptr MakeFrom(NativeImageRef nativeImage); + + /** + * Creates an Image from the image generator. An Image is returned if the generator is not + * nullptr. The image generator may wrap codec data or custom data. + */ + static std::shared_ptr MakeFrom(std::shared_ptr generator); + + /** + * Creates an Image from the ImageInfo and shares pixels from the immutable Data object. The + * returned Image takes a reference to the pixels. The caller must ensure the pixels are always + * the same for the lifetime of the returned Image. If the ImageInfo is unsuitable for direct + * texture uploading, the Image will internally create an ImageGenerator for pixel format + * conventing instead of an ImageBuffer. Returns nullptr if the ImageInfo is empty or the pixels + * are nullptr. + */ + static std::shared_ptr MakeFrom(const ImageInfo& info, std::shared_ptr pixels); + + /** + * Creates an Image from the Bitmap, sharing bitmap pixels. The Bitmap will allocate new internal + * pixel memory and copy the original pixels into it if there is a subsequent call of pixel + * writing to the Bitmap. Therefore, the content of the returned Image will always be the same. + */ + static std::shared_ptr MakeFrom(const Bitmap& bitmap); + + /** + * Creates an Image from the platform-specific hardware buffer. For example, the hardware buffer + * could be an AHardwareBuffer on the android platform or a CVPixelBufferRef on the apple + * platform. The returned Image takes a reference to the hardwareBuffer. The caller must + * ensure the buffer content stays unchanged for the lifetime of the returned Image. The + * colorSpace is ignored if the hardwareBuffer contains only one plane, which is not in the YUV + * format. Returns nullptr if the hardwareBuffer is nullptr. + */ + static std::shared_ptr MakeFrom(HardwareBufferRef hardwareBuffer, + YUVColorSpace colorSpace = YUVColorSpace::BT601_LIMITED); + + /** + * Creates an Image in the I420 format with the specified YUVData and the YUVColorSpace. Returns + * nullptr if the yuvData is invalid. + */ + static std::shared_ptr MakeI420(std::shared_ptr yuvData, + YUVColorSpace colorSpace = YUVColorSpace::BT601_LIMITED); + + /** + * Creates an Image in the NV12 format with the specified YUVData and the YUVColorSpace. Returns + * nullptr if the yuvData is invalid. + */ + static std::shared_ptr MakeNV12(std::shared_ptr yuvData, + YUVColorSpace colorSpace = YUVColorSpace::BT601_LIMITED); + + /** + * Creates an Image from the ImageBuffer, An Image is returned if the imageBuffer is not nullptr + * and its dimensions are greater than zero. + */ + static std::shared_ptr MakeFrom(std::shared_ptr imageBuffer); + + /** + * Creates an Image from the backendTexture associated with the context. The caller must ensure + * the backendTexture stays valid and unchanged for the lifetime of the returned Image. An Image + * is returned if the format of the backendTexture is recognized and supported. Recognized formats + * vary by GPU back-ends. 
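// A sketch of wrapping CPU pixels that are already in memory (illustrative; `width`, `height`,
// and `pixels` are placeholders, where `pixels` is a std::shared_ptr<Data> whose layout matches
// `info` and stays unchanged for the lifetime of the returned Image):
auto info = tgfx::ImageInfo::Make(width, height, tgfx::ColorType::RGBA_8888);
auto image = tgfx::Image::MakeFrom(info, pixels);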
+ */ + static std::shared_ptr MakeFrom(Context* context, const BackendTexture& backendTexture, + ImageOrigin origin = ImageOrigin::TopLeft); + + /** + * Creates an Image from the backendTexture associated with the context, taking ownership of the + * backendTexture. The backendTexture will be released when no longer needed. The caller must + * ensure the backendTexture stays unchanged for the lifetime of the returned Image. An Image is + * returned if the format of the backendTexture is recognized and supported. Recognized formats + * vary by GPU back-ends. + */ + static std::shared_ptr MakeAdopted(Context* context, const BackendTexture& backendTexture, + ImageOrigin origin = ImageOrigin::TopLeft); + + virtual ~Image() = default; + + /** + * Returns the width of the Image. + */ + virtual int width() const; + + /** + * Returns pixel row count. + */ + virtual int height() const; + + /** + * Returns true if pixels represent transparency only. If true, each pixel is packed in 8 bits as + * defined by ColorType::ALPHA_8. + */ + bool isAlphaOnly() const; + + /** + * Returns true if the Image is an RGBAAA image, which takes half of the original image as its RGB + * channels and the other half as its alpha channel. + */ + virtual bool isRGBAAA() const; + + /** + * Returns true if Image is backed by an image generator or other services that create their + * pixels on-demand. + */ + bool isLazyGenerated() const; + + /** + * Returns true if the Image was created from a GPU texture. + */ + bool isTextureBacked() const; + + /** + * Returns true if the Image has mipmap levels. The flag was set by the makeMipMapped() method, + * which may be ignored if the GPU or the associated image source does not support mipmaps. + */ + bool hasMipmaps() const; + + /** + * Retrieves the backend texture of the Image. Returns an invalid BackendTexture if the Image is + * not backed by a Texture. + */ + BackendTexture getBackendTexture() const; + + /** + * Returns an Image backed by GPU texture associated with the specified context. If there is a + * corresponding texture cache in the context, returns an Image wraps that texture. Otherwise, + * creates one immediately, which may block the calling thread. Returns the original Image if the + * Image is texture backed and the context is compatible with the backing GPU texture. Otherwise, + * returns nullptr. The associated CPU memory can be freed entirely by setting the original Image + * to nullptr, since the returned Image contains only a GPU texture. + */ + std::shared_ptr makeTextureImage(Context* context) const; + + /** + * Returns subset of Image. The subset must be fully contained by Image dimensions. The returned + * Image always shares pixels and caches with the original Image. Returns nullptr if the subset is + * empty, or the subset is not contained by bounds. + */ + std::shared_ptr makeSubset(const Rect& subset) const; + + /** + * Returns a decoded Image from the lazy Image. The returned Image shares the same texture cache + * with the original Image and immediately schedules an asynchronous decoding task, which will not + * block the calling thread. Returns the original Image if the Image is not lazy or has a + * corresponding texture cache in the specified context. + */ + std::shared_ptr makeDecoded(Context* context = nullptr) const; + + /** + * Returns an Image with mipmaps enabled. Returns the original Image if the Image has mipmaps + * enabled already or fails to enable mipmaps. 
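// A sketch of the texture-related helpers above (illustrative; assumes `image` is a
// std::shared_ptr<Image>, `context` is a valid GPU Context, and a Rect::MakeWH() helper is
// available):
auto textureImage = image->makeTextureImage(context);
if (textureImage != nullptr) {
  image = nullptr;  // the CPU-side source is no longer needed once the texture image exists
  auto subset = textureImage->makeSubset(tgfx::Rect::MakeWH(128, 128));
}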
+ */ + std::shared_ptr makeMipMapped() const; + + /** + * Returns an Image with the RGBAAA layout that takes half of the original Image as its RGB + * channels and the other half as its alpha channel. Returns a subset Image if both alphaStartX + * and alphaStartY are zero. Returns nullptr if the original Image has an encoded origin, subset + * bounds, or an RGBAAA layout. + * @param displayWidth The display width of the RGBAAA image. + * @param displayHeight The display height of the RGBAAA image. + * @param alphaStartX The x position of where alpha area begins in the original image. + * @param alphaStartY The y position of where alpha area begins in the original image. + */ + std::shared_ptr makeRGBAAA(int displayWidth, int displayHeight, int alphaStartX, + int alphaStartY); + + /** + * Returns an Image with its origin transformed by the given EncodedOrigin. The returned Image + * always shares pixels and caches with the original Image. + */ + std::shared_ptr applyOrigin(EncodedOrigin origin) const; + + protected: + std::weak_ptr weakThis; + std::shared_ptr source = nullptr; + + explicit Image(std::shared_ptr source); + + virtual std::shared_ptr onCloneWith(std::shared_ptr newSource) const; + + virtual std::shared_ptr onMakeSubset(const Rect& subset) const; + + virtual std::shared_ptr onMakeRGBAAA(int displayWidth, int displayHeight, int alphaStartX, + int alphaStartY) const; + + virtual std::shared_ptr onApplyOrigin(EncodedOrigin encodedOrigin) const; + + virtual std::unique_ptr asFragmentProcessor( + Context* context, uint32_t surfaceFlags, TileMode tileModeX, TileMode tileModeY, + const SamplingOptions& sampling, const Matrix* localMatrix = nullptr); + + private: + std::unique_ptr asFragmentProcessor(Context* context, uint32_t surfaceFlags, + const SamplingOptions& sampling); + + static std::shared_ptr MakeFrom(std::shared_ptr texture); + + static std::shared_ptr MakeFrom(std::shared_ptr source, + EncodedOrigin origin = EncodedOrigin::TopLeft); + + std::shared_ptr cloneWithSource(std::shared_ptr newSource) const; + + friend class ImageShader; + friend class BlurImageFilter; + friend class Canvas; + friend class Surface; + friend class Mask; +}; +} // namespace tgfx diff --git a/include/tgfx/core/ImageBuffer.h b/include/tgfx/core/ImageBuffer.h new file mode 100644 index 00000000..72ab1f7e --- /dev/null +++ b/include/tgfx/core/ImageBuffer.h @@ -0,0 +1,117 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Data.h" +#include "tgfx/core/ImageInfo.h" +#include "tgfx/core/YUVColorSpace.h" +#include "tgfx/core/YUVData.h" +#include "tgfx/platform/HardwareBuffer.h" +#include "tgfx/platform/NativeImage.h" + +namespace tgfx { +class Context; +class Texture; + +/** + * ImageBuffer describes a two-dimensional array of pixels and is optimized for creating textures. + * ImageBuffer is immutable and safe across threads. The content of an ImageBuffer never changes, + * but some ImageBuffers may have a limited lifetime and cannot create textures after they expire. + * For example, the ImageBuffers generated from an ImageReader. In other cases, ImageBuffers usually + * only expire if explicitly stated by the creator. + */ +class ImageBuffer { + public: + /** + * Creates an ImageBuffer from the platform-specific hardware buffer. For example, the hardware + * buffer could be an AHardwareBuffer on the android platform or a CVPixelBufferRef on the apple + * platform. The returned ImageBuffer takes a reference to the hardwareBuffer. The caller must + * ensure the buffer content stays unchanged for the lifetime of the returned ImageBuffer. The + * colorSpace is ignored if the hardwareBuffer contains only one plane, which is not in the YUV + * format. Returns nullptr if the hardwareBuffer is nullptr. + */ + static std::shared_ptr MakeFrom( + HardwareBufferRef hardwareBuffer, YUVColorSpace colorSpace = YUVColorSpace::BT601_LIMITED); + + /** + * Creates an ImageBuffer from the ImageInfo and shares pixels from the immutable Data object. The + * pixel data may be copied and converted to a new format which is more efficient for texture + * uploading. However, if the ImageInfo is suitable for direct texture uploading, the pixel data + * will be shared instead of copied. In that case, the caller must ensure the pixel data stay + * unchanged for the lifetime of the returned ImageBuffer. Returns nullptr if the info is empty or + * the pixels are nullptr. + * ImageInfo parameters suitable for direct texture uploading include: + * The alpha type is not AlphaType::Unpremultiplied; + * The color type is one of ColorType::ALPHA_8, ColorType::RGBA_8888, and ColorType::BGRA_8888. + */ + static std::shared_ptr MakeFrom(const ImageInfo& info, std::shared_ptr pixels); + + /** + * Creates an ImageBuffer in the I420 format with the specified YUVData and YUVColorSpace. The + * caller must ensure the yuvData stays unchanged for the lifetime of the returned ImageBuffer. + * Returns nullptr if the yuvData is invalid. + */ + static std::shared_ptr MakeI420( + std::shared_ptr yuvData, YUVColorSpace colorSpace = YUVColorSpace::BT601_LIMITED); + + /** + * Creates an ImageBuffer in the NV12 format with the specified YUVData and YUVColorSpace. The + * caller must ensure the yuvData stays unchanged for the lifetime of the returned ImageBuffer. + * Returns nullptr if the yuvData is invalid. + */ + static std::shared_ptr MakeNV12( + std::shared_ptr yuvData, YUVColorSpace colorSpace = YUVColorSpace::BT601_LIMITED); + + virtual ~ImageBuffer() = default; + /** + * Returns the width of the image buffer. + */ + virtual int width() const = 0; + + /** + * Returns the height of the image buffer. + */ + virtual int height() const = 0; + + /** + * Returns true if pixels represent transparency only. If true, each pixel is packed in 8 bits as + * defined by ColorType::ALPHA_8. 
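// A sketch of the factory methods above (illustrative; `hardwareBuffer` is assumed to be an
// AHardwareBuffer or CVPixelBufferRef obtained from the platform and kept unchanged while the
// buffer is alive; `info` and `pixels` are assumed to describe the same content on the CPU):
auto buffer = tgfx::ImageBuffer::MakeFrom(hardwareBuffer);
if (buffer == nullptr) {
  buffer = tgfx::ImageBuffer::MakeFrom(info, pixels);
}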
+ */ + virtual bool isAlphaOnly() const = 0; + + /** + * Returns true if the ImageBuffer is expired, which means it cannot create any new textures. + * However, you can still safely access all of its properties across threads. + */ + virtual bool expired() const { + return false; + } + + protected: + ImageBuffer() = default; + + /** + * Creates a new Texture capturing the pixels of the ImageBuffer. The mipMapped parameter + * specifies whether created texture must allocate mip map levels. + */ + virtual std::shared_ptr onMakeTexture(Context* context, bool mipMapped) const = 0; + + friend class Texture; +}; +} // namespace tgfx diff --git a/include/tgfx/core/ImageCodec.h b/include/tgfx/core/ImageCodec.h new file mode 100644 index 00000000..4a91b706 --- /dev/null +++ b/include/tgfx/core/ImageCodec.h @@ -0,0 +1,107 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Data.h" +#include "tgfx/core/EncodedFormat.h" +#include "tgfx/core/EncodedOrigin.h" +#include "tgfx/core/ImageGenerator.h" +#include "tgfx/core/ImageInfo.h" +#include "tgfx/core/Pixmap.h" +#include "tgfx/platform/NativeImage.h" + +namespace tgfx { + +class ImageBuffer; + +/** + * Abstraction layer directly on top of an image codec. + */ +class ImageCodec : public ImageGenerator { + public: + /** + * If this file path represents an encoded image that we know how to decode, return an ImageCodec + * that can decode it. Otherwise, return nullptr. + */ + static std::shared_ptr MakeFrom(const std::string& filePath); + + /** + * If the file bytes represent an encoded image that we know how to decode, return an ImageCodec + * that can decode it. Otherwise, return nullptr. + */ + static std::shared_ptr MakeFrom(std::shared_ptr imageBytes); + + /** + * Creates a new ImageCodec object from a platform-specific NativeImage. For example, the + * NativeImage could be a jobject that represents a java Bitmap on the android platform or a + * CGImageRef on the apple platform.The returned ImageCodec object takes a reference on the + * nativeImage. Returns nullptr if the nativeImage is nullptr or the current platform has no + * NativeImage support. + */ + static std::shared_ptr MakeFrom(NativeImageRef nativeImage); + + /** + * Encodes the specified Pixmap into a binary image format. Returns nullptr if encoding fails. + */ + static std::shared_ptr Encode(const Pixmap& pixmap, EncodedFormat format, int quality); + + /** + * Returns the encoded origin of the target image. 
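// A sketch of reading the EXIF orientation and mapping it to a Matrix with
// EncodedOriginToMatrix() from EncodedOrigin.h (illustrative; the file path is a placeholder):
auto codec = tgfx::ImageCodec::MakeFrom("assets/photo.jpg");
if (codec != nullptr) {
  auto matrix = tgfx::EncodedOriginToMatrix(codec->origin(), codec->width(), codec->height());
  // `matrix` maps the decoded pixels to an upright rectangle with its origin at (0, 0).
}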
+ */ + EncodedOrigin origin() const { + return _origin; + } + + bool isAlphaOnly() const override { + return false; + } + + /** + * Decodes the image with the specified image info into the given pixels. Returns true if the + * decoding was successful. Note that we do not recommend calling this method due to performance + * reasons, especially on the web platform. Use the makeBuffer() method for better performance if + * your final goal is to draw the image. + */ + virtual bool readPixels(const ImageInfo& dstInfo, void* dstPixels) const = 0; + + protected: + ImageCodec(int width, int height, EncodedOrigin origin) + : ImageGenerator(width, height), _origin(origin) { + } + + std::shared_ptr onMakeBuffer(bool tryHardware) const override; + + private: + EncodedOrigin _origin = EncodedOrigin::TopLeft; + + /** + * If the file path represents an encoded image that the current platform knows how to decode, + * returns an ImageCodec that can decode it. Otherwise, returns nullptr. + */ + static std::shared_ptr MakeNativeCodec(const std::string& filePath); + + /** + * If the file bytes represent an encoded image that the current platform knows how to decode, + * returns an ImageCodec that can decode it. Otherwise, returns nullptr. + */ + static std::shared_ptr MakeNativeCodec(std::shared_ptr imageBytes); + + friend class Pixmap; +}; +} // namespace tgfx diff --git a/include/tgfx/core/ImageFilter.h b/include/tgfx/core/ImageFilter.h new file mode 100644 index 00000000..f43bbff3 --- /dev/null +++ b/include/tgfx/core/ImageFilter.h @@ -0,0 +1,70 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/Image.h" +#include "tgfx/core/Matrix.h" +#include "tgfx/core/TileMode.h" +#include "tgfx/gpu/Context.h" + +namespace tgfx { +struct ImageFilterContext { + ImageFilterContext(Context* context, Matrix matrix, Rect clipBounds, std::shared_ptr image) + : context(context), deviceMatrix(matrix), clipBounds(clipBounds), source(std::move(image)) { + } + + Context* context = nullptr; + Matrix deviceMatrix = Matrix::I(); + Rect clipBounds = Rect::MakeEmpty(); + std::shared_ptr source; +}; + +class ImageFilter { + public: + static std::shared_ptr Blur(float blurrinessX, float blurrinessY, + TileMode tileMode = TileMode::Decal, + const Rect& cropRect = Rect::MakeEmpty()); + + static std::shared_ptr DropShadow(float dx, float dy, float blurrinessX, + float blurrinessY, const Color& color, + const Rect& cropRect = Rect::MakeEmpty()); + + static std::shared_ptr DropShadowOnly(float dx, float dy, float blurrinessX, + float blurrinessY, const Color& color, + const Rect& cropRect = Rect::MakeEmpty()); + + virtual ~ImageFilter() = default; + + virtual std::pair, Point> filterImage( + const ImageFilterContext& context) = 0; + + Rect filterBounds(const Rect& rect) const; + + protected: + explicit ImageFilter(const Rect cropRect) : cropRect(cropRect) { + } + + bool applyCropRect(const Rect& srcRect, Rect* dstRect, const Rect* clipRect = nullptr) const; + + virtual Rect onFilterNodeBounds(const Rect& srcRect) const; + + Rect cropRect; +}; +} // namespace tgfx diff --git a/include/tgfx/core/ImageGenerator.h b/include/tgfx/core/ImageGenerator.h new file mode 100644 index 00000000..3af61712 --- /dev/null +++ b/include/tgfx/core/ImageGenerator.h @@ -0,0 +1,80 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/ImageBuffer.h" + +namespace tgfx { +/** + * ImageGenerator defines the interfaces for generating ImageBuffer objects from encoded images or + * custom data. + */ +class ImageGenerator { + public: + virtual ~ImageGenerator() = default; + + /** + * Returns the width of the target image. + */ + int width() const { + return _width; + } + + /** + * Returns the height of the target image. + */ + int height() const { + return _height; + } + + /** + * Returns true if the generator is guaranteed to produce transparency only pixels. If true, each + * pixel is packed in 8 bits as defined by ColorType::ALPHA_8. + */ + virtual bool isAlphaOnly() const = 0; + + /** + * Returns true if the ImageGenerator has built-in support for asynchronous decoding. 
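// A sketch of the ImageFilter factories declared in ImageFilter.h above (illustrative;
// assumes a Color::Black() helper and a Rect::MakeWH() helper are available):
auto blur = tgfx::ImageFilter::Blur(10.0f, 10.0f, tgfx::TileMode::Decal);
auto shadow = tgfx::ImageFilter::DropShadow(4.0f, 4.0f, 6.0f, 6.0f, tgfx::Color::Black());
auto bounds = blur->filterBounds(tgfx::Rect::MakeWH(200, 200));  // expanded by the blur radius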
If true, the + * makeBuffer() method will not block the calling thread. + */ + virtual bool asyncSupport() const { + return false; + } + + /** + * Crates a new image buffer capturing the pixels decoded from this image generator. + * ImageGenerator does not cache the returned image buffer, each call to this method allocates + * additional storage. Returns an ImageBuffer backed by hardware if tryHardware is true and + * the current platform supports creating it. Otherwise, a raster ImageBuffer is returned. + */ + std::shared_ptr makeBuffer(bool tryHardware = true) const { + return onMakeBuffer(tryHardware); + } + + protected: + ImageGenerator(int width, int height) : _width(width), _height(height) { + } + + virtual std::shared_ptr onMakeBuffer(bool tryHardware) const = 0; + + private: + int _width = 0; + int _height = 0; +}; +} // namespace tgfx diff --git a/include/tgfx/core/ImageInfo.h b/include/tgfx/core/ImageInfo.h new file mode 100644 index 00000000..3b00400a --- /dev/null +++ b/include/tgfx/core/ImageInfo.h @@ -0,0 +1,195 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include +#include +#include "tgfx/core/AlphaType.h" +#include "tgfx/core/ColorType.h" + +namespace tgfx { +/** + * ImageInfo describes the properties for an area of pixels. + */ +class ImageInfo { + public: + /** + * Returns true if the specified width and height is a valid size for pixels. + */ + static bool IsValidSize(int width, int height); + + /** + * Creates a new ImageInfo. Parameters are validated to see if their values are legal, or that the + * combination is supported. Returns an empty ImageInfo if validating fails. + */ + static ImageInfo Make(int width, int height, ColorType colorType, + AlphaType alphaType = AlphaType::Premultiplied, size_t rowBytes = 0); + + static int GetBytesPerPixel(ColorType colorType); + + /** + * Creates an empty ImageInfo. + */ + ImageInfo() = default; + + /** + * Returns true if ImageInfo describes an empty area of pixels. + */ + bool isEmpty() const { + return _width <= 0; + } + + /** + * Returns true if pixels represent transparency only. If true, each pixel is packed in 8 bits as + * defined by ColorType::ALPHA_8. + */ + bool isAlphaOnly() const { + return _colorType == ColorType::ALPHA_8; + } + + /** + * Returns the width of the pixels. + */ + int width() const { + return _width; + } + + /** + * Returns the height of the pixels. + */ + int height() const { + return _height; + } + + /** + * Returns the ColorType of the pixels. + */ + ColorType colorType() const { + return _colorType; + } + + /** + * Returns the AlphaType of the pixels. 
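// A sketch of describing a pixel buffer with ImageInfo (illustrative only):
auto info = tgfx::ImageInfo::Make(256, 256, tgfx::ColorType::RGBA_8888);
if (!info.isEmpty()) {
  size_t byteSize = info.byteSize();        // rowBytes() * height()
  auto smaller = info.makeWH(128, 128);     // same color and alpha types, new dimensions
}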
+ */ + AlphaType alphaType() const { + return _alphaType; + } + + /** + * Returns the rowBytes of the pixels. + */ + size_t rowBytes() const { + return _rowBytes; + } + + /** + * Returns minimum bytes per row, computed from the width and colorType. + */ + size_t minRowBytes() const { + return _width * bytesPerPixel(); + } + + /** + * Returns the byte size of the pixels, computed from the rowBytes and width. + */ + size_t byteSize() const { + return _rowBytes * _height; + } + + /** + * Returns number of bytes per pixel required by the colorType. + */ + int bytesPerPixel() const; + + /** + * Creates a new ImageInfo with dimensions set to width and height, and keep other properties the + * same. + */ + ImageInfo makeWH(int newWidth, int newHeight) const { + return Make(newWidth, newHeight, _colorType, _alphaType, _rowBytes); + } + + /** + * If (0, 0, width(), height()) intersects (x, y, targetWidth, targetHeight), returns a new + * ImageInfo with dimensions set to the size of intersection, and keep other properties the same. + * Otherwise, returns an empty ImageInfo. + */ + ImageInfo makeIntersect(int x, int y, int targetWidth, int targetHeight) const; + + /** + * Creates a new ImageInfo with alphaType set to newAlphaType, and keep other properties the same. + */ + ImageInfo makeAlphaType(AlphaType newAlphaType) const { + return Make(_width, _height, _colorType, newAlphaType, _rowBytes); + } + + /** + * Creates ImageInfo with colorType set to newColorType, rowBytes set to newRowBytes, and keep + * other properties the same. + */ + ImageInfo makeColorType(ColorType newColorType, size_t newRowBytes = 0) const { + return Make(_width, _height, newColorType, _alphaType, newRowBytes); + } + + /** + * Returns readable pixel address at (x, y) of specified base pixel address. + * Note: The x value will be clamped to the range of (0, width), and the y value will be clamped + * to the range of (0, height). + */ + const void* computeOffset(const void* pixels, int x, int y) const; + + /** + * Returns writable pixel address at (x, y) of specified base pixel address. + * Note: The x value will be clamped to the range of [0, width-1], and the y value will be clamped + * to the range of [0, height-1]. + */ + void* computeOffset(void* pixels, int x, int y) const; + + /** + * Returns true if a is equivalent to b. + */ + friend bool operator==(const ImageInfo& a, const ImageInfo& b) { + return memcmp(&a, &b, sizeof(ImageInfo)) == 0; + } + + /** + * Returns true if a is not equivalent to b. + */ + friend bool operator!=(const ImageInfo& a, const ImageInfo& b) { + return !(a == b); + } + + private: + ImageInfo(int width, int height, ColorType colorType, AlphaType alphaType, size_t rowBytes) + : _width(width), + _height(height), + _colorType(colorType), + _alphaType(alphaType), + _rowBytes(rowBytes) { + } + + int _width = 0; + int _height = 0; + ColorType _colorType = ColorType::Unknown; + AlphaType _alphaType = AlphaType::Unknown; + size_t _rowBytes = 0; +}; +} // namespace tgfx \ No newline at end of file diff --git a/include/tgfx/core/ImageReader.h b/include/tgfx/core/ImageReader.h new file mode 100644 index 00000000..7134d91c --- /dev/null +++ b/include/tgfx/core/ImageReader.h @@ -0,0 +1,109 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include "tgfx/core/Bitmap.h" +#include "tgfx/core/ImageBuffer.h" +#include "tgfx/core/Mask.h" +#include "tgfx/core/Rect.h" + +namespace tgfx { +class Texture; +class ImageStream; + +/** + * The ImageReader class allows direct access to ImageBuffers generated from an image stream. The + * image stream may come from a Bitmap, a Mask, or a video-related object of the native + * platform. You should call ImageReader::acquireNextBuffer() to read a new ImageBuffer each time + * when the image stream is modified. All ImageBuffers generated from one ImageReader share the same + * internal texture, which allows you to continuously read the latest content from the image stream + * with minimal memory copying. However, there are two limits: + * + * 1) The generated ImageBuffers are bound to the associated GPU Context when first being drawn + * and cannot be drawn to another Context anymore. + * 2) The generated ImageBuffers may have a limited lifetime and cannot create textures after + * expiration. Usually, the previously acquired ImageBuffer will expire after the newly + * created ImageBuffer is drawn. So there are only two ImageBuffers that can be accessed + * simultaneously. But if the image stream is backed by a hardware buffer, the previously + * acquired ImageBuffer immediately expires when the image stream is being modified. + * + * You can create multiple ImageReaders from the same image stream. ImageReader is safe across + * threads. + */ +class ImageReader { + public: + /** + * Creates a new ImageReader from the specified Bitmap. Returns nullptr if the bitmap is empty. + */ + static std::shared_ptr MakeFrom(const Bitmap& bitmap); + + /** + * Creates a new ImageReader from the specified Mask. Returns nullptr if the mask is nullptr. + */ + static std::shared_ptr MakeFrom(std::shared_ptr mask); + + virtual ~ImageReader(); + + /** + * Returns the width of generated image buffers. + */ + int width() const; + + /** + * Returns the height of generated image buffers. + */ + int height() const; + + /** + * Acquires the next ImageBuffer from the ImageReader after a new image frame has been rendered + * into the associated image stream. Usually, the previously acquired ImageBuffer will expire + * after the newly created ImageBuffer is drawn. But if the image stream is backed by a hardware + * buffer, the previously acquired ImageBuffer immediately expires when the image stream is being + * modified. Returns nullptr if the associated image stream has no content changes. 
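// A sketch of the read loop described above (illustrative; assumes `bitmap` is a non-empty
// Bitmap that other code keeps drawing into between calls):
auto reader = tgfx::ImageReader::MakeFrom(bitmap);
auto frame = reader->acquireNextBuffer();  // latest content, or nullptr if nothing changed
// ... draw `frame`, then acquire again after the next modification; the previously acquired
// buffer usually expires once the newly acquired one has been drawn.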
+ */ + virtual std::shared_ptr acquireNextBuffer(); + + protected: + std::mutex locker = {}; + std::weak_ptr weakThis; + std::shared_ptr stream = nullptr; + + explicit ImageReader(std::shared_ptr stream); + + private: + std::shared_ptr texture = nullptr; + uint64_t bufferVersion = 0; + uint64_t textureVersion = 0; + bool hasPendingChanges = true; + Rect dirtyBounds = Rect::MakeEmpty(); + + static std::shared_ptr MakeFrom(std::shared_ptr imageStream); + + bool checkExpired(uint64_t contentVersion); + + std::shared_ptr readTexture(uint64_t contentVersion, Context* context, bool mipMapped); + + void onContentDirty(const Rect& bounds); + + friend class ImageReaderBuffer; + friend class ImageStream; +}; +} // namespace tgfx diff --git a/include/tgfx/core/Mask.h b/include/tgfx/core/Mask.h new file mode 100644 index 00000000..3010efc1 --- /dev/null +++ b/include/tgfx/core/Mask.h @@ -0,0 +1,129 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Image.h" +#include "tgfx/core/ImageBuffer.h" +#include "tgfx/core/Path.h" +#include "tgfx/core/Stroke.h" +#include "tgfx/core/TextBlob.h" + +namespace tgfx { +class ImageStream; + +/** + * Mask is a utility that can take an image described in a vector graphics format (paths, glyphs) + * and convert it into a raster image that can be used as a drawing mask. Mask is not thread safe, + * do not use it across multiple threads. + */ +class Mask { + public: + /** + * Creates a new Mask and tries to allocate its pixel memory by the specified width and height. + * If tryHardware is true and there is hardware buffer support on the current platform, + * a hardware-backed pixel buffer is allocated. Otherwise, a raster pixel buffer is allocated. + * Returns nullptr if allocation fails. + */ + static std::shared_ptr Make(int width, int height, bool tryHardware = true); + + virtual ~Mask() = default; + + /** + * Returns the width of the Mask. + */ + virtual int width() const = 0; + + /** + * Returns the height of the Mask. + */ + virtual int height() const = 0; + + /** + * Returns true if the Mask is backed by a platform-specified hardware buffer. + */ + virtual bool isHardwareBacked() const = 0; + + /** + * Returns the current total matrix. + */ + Matrix getMatrix() const { + return matrix; + } + + /** + * Replaces transformation with the specified matrix. + */ + void setMatrix(const Matrix& m) { + matrix = m; + } + + /** + * Replaces the current Matrix with matrix premultiplied with the existing one. This has the + * effect of transforming the filling geometry by matrix, before transforming the result with + * the existing Matrix. 
+ */ + void concat(const Matrix& m) { + matrix.preConcat(m); + } + + /** + * Writes the fills or outlines of the given Path to the Mask,with its top-left corner at (0, 0), + * using the current Matrix. + */ + void fillPath(const Path& path, const Stroke* stroke = nullptr); + + /** + * Writes the fills or outlines of the given TextBlob to the Mask,with its top-left corner at + * (0, 0), using the current Matrix. Returns false if the associated typeface has color glyphs and + * leaves the Mask unchanged. + */ + bool fillText(const TextBlob* textBlob, const Stroke* stroke = nullptr); + + /** + * Replaces all pixel values with transparent colors. + */ + virtual void clear() = 0; + + /** + * Returns an ImageBuffer object capturing the pixels in the Mask. Subsequent writing of the Mask + * will not be captured. Instead, the Mask will copy its pixels to a new memory buffer if there is + * a subsequent writing call to the Mask while the returned ImageBuffer is still alive. If the + * Mask is modified frequently, create an ImageReader from the Mask instead, which allows you to + * continuously read the latest content from the Mask with minimal memory copying. Returns nullptr + * if the Mask is empty. + */ + virtual std::shared_ptr makeBuffer() const = 0; + + protected: + virtual std::shared_ptr getImageStream() const = 0; + + void onFillPath(const Path& path, const Matrix& m) { + onFillPath(path, m, false); + } + + virtual void onFillPath(const Path& path, const Matrix& m, bool needsGammaCorrection) = 0; + + virtual bool onFillText(const TextBlob* textBlob, const Stroke* stroke, const Matrix& matrix); + + private: + Matrix matrix = Matrix::I(); + + friend class ImageReader; +}; +} // namespace tgfx diff --git a/include/tgfx/core/MaskFilter.h b/include/tgfx/core/MaskFilter.h new file mode 100644 index 00000000..ccebb832 --- /dev/null +++ b/include/tgfx/core/MaskFilter.h @@ -0,0 +1,37 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
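// A sketch of rasterizing vector content with the Mask declared in Mask.h above
// (illustrative; assumes `path` is a non-empty Path):
auto mask = tgfx::Mask::Make(512, 512);
if (mask != nullptr) {
  mask->setMatrix(tgfx::Matrix::MakeScale(2.0f));
  mask->fillPath(path);
  auto buffer = mask->makeBuffer();  // nullptr if the Mask is still empty
}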
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "Shader.h" + +namespace tgfx { +struct FPArgs; + +class FragmentProcessor; + +class MaskFilter { + public: + static std::shared_ptr Make(std::shared_ptr shader, bool inverted = false); + + virtual ~MaskFilter() = default; + + virtual std::unique_ptr asFragmentProcessor(const FPArgs& args) const = 0; +}; +} // namespace tgfx diff --git a/include/tgfx/core/Matrix.h b/include/tgfx/core/Matrix.h new file mode 100644 index 00000000..09ba5c2b --- /dev/null +++ b/include/tgfx/core/Matrix.h @@ -0,0 +1,709 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/Rect.h" + +namespace tgfx { +/*** + * Matrix holds a 3x2 matrix for transforming coordinates. This allows mapping Point and vectors + * with translation, scaling, skewing, and rotation. Together these types of transformations are + * known as affine transformations. Affine transformations preserve the straightness of lines while + * transforming, so that parallel lines stay parallel. Matrix elements are in row major order. + * Matrix does not have a constructor, so it must be explicitly initialized. + */ +class Matrix { + public: + /** + * Sets Matrix to scale by (sx, sy). Returned matrix is: + * + * | sx 0 0 | + * | 0 sy 0 | + * | 0 0 1 | + * + * @param sx horizontal scale factor + * @param sy vertical scale factor + * @return Matrix with scale factors. + */ + static Matrix MakeScale(float sx, float sy) { + Matrix m = {}; + m.setScale(sx, sy); + return m; + } + + /** + * Sets Matrix to scale by (scale, scale). Returned matrix is: + * + * | scale 0 0 | + * | 0 scale 0 | + * | 0 0 1 | + * + * @param scale horizontal and vertical scale factor + * @return Matrix with scale factors. + */ + static Matrix MakeScale(float scale) { + Matrix m = {}; + m.setScale(scale, scale); + return m; + } + + /** + * Sets Matrix to translate by (tx, ty). 
Returned matrix is: + * + * | 1 0 tx | + * | 0 1 ty | + * | 0 0 1 | + * + * @param tx horizontal translation + * @param ty vertical translation + * @return Matrix with translation + */ + static Matrix MakeTrans(float tx, float ty) { + Matrix m = {}; + m.setTranslate(tx, ty); + return m; + } + + /** + * Sets Matrix to: + * + * | scaleX skewX transX | + * | skewY scaleY transY | + * | 0 0 1 | + * + * @param scaleX horizontal scale factor + * @param skewX horizontal skew factor + * @param transX horizontal translation + * @param skewY vertical skew factor + * @param scaleY vertical scale factor + * @param transY vertical translation + * @return Matrix constructed from parameters + */ + static Matrix MakeAll(float scaleX, float skewX, float transX, float skewY, float scaleY, + float transY) { + Matrix m = {}; + m.setAll(scaleX, skewX, transX, skewY, scaleY, transY); + return m; + } + + /** + * Returns true if Matrix is identity. The identity matrix is: + * + * | 1 0 0 | + * | 0 1 0 | + * | 0 0 1 | + * + * @return Returns true if the Matrix has no effect. + */ + bool isIdentity() const { + return values[0] == 1 && values[1] == 0 && values[2] == 0 && values[3] == 0 && values[4] == 1 && + values[5] == 0; + } + + /** + * Returns one matrix value. + */ + float operator[](int index) const { + return values[index]; + } + + /** + * Returns writable Matrix value. + */ + float& operator[](int index) { + return values[index]; + } + + /** + * Returns one matrix value. + */ + float get(int index) const { + return values[index]; + } + + /** + * Sets Matrix value. + */ + void set(int index, float value) { + values[index] = value; + } + + /** + * Copies six scalar values contained by Matrix into buffer, in member value ascending order: + * ScaleX, SkewX, TransX, SkewY, ScaleY, TransY. + * @param buffer storage for six scalar values. + */ + void get6(float buffer[6]) const { + memcpy(buffer, values, 6 * sizeof(float)); + } + + /** + * Sets Matrix to six scalar values in buffer, in member value ascending order: + * ScaleX, SkewX, TransX, SkewY, ScaleY, TransY. + * Sets matrix to: + * + * | buffer[0] buffer[1] buffer[2] | + * | buffer[3] buffer[4] buffer[5] | + * + * @param buffer storage for six scalar values. + */ + void set6(const float buffer[6]) { + memcpy(values, buffer, 6 * sizeof(float)); + } + + /** + * Copies nine scalar values contained by Matrix into buffer, in member value ascending order: + * ScaleX, SkewX, TransX, SkewY, ScaleY, TransY, 0, 0, 1. + * @param buffer storage for nine scalar values + */ + void get9(float buffer[9]) const; + + /** + * Returns the horizontal scale factor. + */ + float getScaleX() const { + return values[SCALE_X]; + } + + /** + * Returns the vertical scale factor. + */ + float getScaleY() const { + return values[SCALE_Y]; + } + + /** + * Returns the vertical skew factor. + */ + float getSkewY() const { + return values[SKEW_Y]; + } + + /** + * Returns the horizontal scale factor. + */ + float getSkewX() const { + return values[SKEW_X]; + } + + /** + * Returns the horizontal translation factor. + */ + float getTranslateX() const { + return values[TRANS_X]; + } + + /** + * Returns the vertical translation factor. + */ + float getTranslateY() const { + return values[TRANS_Y]; + } + + /** + * Sets the horizontal scale factor. + */ + void setScaleX(float v) { + values[SCALE_X] = v; + } + + /** + * Sets the vertical scale factor. + */ + void setScaleY(float v) { + values[SCALE_Y] = v; + } + + /** + * Sets the vertical skew factor. 
+ */ + void setSkewY(float v) { + values[SKEW_Y] = v; + } + + /** + * Sets the horizontal skew factor. + */ + void setSkewX(float v) { + values[SKEW_X] = v; + } + + /** + * Sets the horizontal translation. + */ + void setTranslateX(float v) { + values[TRANS_X] = v; + } + + /** + * Sets the vertical translation. + */ + void setTranslateY(float v) { + values[TRANS_Y] = v; + } + + /** + * Sets all values from parameters. Sets matrix to: + * + * | scaleX skewX transX | + * | skewY scaleY transY | + * | 0 0 1 | + * + * @param scaleX horizontal scale factor to store + * @param skewX horizontal skew factor to store + * @param transX horizontal translation to store + * @param skewY vertical skew factor to store + * @param scaleY vertical scale factor to store + * @param transY vertical translation to store + */ + void setAll(float scaleX, float skewX, float transX, float skewY, float scaleY, float transY); + + /** + * Sets the Matrix to affine values, passed in column major order: + * + * | a c tx | + * | b d ty | + * | 0 1 | + * + * @param a horizontal scale factor + * @param b vertical skew factor + * @param c horizontal skew factor + * @param d vertical scale factor + * @param tx horizontal translation + * @param ty vertical translation + */ + void setAffine(float a, float b, float c, float d, float tx, float ty); + + /** + * Sets Matrix to identity; which has no effect on mapped Point. Sets Matrix to: + * + * | 1 0 0 | + * | 0 1 0 | + * | 0 0 1 | + * + * Also called setIdentity(); use the one that provides better inline documentation. + */ + void reset(); + + /** + * Sets Matrix to identity; which has no effect on mapped Point. Sets Matrix to: + * + * | 1 0 0 | + * | 0 1 0 | + * | 0 0 1 | + * + * Also called reset(); use the one that provides better inline documentation. + */ + void setIdentity() { + this->reset(); + } + + /** + * Sets Matrix to translate by (tx, ty). + * @param tx horizontal translation + * @param ty vertical translation + */ + void setTranslate(float tx, float ty); + + /** + * Sets Matrix to scale by sx and sy, about a pivot point at (px, py). The pivot point is + * unchanged when mapped with Matrix. + * @param sx horizontal scale factor + * @param sy vertical scale factor + * @param px pivot on x-axis + * @param py pivot on y-axis + */ + void setScale(float sx, float sy, float px, float py); + + /** + * Sets Matrix to scale by sx and sy about at pivot point at (0, 0). + * @param sx horizontal scale factor + * @param sy vertical scale factor + */ + void setScale(float sx, float sy); + + /** + * Sets Matrix to rotate by degrees about a pivot point at (px, py). The pivot point is + * unchanged when mapped with Matrix. Positive degrees rotates clockwise. + * @param degrees angle of axes relative to upright axes + * @param px pivot on x-axis + * @param py pivot on y-axis + */ + void setRotate(float degrees, float px, float py); + + /** + * Sets Matrix to rotate by degrees about a pivot point at (0, 0). Positive degrees rotates + * clockwise. + * @param degrees angle of axes relative to upright axes + */ + void setRotate(float degrees); + + /** + * Sets Matrix to rotate by sinValue and cosValue, about a pivot point at (px, py). + * The pivot point is unchanged when mapped with Matrix. + * Vector (sinValue, cosValue) describes the angle of rotation relative to (0, 1). + * Vector length specifies the scale factor. + */ + void setSinCos(float sinV, float cosV, float px, float py); + + /** + * Sets Matrix to rotate by sinValue and cosValue, about a pivot point at (0, 0). 
+ * Vector (sinValue, cosValue) describes the angle of rotation relative to (0, 1). + * Vector length specifies the scale factor. + */ + void setSinCos(float sinV, float cosV); + + /** + * Sets Matrix to skew by kx and ky, about a pivot point at (px, py). The pivot point is + * unchanged when mapped with Matrix. + * @param kx horizontal skew factor + * @param ky vertical skew factor + * @param px pivot on x-axis + * @param py pivot on y-axis + */ + void setSkew(float kx, float ky, float px, float py); + + /** + * Sets Matrix to skew by kx and ky, about a pivot point at (0, 0). + * @param kx horizontal skew factor + * @param ky vertical skew factor + */ + void setSkew(float kx, float ky); + + /** + * Sets Matrix to Matrix a multiplied by Matrix b. Either a or b may be this. + * + * Given: + * + * | A B C | | J K L | + * a = | D E F |, b = | M N O | + * | G H I | | P Q R | + * + * sets Matrix to: + * + * | A B C | | J K L | | AJ+BM+CP AK+BN+CQ AL+BO+CR | + * a * b = | D E F | * | M N O | = | DJ+EM+FP DK+EN+FQ DL+EO+FR | + * | G H I | | P Q R | | GJ+HM+IP GK+HN+IQ GL+HO+IR | + * + * @param a Matrix on the left side of multiply expression + * @param b Matrix on the right side of multiply expression + */ + void setConcat(const Matrix& a, const Matrix& b); + + /** + * Preconcats the matrix with the specified scale. M' = M * S(sx, sy) + */ + void preTranslate(float tx, float ty); + + /** + * Postconcats the matrix with the specified scale. M' = S(sx, sy, px, py) * M + */ + void preScale(float sx, float sy, float px, float py); + + /** + * Preconcats the matrix with the specified scale. M' = M * S(sx, sy) + */ + void preScale(float sx, float sy); + + /** + * Preconcats the matrix with the specified rotation. M' = M * R(degrees, px, py) + */ + void preRotate(float degrees, float px, float py); + + /** + * Preconcats the matrix with the specified rotation. M' = M * R(degrees) + */ + void preRotate(float degrees); + + /** + * Preconcats the matrix with the specified skew. M' = M * K(kx, ky, px, py) + */ + void preSkew(float kx, float ky, float px, float py); + + /** + * Preconcats the matrix with the specified skew. M' = M * K(kx, ky) + */ + void preSkew(float kx, float ky); + + /** + * Preconcats the matrix with the specified matrix. M' = M * other + */ + void preConcat(const Matrix& other); + + /** + * Postconcats the matrix with the specified translation. M' = T(tx, ty) * M + */ + void postTranslate(float tx, float ty); + + /** + * Postconcats the matrix with the specified scale. M' = S(sx, sy, px, py) * M + */ + void postScale(float sx, float sy, float px, float py); + + /** + * Postconcats the matrix with the specified scale. M' = S(sx, sy) * M + */ + void postScale(float sx, float sy); + + /** + * Postconcats the matrix with the specified rotation. M' = R(degrees, px, py) * M + */ + void postRotate(float degrees, float px, float py); + + /** + * Postconcats the matrix with the specified rotation. M' = R(degrees) * M + */ + void postRotate(float degrees); + + /** + * Postconcats the matrix with the specified skew. M' = K(kx, ky, px, py) * M + */ + void postSkew(float kx, float ky, float px, float py); + + /** + * Postconcats the matrix with the specified skew. M' = K(kx, ky) * M + */ + void postSkew(float kx, float ky); + + /** + * Postconcats the matrix with the specified matrix. M' = other * M + */ + void postConcat(const Matrix& other); + + /** + * If this matrix can be inverted, return true and if the inverse is not null, set inverse to be + * the inverse of this matrix. 
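// A sketch of the pre/post distinction documented above (operand order follows the
// M' formulas in the comments):
auto matrix = tgfx::Matrix::MakeTrans(10.0f, 20.0f);
matrix.preScale(2.0f, 2.0f);  // M' = M * S: the scale is applied to points before the translation
matrix.postRotate(90.0f);     // M' = R * M: the rotation is applied after everything else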
If this matrix cannot be inverted, ignore the inverse and return + * false. + */ + bool invert(Matrix* inverse) const { + if (this->isIdentity()) { + if (inverse) { + inverse->reset(); + } + return true; + } + return this->invertNonIdentity(inverse); + } + + /** + * Returns ture if the Matrix is invertible. + */ + bool invertible() const; + + /** + * Maps src Point array of length count to dst Point array of equal or greater length. Points are + * mapped by multiplying each Point by Matrix. Given: + * + * | A B C | | x | + * Matrix = | D E F |, pt = | y | + * | G H I | | 1 | + * + * where + * + * for (i = 0; i < count; ++i) { + * x = src[i].fX + * y = src[i].fY + * } + * + * each dst Point is computed as: + * + * |A B C| |x| Ax+By+C Dx+Ey+F + * Matrix * pt = |D E F| |y| = |Ax+By+C Dx+Ey+F Gx+Hy+I| = ------- , ------- + * |G H I| |1| Gx+Hy+I Gx+Hy+I + * + * src and dst may point to the same storage. + * + * @param dst storage for mapped Point + * @param src Point to transform + * @param count number of Points to transform + */ + void mapPoints(Point dst[], const Point src[], int count) const; + + /** + * Maps pts Point array of length count in place. Points are mapped by multiplying each Point by + * Matrix. Given: + * + * | A B C | | x | + * Matrix = | D E F |, pt = | y | + * | G H I | | 1 | + * + * where + * + * for (i = 0; i < count; ++i) { + * x = pts[i].fX + * y = pts[i].fY + * } + * + * each resulting pts Point is computed as: + * + * |A B C| |x| Ax+By+C Dx+Ey+F + * Matrix * pt = |D E F| |y| = |Ax+By+C Dx+Ey+F Gx+Hy+I| = ------- , ------- + * |G H I| |1| Gx+Hy+I Gx+Hy+I + * + * @param pts storage for mapped Point + * @param count number of Points to transform + */ + void mapPoints(Point pts[], int count) const { + this->mapPoints(pts, pts, count); + } + + /** + * Maps Point (x, y) to result. Point is mapped by multiplying by Matrix. Given: + * + * | A B C | | x | + * Matrix = | D E F |, pt = | y | + * | G H I | | 1 | + * + * the result is computed as: + * + * |A B C| |x| Ax+By+C Dx+Ey+F + * Matrix * pt = |D E F| |y| = |Ax+By+C Dx+Ey+F Gx+Hy+I| = ------- , ------- + * |G H I| |1| Gx+Hy+I Gx+Hy+I + * + * @param x x-axis value of Point to map + * @param y y-axis value of Point to map + * @param result storage for mapped Point + */ + void mapXY(float x, float y, Point* result) const; + + /** + * Returns Point (x, y) multiplied by Matrix. Given: + * + * | A B C | | x | + * Matrix = | D E F |, pt = | y | + * | G H I | | 1 | + * + * the result is computed as: + * + * |A B C| |x| Ax+By+C Dx+Ey+F + * Matrix * pt = |D E F| |y| = |Ax+By+C Dx+Ey+F Gx+Hy+I| = ------- , ------- + * |G H I| |1| Gx+Hy+I Gx+Hy+I + * + * @param x x-axis value of Point to map + * @param y y-axis value of Point to map + * @return mapped Point + */ + Point mapXY(float x, float y) const { + Point result = {}; + this->mapXY(x, y, &result); + return result; + } + + /** + * Returns true if Matrix maps Rect to another Rect. If true, the Matrix is identity, or scales, + * or rotates a multiple of 90 degrees, or mirrors on axes. In all cases, Matrix may also have + * translation. Matrix form is either: + * + * | scale-x 0 translate-x | + * | 0 scale-y translate-y | + * | 0 0 1 | + * + * or + * + * | 0 rotate-x translate-x | + * | rotate-y 0 translate-y | + * | 0 0 1 | + * + * for non-zero values of scale-x, scale-y, rotate-x, and rotate-y. + */ + bool rectStaysRect() const; + + /** + * Sets dst to bounds of src corners mapped by Matrix. 
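+   *
+   * A minimal usage sketch (illustrative only, not part of the original header):
+   *
+   *   Matrix matrix = Matrix::I();
+   *   matrix.setScale(2.0f, 3.0f);           // scale x by 2 and y by 3 about (0, 0)
+   *   Rect bounds = Rect::MakeWH(10, 20);
+   *   Rect mapped = matrix.mapRect(bounds);  // mapped == (0, 0, 20, 60)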
+ */ + void mapRect(Rect* dst, const Rect& src) const; + + /** + * Sets rect to bounds of rect corners mapped by Matrix. + */ + void mapRect(Rect* rect) const { + mapRect(rect, *rect); + } + + /** + * Returns bounds of src corners mapped by Matrix. + */ + Rect mapRect(const Rect& src) const { + Rect dst = {}; + mapRect(&dst, src); + return dst; + } + + /** Compares a and b; returns true if a and b are numerically equal. Returns true even if sign + * of zero values are different. Returns false if either Matrix contains NaN, even if the other + * Matrix also contains NaN. + */ + friend bool operator==(const Matrix& a, const Matrix& b); + + /** + * Compares a and b; returns true if a and b are not numerically equal. Returns false even if + * sign of zero values are different. Returns true if either Matrix contains NaN, even if the + * other Matrix also contains NaN. + */ + friend bool operator!=(const Matrix& a, const Matrix& b) { + return !(a == b); + } + + /** + * Returns the minimum scaling factor of the Matrix by decomposing the scaling and skewing + * elements. Returns -1 if scale factor overflows. + */ + float getMinScale() const; + + /** + * Returns the maximum scaling factor of the Matrix by decomposing the scaling and skewing + * elements. Returns -1 if scale factor overflows. + */ + float getMaxScale() const; + + /** + * Returns true if all elements of the matrix are finite. Returns false if any element is + * infinity, or NaN. + */ + bool isFinite() const; + + /** + * Returns reference to const identity Matrix. Returned Matrix is set to: + * + * | 1 0 0 | + * | 0 1 0 | + * | 0 0 1 | + * + * @return const identity Matrix + */ + static const Matrix& I(); + + private: + float values[6]; + /** + * Matrix organizes its values in row order. These members correspond to each value in Matrix. + */ + static constexpr int SCALE_X = 0; //!< horizontal scale factor + static constexpr int SKEW_X = 1; //!< horizontal skew factor + static constexpr int TRANS_X = 2; //!< horizontal translation + static constexpr int SKEW_Y = 3; //!< vertical skew factor + static constexpr int SCALE_Y = 4; //!< vertical scale factor + static constexpr int TRANS_Y = 5; //!< vertical translation + + void setScaleTranslate(float sx, float sy, float tx, float ty); + bool invertNonIdentity(Matrix* inverse) const; + bool getMinMaxScaleFactors(float results[2]) const; +}; +} // namespace tgfx diff --git a/include/tgfx/core/Paint.h b/include/tgfx/core/Paint.h new file mode 100644 index 00000000..24982751 --- /dev/null +++ b/include/tgfx/core/Paint.h @@ -0,0 +1,218 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Color.h" +#include "tgfx/core/Stroke.h" +#include "tgfx/core/ImageFilter.h" +#include "tgfx/core/MaskFilter.h" +#include "tgfx/core/Shader.h" + +namespace tgfx { +/** + * Defines enumerations for Paint.setStyle(). + */ +enum class PaintStyle { + /** + * Set to fill geometry. + */ + Fill, + /** + * Set to stroke geometry. + */ + Stroke +}; + +/** + * Paint controls options applied when drawing. + */ +class Paint { + public: + /** + * Sets all Paint contents to their initial values. This is equivalent to replacing Paint with the + * result of Paint(). + */ + void reset(); + + /** + * Returns whether the geometry is filled, stroked, or filled and stroked. + */ + PaintStyle getStyle() const { + return style; + } + + /** + * Sets whether the geometry is filled, stroked, or filled and stroked. + */ + void setStyle(PaintStyle newStyle) { + style = newStyle; + } + + /** + * Retrieves alpha and RGB, unpremultiplied, as four floating point values. + */ + Color getColor() const { + return color; + } + + /** + * Sets alpha and RGB used when stroking and filling. The color is four floating point values, + * unpremultiplied. + */ + void setColor(Color newColor) { + color = newColor; + } + + /** + * Retrieves alpha from the color used when stroking and filling. + */ + float getAlpha() const { + return color.alpha; + } + + /** + * Replaces alpha, leaving RGB unchanged. + */ + void setAlpha(float newAlpha) { + color.alpha = newAlpha; + } + + /** + * Returns the thickness of the pen used by Paint to outline the shape. + * @return zero for hairline, greater than zero for pen thickness + */ + float getStrokeWidth() const { + return stroke.width; + } + + /** + * Sets the thickness of the pen used by the paint to outline the shape. Has no effect if width is + * less than zero. + * @param width zero thickness for hairline; greater than zero for pen thickness + */ + void setStrokeWidth(float width) { + stroke.width = width; + } + + /** + * Returns the geometry drawn at the beginning and end of strokes. + */ + LineCap getLineCap() const { + return stroke.cap; + } + + /** + * Sets the geometry drawn at the beginning and end of strokes. + */ + void setLineCap(LineCap cap) { + stroke.cap = cap; + } + + /** + * Returns the geometry drawn at the corners of strokes. + */ + LineJoin getLineJoin() const { + return stroke.join; + } + + /** + * Sets the geometry drawn at the corners of strokes. + */ + void setLineJoin(LineJoin join) { + stroke.join = join; + } + + /** + * Returns the limit at which a sharp corner is drawn beveled. + */ + float getMiterLimit() const { + return stroke.miterLimit; + } + + /** + * Sets the limit at which a sharp corner is drawn beveled. + */ + void setMiterLimit(float limit) { + stroke.miterLimit = limit; + } + + /** + * Returns the stroke options. + */ + const Stroke* getStroke() const { + return &stroke; + } + + /** + * Sets the stroke options. + */ + void setStroke(const Stroke& newStroke) { + stroke = newStroke; + } + + /** + * Returns optional colors used when filling a path if previously set, such as a gradient. + */ + std::shared_ptr getShader() const { + return shader; + } + + /** + * Sets optional colors used when filling a path, such as a gradient. If nullptr, color is used + * instead. 
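+   *
+   * A minimal usage sketch (illustrative only):
+   *
+   *   Paint paint;
+   *   paint.setShader(Shader::MakeColorShader(Color::Black()));  // a solid color supplied via a shader
+   *
+   * A gradient created with Shader::MakeLinearGradient() can be installed the same way.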
+ */ + void setShader(std::shared_ptr<Shader> newShader) { + shader = std::move(newShader); + } + + std::shared_ptr<MaskFilter> getMaskFilter() const { + return maskFilter; + } + + void setMaskFilter(std::shared_ptr<MaskFilter> newMaskFilter) { + maskFilter = std::move(newMaskFilter); + } + + std::shared_ptr<ColorFilter> getColorFilter() const { + return colorFilter; + } + + void setColorFilter(std::shared_ptr<ColorFilter> newColorFilter) { + colorFilter = std::move(newColorFilter); + } + + // TODO(pengweilv): Used when drawing, currently only works with drawTexture. + void setImageFilter(std::shared_ptr<ImageFilter> newImageFilter) { + imageFilter = std::move(newImageFilter); + } + + std::shared_ptr<ImageFilter> getImageFilter() const { + return imageFilter; + } + + private: + PaintStyle style = PaintStyle::Fill; + Color color = Color::Black(); + Stroke stroke = Stroke(0); + std::shared_ptr<Shader> shader = nullptr; + std::shared_ptr<MaskFilter> maskFilter = nullptr; + std::shared_ptr<ColorFilter> colorFilter = nullptr; + std::shared_ptr<ImageFilter> imageFilter = nullptr; +}; +} // namespace tgfx diff --git a/include/tgfx/core/Path.h b/include/tgfx/core/Path.h new file mode 100644 index 00000000..efe6ae87 --- /dev/null +++ b/include/tgfx/core/Path.h @@ -0,0 +1,272 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Matrix.h" +#include "tgfx/core/PathTypes.h" + +namespace tgfx { +class PathRef; + +/** + * Path contains geometry. Path may be empty, or contain one or more verbs that outline a figure. + * Path always starts with a move verb to a Cartesian coordinate, and may be followed by additional + * verbs that add lines or curves. Adding a close verb makes the geometry into a continuous loop, a + * closed contour. Path may contain any number of contours, each beginning with a move verb. + */ +class Path { + public: + /** + * Creates an empty path. + */ + Path(); + + /** + * Compares a and b; returns true if they are equivalent. + */ + friend bool operator==(const Path& a, const Path& b); + + /** + * Compares a and b; returns true if they are not equivalent. + */ + friend bool operator!=(const Path& a, const Path& b); + + /** + * Returns PathFillType, the rule used to fill Path. PathFillType of a new Path is + * PathFillType::Winding. + */ + PathFillType getFillType() const; + + /** + * Sets PathFillType, the rule used to fill Path. + */ + void setFillType(PathFillType fillType); + + /** + * Returns true if PathFillType is InverseWinding or InverseEvenOdd. + */ + bool isInverseFillType() const; + + /** + * Replaces PathFillType with its inverse. + */ + void toggleInverseFillType(); + + /** + * Returns true if Path is equivalent to Rect when filled. Otherwise returns false, and leaves + * rect unchanged.
The rect may be smaller than the Path bounds. Path bounds may include + * PathVerb::Move points that do not alter the area drawn by the returned rect. + */ + bool asRect(Rect* rect) const; + + /** + * Returns true if Path is equivalent to RRect when filled, Otherwise returns false, and leaves + * rRect unchanged. + */ + bool asRRect(RRect* rRect) const; + + /** + * Returns true if Path contains only one line; + */ + bool isLine(Point line[2] = nullptr) const; + + /** + * Returns the bounds of the path's points. If the path contains 0 or 1 points, the bounds is set + * to (0,0,0,0), and isEmpty() will return true. Note: this bounds may be larger than the actual + * shape, since curves do not extend as far as their control points. + */ + Rect getBounds() const; + + /** + * Returns true if Path is empty. + */ + bool isEmpty() const; + + /** + * Returns true if the point (x, y) is contained by Path, taking into account PathFillType. + */ + bool contains(float x, float y) const; + + /** + * Returns true if rect is contained by Path. This method is conservative. it may return false + * when rect is actually contained by Path. For now, only returns true if Path has one contour. + */ + bool contains(const Rect& rect) const; + + /** + * Adds beginning of contour at Point (x, y). + */ + void moveTo(float x, float y); + + /** + * Adds beginning of contour at point. + */ + void moveTo(const Point& point); + + /** + * Adds a line from last point to (x, y). + */ + void lineTo(float x, float y); + + /** + * Adds a line from last point to new point. + */ + void lineTo(const Point& point); + + /** + * Adds a quad curve from last point towards (controlX, controlY), ending at (x, y). + */ + void quadTo(float controlX, float controlY, float x, float y); + + /** + * Adds a quad curve from last point towards control, ending at point. + */ + void quadTo(const Point& control, const Point& point); + + /** + * Adds a cubic curve from last point towards (controlX1, controlY1), then towards + * (controlX2, controlY2), ending at (x, y). + */ + void cubicTo(float controlX1, float controlY1, float controlX2, float controlY2, float x, + float y); + + /** + * Adds a cubic curve from last point towards control1, then towards control2, ending at (x, y). + */ + void cubicTo(const Point& control1, const Point& control2, const Point& point); + + /** + * Closes the current contour of Path. A closed contour connects the first and last Point with + * line, forming a continuous loop. + */ + void close(); + + /** + * Adds a rect to Path. If reversed is false, Rect begins at startIndex point and continues + * clockwise if reversed is false, counterclockwise if reversed is true. The indices of all points + * are as follows: + * 0 1 + * *-------* + * | | + * *-------* + * 3 2 + */ + void addRect(const Rect& rect, bool reversed = false, unsigned startIndex = 0); + + /** + * Adds a rect to Path. If reversed is false, Rect begins at startIndex point and continues + * clockwise if reversed is false, counterclockwise if reversed is true. The indices of all points + * are as follows: + * 0 1 + * *-------* + * | | + * *-------* + * 3 2 + */ + void addRect(float left, float top, float right, float bottom, bool reversed = false, + unsigned startIndex = 0); + + /** + * Adds a oval to path. Oval is upright ellipse bounded by Rect oval with radius equal to half + * oval width and half oval height. Oval begins at startIndex point and continues clockwise if + * reversed is false, counterclockwise if reversed is true. 
The indices of all points are as + * follows: + * 0 + * --*-- + * | | + * 3 * * 1 + * | | + * --*-- + * 2 + */ + void addOval(const Rect& oval, bool reversed = false, unsigned startIndex = 0); + + /** + * Appends arc to Path, as the start of new contour. Arc added is part of ellipse bounded by oval, + * from startAngle through sweepAngle. Both startAngle and sweepAngle are measured in degrees, + * where zero degrees is aligned with the positive x-axis, and positive sweeps extends arc + * clockwise. If sweepAngle <= -360, or sweepAngle >= 360; and startAngle modulo 90 is nearly + * zero, append oval instead of arc. Otherwise, sweepAngle values are treated modulo 360, and arc + * may or may not draw depending on numeric rounding. + * + * @param oval bounds of ellipse containing arc + * @param startAngle starting angle of arc in degrees + * @param sweepAngle sweep, in degrees. Positive is clockwise; treated modulo 360 + */ + void addArc(const Rect& oval, float startAngle, float sweepAngle); + + /** + * Adds a round rect to path. creating a new closed contour, each corner is 90 degrees of an + * ellipse with radius (radiusX, radiusY). The round rect begins at startIndex point and continues + * clockwise if reversed is false, counterclockwise if reversed is true. The indices of all points + * are as follows: + * 0 1 + * *------* + * 7 * * 2 + * | | + * 6 * * 3 + * *------* + * 5 4 + */ + void addRoundRect(const Rect& rect, float radiusX, float radiusY, bool reversed = false, + unsigned startIndex = 0); + + /** + * Adds a src to this Path. + */ + void addPath(const Path& src, PathOp op = PathOp::Append); + + /** + * Sets Path to its initial state. Internal storage associated with Path is released. + */ + void reset(); + + /** + * Applies matrix to this Path, this transforms verb array, Point array, and weight. + */ + void transform(const Matrix& matrix); + + /** + * Reveres this path from back to front. + */ + void reverse(); + + /** + * Iterates through verb array and associated Point array. + */ + void decompose(const PathIterator& iterator, void* info = nullptr) const; + + /** + * Returns the number of points in Path. + */ + int countPoints() const; + + /** + * Returns the number of verbs in Path. + */ + int countVerbs() const; + + private: + std::shared_ptr pathRef = nullptr; + + PathRef* writableRef(); + + friend class PathRef; +}; +} // namespace tgfx \ No newline at end of file diff --git a/include/tgfx/core/PathEffect.h b/include/tgfx/core/PathEffect.h new file mode 100644 index 00000000..65f7ec2e --- /dev/null +++ b/include/tgfx/core/PathEffect.h @@ -0,0 +1,60 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Path.h" +#include "tgfx/core/Stroke.h" + +namespace tgfx { +/** + * PathEffect applies transformation to a Path. + */ +class PathEffect { + public: + /** + * Creates a stroke path effect with the specified stroke options. + */ + static std::unique_ptr<PathEffect> MakeStroke(const Stroke* stroke); + + /** + * Creates a dash path effect. + * @param intervals array containing an even number of entries (>=2), with the even indices + * specifying the length of "on" intervals, and the odd indices specifying the length of "off" + * intervals. + * @param count number of elements in the intervals array + * @param phase offset into the intervals array (mod the sum of all of the intervals). + */ + static std::unique_ptr<PathEffect> MakeDash(const float intervals[], int count, float phase); + + /** + * Creates a corner path effect. + * @param radius must be > 0 to have an effect. It specifies the distance from each corner that + * should be "rounded". + */ + static std::unique_ptr<PathEffect> MakeCorner(float radius); + + virtual ~PathEffect() = default; + + /** + * Applies this effect to the specified path. Returns false if this effect cannot be applied, and + * leaves this path unchanged. + */ + virtual bool applyTo(Path* path) const = 0; +}; +} // namespace tgfx \ No newline at end of file diff --git a/include/tgfx/core/PathMeasure.h b/include/tgfx/core/PathMeasure.h new file mode 100644 index 00000000..65a52405 --- /dev/null +++ b/include/tgfx/core/PathMeasure.h @@ -0,0 +1,61 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Path.h" + +namespace tgfx { +/** + * PathMeasure calculates the length of a Path and cuts child segments from it. + */ +class PathMeasure { + public: + /** + * Initializes the PathMeasure with the specified path. + */ + static std::unique_ptr<PathMeasure> MakeFrom(const Path& path); + + virtual ~PathMeasure() = default; + + /** + * Returns the total length of the path. + */ + virtual float getLength() = 0; + + /** + * Given a start and stop distance, return in result the intervening segment(s). If the segment is + * zero-length, return false, else return true. startD and stopD are pinned to legal values + * (0..getLength()). If startD > stopD then return false (and leave dst untouched). + */ + virtual bool getSegment(float startD, float stopD, Path* result) = 0; + + /** + * Pins distance to 0 <= distance <= getLength(), and then computes + * the corresponding position and tangent.
+ * Returns false if there is no path, or a zero-length path was specified, in which case + * position and tangent are unchanged. + */ + virtual bool getPosTan(float distance, Point* position, Point* tangent) = 0; + + /** + * Returns true if the current contour is closed(). + */ + virtual bool isClosed() = 0; +}; +} // namespace tgfx \ No newline at end of file diff --git a/include/tgfx/core/PathTypes.h b/include/tgfx/core/PathTypes.h new file mode 100644 index 00000000..68add062 --- /dev/null +++ b/include/tgfx/core/PathTypes.h @@ -0,0 +1,104 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/Point.h" + +namespace tgfx { +/** + * PathFillType selects the rule used to fill Path. + */ +enum class PathFillType { + /** + * Enclosed by a non-zero sum of contour directions. + */ + Winding, + /** + * Enclosed by an odd number of contours. + */ + EvenOdd, + /** + * Enclosed by a zero sum of contour directions. + */ + InverseWinding, + /** + * Enclosed by an even number of contours. + */ + InverseEvenOdd, +}; + +/** + * The logical operations that can be performed when combining two paths. + */ +enum class PathOp { + /** + * Appended to destination unaltered. + */ + Append, + /** + * Subtract the op path from the destination path. + */ + Difference, + /** + * Intersect the two paths. + */ + Intersect, + /** + * Union (inclusive-or) the two paths. + */ + Union, + /** + * Exclusive-or the two paths. + */ + XOR, +}; + +/** + * PathVerb instructs Path how to interpret one or more Point, manage contour, and terminate Path. + */ +enum class PathVerb { + /** + * PathIterator returns 1 point. + */ + Move, + /** + * PathIterator returns 2 points. + */ + Line, + /** + * PathIterator returns 3 points. + */ + Quad, + /** + * PathIterator returns 4 points. + */ + Cubic, + /** + * PathIterator returns 0 points. + */ + Close +}; + +/** + * Zero to four Point are stored in points, depending on the returned PathVerb + */ +using PathIterator = std::function; + +} // namespace tgfx diff --git a/include/tgfx/core/Pixmap.h b/include/tgfx/core/Pixmap.h new file mode 100644 index 00000000..8e583efe --- /dev/null +++ b/include/tgfx/core/Pixmap.h @@ -0,0 +1,219 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Bitmap.h" +#include "tgfx/core/Rect.h" + +namespace tgfx { +/** + * Pixmap provides a utility to pair ImageInfo with pixels. Pixmap is a low-level class that + * provides convenience functions to access raster destinations, which can convert the format of + * pixels from one to another. Pixmap does not try to manage the lifetime of the pixel memory. Use + * Bitmap to manage pixel memory. Pixmap is not thread safe. + */ +class Pixmap { + public: + /** + * Creates an empty Pixmap without pixels, and with an empty ImageInfo. + */ + Pixmap() = default; + + /** + * Creates a new Pixmap with the specified ImageInfo and read-only pixels. The memory lifetime of + * pixels is managed by the caller. When Pixmap goes out of scope, the pixels are unaffected. + */ + Pixmap(const ImageInfo& info, const void* pixels); + + /** + * Creates a new Pixmap with the specified ImageInfo and writable pixels. The memory lifetime of + * pixels is managed by the caller. When Pixmap goes out of scope, the pixels are unaffected. + */ + Pixmap(const ImageInfo& info, void* pixels); + + /** + * Creates a new Pixmap from the specified read-only Bitmap. Pixmap will lock pixels from the + * Bitmap and take a reference on its PixelRef object. The Bitmap will remain locked until the + * Pixmap goes out of scope or reset. + */ + explicit Pixmap(const Bitmap& bitmap); + + /** + * Creates a new Pixmap from the specified writable Bitmap. Pixmap will lock pixels from the + * Bitmap and take a reference on its PixelRef object. The Bitmap will remain locked until the + * Pixmap goes out of scope or reset. + */ + explicit Pixmap(Bitmap& bitmap); + + ~Pixmap(); + + /** + * Sets the ImageInfo to empty, and pixels to nullptr. + */ + void reset(); + + /** + * Sets the Pixmap to the specified info and pixels. The memory lifetime of pixels is managed by + * the caller. When Pixmap goes out of scope, the memory of pixels is unaffected. + */ + void reset(const ImageInfo& info, const void* pixels); + + /** + * Sets the Pixmap to the specified ImageInfo and writable pixels. The memory lifetime of pixels is + * managed by the caller. When Pixmap goes out of scope, the memory of pixels is unaffected. + */ + void reset(const ImageInfo& info, void* pixels); + + /** + * Sets the Pixmap to the specified read-only Bitmap. Pixmap will lock pixels from the Bitmap and + * take a reference on its PixelRef object. The Bitmap will remain locked until the Pixmap goes + * out of scope or reset. + */ + void reset(const Bitmap& bitmap); + + /** + * Sets the Pixmap to the specified writable Bitmap. Pixmap will lock pixels from the Bitmap and + * take a reference on its PixelRef object. The Bitmap will remain locked until the Pixmap goes + * out of scope or reset. + */ + void reset(Bitmap& bitmap); + + /** + * Returns true if the Pixmap describes an empty area of pixels.
+ */ + bool isEmpty() const { + return _info.isEmpty(); + } + + /** + * Returns an ImageInfo describing the width, height, color type, alpha type, and row bytes of the + * pixels. + */ + const ImageInfo& info() const { + return _info; + } + + /** + * Returns the width of the pixels. + */ + int width() const { + return _info.width(); + } + + /** + * Returns the height of the pixels. + */ + int height() const { + return _info.height(); + } + + /** + * Returns the ColorType of the pixels. + */ + ColorType colorType() const { + return _info.colorType(); + } + + /** + * Returns the AlphaType of the pixels. + */ + AlphaType alphaType() const { + return _info.alphaType(); + } + + /** + * Returns true if pixels represent transparency only. If true, each pixel is packed in 8 bits as + * defined by ColorType::ALPHA_8. + */ + bool isAlphaOnly() const { + return _info.isAlphaOnly(); + } + + /** + * Returns the rowBytes of the pixels. + */ + size_t rowBytes() const { + return _info.rowBytes(); + } + + /** + * Returns the byte size of the pixels. + */ + size_t byteSize() const { + return _info.byteSize(); + } + + /** + * Returns the pixel address, the base address corresponding to the pixel origin. + */ + const void* pixels() const { + return _pixels; + } + + /** + * Returns the writable pixel address, the base address corresponding to the pixel origin. Returns + * nullptr if the Pixmap is constructed from read-only pixels. + */ + void* writablePixels() const { + return _writablePixels; + } + + /** + * Returns pixel at (x, y) as unpremultiplied color. Some color precision may be lost in the + * conversion to unpremultiplied color; original pixel data may have additional precision. Returns + * a transparent color if the point (x, y) is not contained by bounds. + * @param x column index, zero or greater, and less than width() + * @param y row index, zero or greater, and less than height() + * @return pixel converted to unpremultiplied color + */ + Color getColor(int x, int y) const; + + /** + * Returns subset of the Pixmap. subset must be fully contained by Pixmap dimensions. Returns + * an empty Pixmap if subset is empty, or subset is not contained by bounds. + */ + Pixmap makeSubset(const Rect& subset) const; + + /** + * Copies a rect of pixels to dstPixels with specified ImageInfo. Copy starts at (srcX, srcY), and + * does not exceed Pixmap (width(), height()). Pixels are copied only if pixel conversion is + * possible. Returns true if pixels are copied to dstPixels. + */ + bool readPixels(const ImageInfo& dstInfo, void* dstPixels, int srcX = 0, int srcY = 0) const; + + /** + * Copies a rect of pixels from src. Copy starts at (dstX, dstY), and does not exceed + * Pixmap (width(), height()). Pixels are copied only if pixel conversion is possible and the + * Pixmap is constructed from writable pixels. Returns true if src pixels are copied to Pixmap. + */ + bool writePixels(const ImageInfo& srcInfo, const void* srcPixels, int dstX = 0, int dstY = 0); + + /** + * Replaces all pixel values with transparent colors. Returns false if the Pixmap is constructed + * from read-only pixels. 
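+   *
+   * A minimal sketch (illustrative only; "bitmap" stands for an already-allocated, writable Bitmap):
+   *
+   *   Pixmap pixmap(bitmap);          // non-const Bitmap overload: pixels are writable
+   *   bool cleared = pixmap.clear();  // true; every pixel is now transparent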
+ */ + bool clear(); + + private: + ImageInfo _info = {}; + const void* _pixels = nullptr; + void* _writablePixels = nullptr; + std::shared_ptr pixelRef = nullptr; +}; +} // namespace tgfx \ No newline at end of file diff --git a/include/tgfx/core/Point.h b/include/tgfx/core/Point.h new file mode 100644 index 00000000..09f6c19a --- /dev/null +++ b/include/tgfx/core/Point.h @@ -0,0 +1,135 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include +#include + +namespace tgfx { +/** + * Point holds two 32-bit floating point coordinates. + */ +struct Point { + /** + * x-axis value. + */ + float x; + /** + * y-axis value. + */ + float y; + + /** + * Creates a Point set to (0, 0). + */ + static const Point& Zero() { + static const Point zero = Point::Make(0, 0); + return zero; + } + + /** + * Creates a Point with specified x and y value. + */ + static constexpr Point Make(float x, float y) { + return {x, y}; + } + + /** + * Creates a Point with specified x and y value. + */ + static constexpr Point Make(int x, int y) { + return {static_cast(x), static_cast(y)}; + } + + /** + * Returns true if fX and fY are both zero. + */ + bool isZero() const { + return (0 == x) & (0 == y); + } + + /** + * Sets fX to x and fY to y. + */ + void set(float xValue, float yValue) { + x = xValue; + y = yValue; + } + + /** + * Adds offset (dx, dy) to Point. + */ + void offset(float dx, float dy) { + x += dx; + y += dy; + } + + /** + * Returns the Euclidean distance from origin. + */ + float length() const { + return Point::Length(x, y); + } + + /** + * Returns true if a is equivalent to b. + */ + friend bool operator==(const Point& a, const Point& b) { + return a.x == b.x && a.y == b.y; + } + + /** + * Returns true if a is not equivalent to b. + */ + friend bool operator!=(const Point& a, const Point& b) { + return a.x != b.x || a.y != b.y; + } + + /** + * Returns a Point from b to a; computed as (a.fX - b.fX, a.fY - b.fY). + */ + friend Point operator-(const Point& a, const Point& b) { + return {a.x - b.x, a.y - b.y}; + } + + /** + * Returns Point resulting from Point a offset by Point b, computed as: + * (a.fX + b.fX, a.fY + b.fY). + */ + friend Point operator+(const Point& a, const Point& b) { + return {a.x + b.x, a.y + b.y}; + } + + /** + * Returns the Euclidean distance from origin. + */ + static float Length(float x, float y) { + return sqrt(x * x + y * y); + } + + /** + * Returns the Euclidean distance between a and b. 
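+   *
+   * For example (illustrative): Distance(Point::Make(0.0f, 0.0f), Point::Make(3.0f, 4.0f))
+   * returns 5, the length of the (3, 4) vector.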
+ */ + static float Distance(const Point& a, const Point& b) { + return Length(a.x - b.x, a.y - b.y); + } +}; +} // namespace tgfx diff --git a/include/tgfx/core/Rect.h b/include/tgfx/core/Rect.h new file mode 100644 index 00000000..6c452e73 --- /dev/null +++ b/include/tgfx/core/Rect.h @@ -0,0 +1,499 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include <algorithm> +#include "tgfx/core/Point.h" +#include "tgfx/core/Size.h" + +namespace tgfx { +/** + * Rect holds four float coordinates describing the upper and lower bounds of a rectangle. Rect may + * be created from outer bounds or from position, width, and height. Rect describes an area; if its + * right is less than or equal to its left, or if its bottom is less than or equal to its top, it is + * considered empty. + */ +struct Rect { + /** + * smaller x-axis bounds. + */ + float left; + /** + * smaller y-axis bounds. + */ + float top; + /** + * larger x-axis bounds. + */ + float right; + /** + * larger y-axis bounds. + */ + float bottom; + + /** + * Returns constructed Rect set to (0, 0, 0, 0). + */ + static constexpr Rect MakeEmpty() { + return {0, 0, 0, 0}; + } + + /** + * Returns constructed Rect set to float values (0, 0, w, h). Does not validate input; w or h may + * be negative. + */ + static constexpr Rect MakeWH(float w, float h) { + return {0, 0, w, h}; + } + + /** + * Returns constructed Rect set to float values (0, 0, w, h). Does not validate input; w or h may + * be negative. + */ + static constexpr Rect MakeWH(int w, int h) { + return {0, 0, static_cast<float>(w), static_cast<float>(h)}; + } + + /** + * Returns constructed Rect set to (l, t, r, b). Does not sort input; Rect may result in left + * greater than right, or top greater than bottom. + */ + static constexpr Rect MakeLTRB(float l, float t, float r, float b) { + return {l, t, r, b}; + } + + /** + * Returns constructed Rect set to (x, y, x + w, y + h). Does not validate input; w or h may be + * negative. + */ + static constexpr Rect MakeXYWH(float x, float y, float w, float h) { + return {x, y, x + w, y + h}; + } + + /** + * Returns constructed Rect set to (x, y, x + w, y + h). Does not validate input; w or h may be + * negative. + */ + static constexpr Rect MakeXYWH(int x, int y, int w, int h) { + return {static_cast<float>(x), static_cast<float>(y), static_cast<float>(x + w), + static_cast<float>(y + h)}; + } + + /** + * Returns constructed Rect set to (0, 0, size.width, size.height). Does not validate input; + * size.width or size.height may be negative.
+ */ + static constexpr Rect MakeSize(const ISize& size) { + return Rect{0, 0, static_cast(size.width), static_cast(size.height)}; + } + + /** + * Returns constructed Rect set to (0, 0, size.width, size.height). Does not validate input; + * size.width or size.height may be negative. + */ + static constexpr Rect MakeSize(const Size& size) { + return Rect{0, 0, size.width, size.height}; + } + + /** + * Returns true if left is equal to or greater than right, or if top is equal to or greater + * than bottom. Call sort() to reverse rectangles with negative width() or height(). + */ + bool isEmpty() const { + // We write it as the NOT of a non-empty rect, so we will return true if any values + // are NaN. + return !(left < right && top < bottom); + } + + /** + * Returns true if left is equal to or less than right, or if top is equal to or less than + * bottom. Call sort() to reverse rectangles with negative width() or height(). + */ + bool isSorted() const { + return left <= right && top <= bottom; + } + + /** + * Returns left edge of Rect, if sorted. Call isSorted() to see if Rect is valid. Call sort() to + * reverse left and right if needed. + */ + float x() const { + return left; + } + + /** + * Returns top edge of Rect, if sorted. Call isEmpty() to see if Rect may be invalid, and sort() + * to reverse top and bottom if needed. + */ + float y() const { + return top; + } + + /** + * Returns span on the x-axis. This does not check if Rect is sorted, or if result fits in 32-bit + * float; result may be negative or infinity. + */ + float width() const { + return right - left; + } + + /** + * Returns span on the y-axis. This does not check if Rect is sorted, or if result fits in 32-bit + * float; result may be negative or infinity. + */ + float height() const { + return bottom - top; + } + + /** + * Returns spans on the x-axis and y-axis. + * @return Size (width, height) + */ + Size size() const { + return Size::Make(this->width(), this->height()); + } + + /** + * Returns average of left edge and right edge. Result does not change if Rect is sorted. + */ + float centerX() const { + // don't use (left + bottom) * 0.5f as that might overflow before the 0.5f + return left * 0.5f + right * 0.5f; + } + + /** + * Returns average of top edge and bottom edge. Result does not change if Rect is sorted. + */ + float centerY() const { + // don't use (top + bottom) * 0.5f as that might overflow before the 0.5f + return top * 0.5f + bottom * 0.5f; + } + + /** + * Returns true if all members in a: left, top, right, and bottom; are equal to the + * corresponding members in b. + * a and b are not equal if either contain NaN. a and b are equal if members contain zeroes with + * different signs. + */ + friend bool operator==(const Rect& a, const Rect& b) { + return a.left == b.left && a.right == b.right && a.top == b.top && a.bottom == b.bottom; + } + + /** + * Returns true if any in a: left, top, right, and bottom; does not equal the corresponding + * members in b. + * a and b are not equal if either contain NaN. a and b are equal if members contain zeroes with + * different signs. + */ + friend bool operator!=(const Rect& a, const Rect& b) { + return a.left != b.left || a.right != b.right || a.top != b.top || a.bottom != b.bottom; + } + + /** + * Sets Rect to (0, 0, 0, 0). + */ + void setEmpty() { + *this = MakeEmpty(); + } + + /** + * Sets Rect to (left, top, right, bottom). left and right are not sorted; left is not necessarily + * less than right. 
top and bottom are not sorted; top is not necessarily less than bottom. + */ + void setLTRB(float l, float t, float r, float b) { + left = l; + top = t; + right = r; + bottom = b; + } + + /** + * Sets to bounds of Point array with count entries. Returns false if count is zero or smaller, or + * if Point array contains an infinity or NaN; in these cases sets Rect to (0, 0, 0, 0). + * Result is either empty or sorted: left is less than or equal to right, and top is less than + * or equal to bottom. + */ + bool setBounds(const Point pts[], int count); + + /** + * Sets Rect to (x, y, x + width, y + height). Does not validate input; width or height may be + * negative. + */ + void setXYWH(float x, float y, float width, float height) { + left = x; + top = y; + right = x + width; + bottom = y + height; + } + + /** + * Sets Rect to (0, 0, width, height). Does not validate input, width or height may be negative. + */ + void setWH(float width, float height) { + left = 0; + top = 0; + right = width; + bottom = height; + } + + /** + * Returns Rect offset by (dx, dy). + * If dx is negative, Rect returned is moved to the left. + * If dx is positive, Rect returned is moved to the right. + * If dy is negative, Rect returned is moved upward. + * If dy is positive, Rect returned is moved downward. + */ + Rect makeOffset(float dx, float dy) const { + return MakeLTRB(left + dx, top + dy, right + dx, bottom + dy); + } + + /** + * Returns Rect, inset by (dx, dy). + * If dx is negative, Rect returned is wider. + * If dx is positive, Rect returned is narrower. + * If dy is negative, Rect returned is taller. + * If dy is positive, Rect returned is shorter. + */ + Rect makeInset(float dx, float dy) const { + return MakeLTRB(left + dx, top + dy, right - dx, bottom - dy); + } + + /** + * Returns Rect, outset by (dx, dy). + * If dx is negative, Rect returned is narrower. + * If dx is positive, Rect returned is wider. + * If dy is negative, Rect returned is shorter. + * If dy is positive, Rect returned is taller. + */ + Rect makeOutset(float dx, float dy) const { + return MakeLTRB(left - dx, top - dy, right + dx, bottom + dy); + } + + /** + * Offsets Rect by adding dx to left, right; and by adding dy to top, bottom. + * If dx is negative, moves Rect to the left. + * If dx is positive, moves Rect to the right. + * If dy is negative, moves Rect upward. + * If dy is positive, moves Rect downward. + */ + void offset(float dx, float dy) { + left += dx; + top += dy; + right += dx; + bottom += dy; + } + + /** + * Offsets Rect by adding delta.fX to left, right; and by adding delta.fY to top, bottom. + * If delta.fX is negative, moves Rect to the left. + * If delta.fX is positive, moves Rect to the right. + * If delta.fY is negative, moves Rect upward. + * If delta.fY is positive, moves Rect downward. + */ + void offset(const Point& delta) { + this->offset(delta.x, delta.y); + } + + /** + * Offsets Rect so that left equals newX, and top equals newY. width and height are unchanged. + */ + void offsetTo(float newX, float newY) { + right += newX - left; + bottom += newY - top; + left = newX; + top = newY; + } + + /** + * Insets Rect by (dx, dy). + * If dx is positive, makes Rect narrower. + * If dx is negative, makes Rect wider. + * If dy is positive, makes Rect shorter. + * If dy is negative, makes Rect taller. + */ + void inset(float dx, float dy) { + left += dx; + top += dy; + right -= dx; + bottom -= dy; + } + + /** + * Outsets Rect by (dx, dy). + * If dx is positive, makes Rect wider. 
+ * If dx is negative, makes Rect narrower. + * If dy is positive, makes Rect taller. + * If dy is negative, makes Rect shorter. + */ + void outset(float dx, float dy) { + this->inset(-dx, -dy); + } + + /** + * Scale the rectangle by scaleX and scaleY. + */ + void scale(float scaleX, float scaleY); + + /** + * Returns true if Rect intersects r, and sets Rect to intersection. Returns false if Rect does + * not intersect r, and leaves Rect unchanged. Returns false if either r or Rect is empty, leaving + * Rect unchanged. + */ + bool intersect(const Rect& r) { + return this->intersect(r.left, r.top, r.right, r.bottom); + } + + /** + * Constructs Rect to intersect from (left, top, right, bottom). Does not sort construction. + * Returns true if Rect intersects construction, and sets Rect to intersection. + * Returns false if Rect does not intersect construction, and leaves Rect unchanged. + * Returns false if either construction or Rect is empty, leaving Rect unchanged. + */ + bool intersect(float l, float t, float r, float b); + + /** + * Returns true if a intersects b, and sets Rect to intersection. + * Returns false if a does not intersect b, and leaves Rect unchanged. + * Returns false if either a or b is empty, leaving Rect unchanged. + */ + bool intersect(const Rect& a, const Rect& b); + + /** + * Constructs Rect to intersect from (left, top, right, bottom). Does not sort construction. + * Returns true if Rect intersects construction. + * Returns false if either construction or Rect is empty, or do not intersect. + */ + bool intersects(float l, float t, float r, float b) const { + return Intersects(left, top, right, bottom, l, t, r, b); + } + + /** + * Returns true if Rect intersects r. Returns false if either r or Rect is empty, or do not + * intersect. + */ + bool intersects(const Rect& r) const { + return Intersects(left, top, right, bottom, r.left, r.top, r.right, r.bottom); + } + + /** + * Returns true if a intersects b. Returns false if either a or b is empty, or do not intersect. + */ + static bool Intersects(const Rect& a, const Rect& b) { + return Intersects(a.left, a.top, a.right, a.bottom, b.left, b.top, b.right, b.bottom); + } + + /** + * Constructs Rect to intersect from (left, top, right, bottom). Does not sort construction. + * Sets Rect to the union of itself and the construction. Has no effect if construction is empty. + * Otherwise, if Rect is empty, sets Rect to construction. + */ + void join(float l, float t, float r, float b); + + /** + * Sets Rect to the union of itself and r. Has no effect if r is empty. Otherwise, if Rect is + * empty, sets Rect to r. + */ + void join(const Rect& r) { + this->join(r.left, r.top, r.right, r.bottom); + } + + /** + * Returns true if: left <= x < right && top <= y < bottom. Returns false if Rect is empty. + */ + bool contains(float x, float y) const { + return x >= left && x < right && y >= top && y < bottom; + } + + /** + * Returns true if Rect contains r. Returns false if Rect is empty or r is empty. Rect contains r + * when Rect area completely includes r area. + */ + bool contains(const Rect& r) const { + return left <= r.left && top <= r.top && right >= r.right && bottom >= r.bottom; + } + + /** + * Sets Rect by discarding the fractional portion of left and top; and rounding up right and + * bottom. + */ + void roundOut() { + left = floorf(left); + top = floorf(top); + right = ceilf(right); + bottom = ceilf(bottom); + } + + /** + * Sets Rect by rounding of left, top, right and bottom. 
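+   *
+   * For example (illustrative): a Rect of (0.4, 0.6, 10.5, 19.2) becomes (0, 1, 11, 19) after
+   * round(), while roundOut() on the same Rect would produce (0, 0, 11, 20).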
+ */ + void round() { + left = roundf(left); + top = roundf(top); + right = roundf(right); + bottom = roundf(bottom); + } + + /** + * Swaps left and right if left is greater than right; and swaps top and bottom if top is + * greater than bottom. Result may be empty; and width() and height() will be zero or positive. + */ + void sort() { + if (left > right) { + std::swap(left, right); + } + + if (top > bottom) { + std::swap(top, bottom); + } + } + + /** + * Returns Rect with left and right swapped if left is greater than right, and with top and + * bottom swapped if top is greater than bottom. Result may be empty, and width() and height() + * will be zero or positive. + */ + Rect makeSorted() const { + return MakeLTRB(std::min(left, right), std::min(top, bottom), std::max(left, right), + std::max(top, bottom)); + } + + private: + static bool Intersects(float al, float at, float ar, float ab, float bl, float bt, float br, + float bb) { + float L = al > bl ? al : bl; + float R = ar < br ? ar : br; + float T = at > bt ? at : bt; + float B = ab < bb ? ab : bb; + return L < R && T < B; + } +}; + +/** + * Round Rect. + */ +struct RRect { + Rect rect = Rect::MakeEmpty(); + Point radii = Point::Zero(); + + /** + * Scale the round rectangle by scaleX and scaleY. + */ + void scale(float scaleX, float scaleY); +}; +} // namespace tgfx diff --git a/include/tgfx/core/SamplingOptions.h b/include/tgfx/core/SamplingOptions.h new file mode 100644 index 00000000..1e7e85e4 --- /dev/null +++ b/include/tgfx/core/SamplingOptions.h @@ -0,0 +1,59 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +enum class FilterMode { + /** + * Single sample point (nearest neighbor) + */ + Nearest, + + /** + * Interpolate between 2x2 sample points (bi-linear interpolation) + */ + Linear, +}; + +enum class MipMapMode { + /** + * ignore mipmap levels, sample from the "base" + */ + None, + /** + * Sample from the nearest level + */ + Nearest, + /** + * Interpolate between the two nearest levels + */ + Linear, +}; + +struct SamplingOptions { + SamplingOptions() = default; + + explicit SamplingOptions(FilterMode filterMode, MipMapMode mipMapMode = MipMapMode::None) + : filterMode(filterMode), mipMapMode(mipMapMode) { + } + + const FilterMode filterMode = FilterMode::Linear; + const MipMapMode mipMapMode = MipMapMode::None; +}; +} // namespace tgfx diff --git a/include/tgfx/core/Shader.h b/include/tgfx/core/Shader.h new file mode 100644 index 00000000..a911e622 --- /dev/null +++ b/include/tgfx/core/Shader.h @@ -0,0 +1,140 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include <memory> +#include "tgfx/core/BlendMode.h" +#include "tgfx/core/Color.h" +#include "tgfx/core/ColorFilter.h" +#include "tgfx/core/Image.h" +#include "tgfx/core/Matrix.h" +#include "tgfx/core/Point.h" +#include "tgfx/core/SamplingOptions.h" +#include "tgfx/core/TileMode.h" + +namespace tgfx { +struct FPArgs; +class Texture; +class FragmentProcessor; + +/** + * Shaders specify the source color(s) for what is being drawn. If a paint has no shader, then the + * paint's color is used. If the paint has a shader, then the shader's color(s) are used instead, but + * they are modulated by the paint's alpha. + */ +class Shader { + public: + /** + * Creates a shader that draws the specified color. + */ + static std::shared_ptr<Shader> MakeColorShader(Color color); + + /** + * Creates a shader that draws the specified image. The tile modes will be ignored if the image is + * an RGBAAA Image. + */ + static std::shared_ptr<Shader> MakeImageShader(std::shared_ptr<Image> image, + TileMode tileModeX = TileMode::Clamp, + TileMode tileModeY = TileMode::Clamp, + SamplingOptions sampling = SamplingOptions()); + + static std::shared_ptr<Shader> MakeBlend(BlendMode mode, std::shared_ptr<Shader> dst, + std::shared_ptr<Shader> src); + + /** + * Returns a shader that generates a linear gradient between the two specified points. + * @param startPoint The start point for the gradient. + * @param endPoint The end point for the gradient. + * @param colors The array of colors, to be distributed between the two points. + * @param positions May be empty.
The relative position of each corresponding color in the colors
+   * array. If this is empty, the colors are distributed evenly between the start and end point.
+   * If this is not empty, the values must begin with 0, end with 1.0, and intermediate values must
+   * be strictly increasing.
+   */
+  static std::shared_ptr<Shader> MakeLinearGradient(const Point& startPoint, const Point& endPoint,
+                                                    const std::vector<Color>& colors,
+                                                    const std::vector<float>& positions);
+
+  /**
+   * Returns a shader that generates a radial gradient given the center and radius.
+   * @param center The center of the circle for this gradient
+   * @param radius Must be positive. The radius of the circle for this gradient.
+   * @param colors The array of colors, to be distributed between the center and edge of the circle.
+   * @param positions May be empty. The relative position of each corresponding color in the colors
+   * array. If this is empty, the colors are distributed evenly between the start and end point.
+   * If this is not empty, the values must begin with 0, end with 1.0, and intermediate values must
+   * be strictly increasing.
+   */
+  static std::shared_ptr<Shader> MakeRadialGradient(const Point& center, float radius,
+                                                    const std::vector<Color>& colors,
+                                                    const std::vector<float>& positions);
+
+  /**
+   * Returns a shader that generates a sweep gradient given a center.
+   * @param center The center of the circle for this gradient
+   * @param startAngle Start of the angular range, corresponding to pos == 0.
+   * @param endAngle End of the angular range, corresponding to pos == 1.
+   * @param colors The array of colors, to be distributed around the center, within the gradient
+   * angle range.
+   * @param positions May be empty. The relative position of each corresponding color in the colors
+   * array. If this is empty, the colors are distributed evenly between the start and end point.
+   * If this is not empty, the values must begin with 0, end with 1.0, and intermediate values must
+   * be strictly increasing.
+   */
+  static std::shared_ptr<Shader> MakeSweepGradient(const Point& center, float startAngle,
+                                                   float endAngle, const std::vector<Color>& colors,
+                                                   const std::vector<float>& positions);
+
+  virtual ~Shader() = default;
+
+  /**
+   * Returns true if the shader is guaranteed to produce only opaque colors, subject to the Paint
+   * using the shader to apply an opaque alpha value. Subclasses should override this to allow some
+   * optimizations.
+   */
+  virtual bool isOpaque() const {
+    return false;
+  }
+
+  /**
+   * Returns a shader that will apply the specified localMatrix to this shader. The specified
+   * matrix will be applied before any matrix associated with this shader.
+   */
+  std::shared_ptr<Shader> makeWithPreLocalMatrix(const Matrix& matrix) const {
+    return makeWithLocalMatrix(matrix, true);
+  }
+
+  /**
+   * Returns a shader that will apply the specified localMatrix to this shader. The specified
+   * matrix will be applied after any matrix associated with this shader.
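// Illustrative usage sketch (editorial example): a two-stop linear gradient; an empty
// positions vector distributes the colors evenly between the two points. Color::White(),
// Color::Black() and Point::Make() are assumed helpers from elsewhere in this patch.
//
//   std::vector<tgfx::Color> colors = {tgfx::Color::White(), tgfx::Color::Black()};
//   auto shader = tgfx::Shader::MakeLinearGradient(tgfx::Point::Make(0, 0),
//                                                  tgfx::Point::Make(0, 100), colors, {});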
+ */ + std::shared_ptr makeWithPostLocalMatrix(const Matrix& matrix) const { + return makeWithLocalMatrix(matrix, false); + } + + std::shared_ptr makeWithColorFilter(std::shared_ptr colorFilter) const; + + virtual std::unique_ptr asFragmentProcessor(const FPArgs& args) const = 0; + + protected: + virtual std::shared_ptr makeWithLocalMatrix(const Matrix& matrix, bool isPre) const; + + std::weak_ptr weakThis; +}; +} // namespace tgfx diff --git a/include/tgfx/core/Shape.h b/include/tgfx/core/Shape.h new file mode 100644 index 00000000..e82c015e --- /dev/null +++ b/include/tgfx/core/Shape.h @@ -0,0 +1,97 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Color.h" +#include "tgfx/core/Path.h" +#include "tgfx/core/Stroke.h" +#include "tgfx/core/TextBlob.h" +#include "tgfx/gpu/ResourceKey.h" +#include "tgfx/gpu/SurfaceOptions.h" + +namespace tgfx { +class DrawOp; + +class GpuPaint; + +/** + * Represents a geometric shape that can be used as a drawing mask. Shape is usually used to cache a + * complex Path or TextBlob for frequent drawing. Unlike Path or TextBlob, Shape's resolution is + * fixed after it is created unless it represents a simple rect or rrect. Therefore, drawing a Shape + * with scale factors greater than 1.0 may result in blurred output. Shape is thread safe. + */ +class Shape { + public: + /** + * Creates a Shape from the fills of the given path after being scaled by the resolutionScale. + * Returns nullptr if the path is empty or resolutionScale is less than 0. + */ + static std::shared_ptr MakeFromFill(const Path& path, float resolutionScale = 1.0f); + + /** + * Creates a Shape from the fills of the given textBlob after being scaled by the resolutionScale. + * Returns nullptr if textBlob contains color glyphs or resolutionScale is less than 0. + */ + static std::shared_ptr MakeFromFill(std::shared_ptr textBlob, + float resolutionScale = 1.0f); + + /** + * Creates a Shape from the strokes of the given path after being scaled by the resolutionScale. + * Returns nullptr if path is empty or resolutionScale is less than 0. + */ + static std::shared_ptr MakeFromStroke(const Path& path, const Stroke& stroke, + float resolutionScale = 1.0f); + + /** + * Creates a Shape from the strokes of the given textBlob after being scaled by the + * resolutionScale. Returns nullptr if textBlob contains color glyphs or resolutionScale is less + * than 0. + */ + static std::shared_ptr MakeFromStroke(std::shared_ptr textBlob, + const Stroke& stroke, float resolutionScale = 1.0f); + + virtual ~Shape() = default; + + /** + * Returns the resolutionScale passed in when creating the Shape. 
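// Illustrative usage sketch (editorial example): caching a complex path as a Shape; 'path'
// stands for an existing tgfx::Path. Because a Shape's resolution is fixed at creation, pick
// a resolutionScale matching the largest scale it will be drawn at.
//
//   auto shape = tgfx::Shape::MakeFromFill(path, 2.0f);
//   if (shape != nullptr) {
//     // reuse 'shape' across frames instead of re-rasterizing 'path'
//   }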
+ */ + float resolutionScale() const { + return _resolutionScale; + } + + /** + * Returns the bounds of the Shape after being applied the resolutionScale. + */ + virtual Rect getBounds() const = 0; + + protected: + UniqueKey uniqueKey = {}; + + explicit Shape(float resolutionScale); + + private: + std::weak_ptr weakThis; + float _resolutionScale = 1.0f; + + virtual std::unique_ptr makeOp(GpuPaint* paint, const Matrix& viewMatrix, + uint32_t surfaceFlags) const = 0; + + friend class Canvas; +}; +} // namespace tgfx diff --git a/include/tgfx/core/Size.h b/include/tgfx/core/Size.h new file mode 100644 index 00000000..072ce868 --- /dev/null +++ b/include/tgfx/core/Size.h @@ -0,0 +1,176 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include + +namespace tgfx { +/** + * ISize holds two 32-bit integer dimensions. + */ +struct ISize { + /** + * Span on the x-axis. + */ + int width; + + /** + * Span on the y-axis. + */ + int height; + + static ISize Make(int w, int h) { + return {w, h}; + } + + static ISize MakeEmpty() { + return {0, 0}; + } + + void set(int w, int h) { + *this = ISize{w, h}; + } + + /** + * Returns true if width == 0 && height == 0 + */ + bool isZero() const { + return 0 == width && 0 == height; + } + + /** + * Returns true if either width or height are <= 0 + */ + bool isEmpty() const { + return width <= 0 || height <= 0; + } + + /** + * Set the width and height to 0. + */ + void setEmpty() { + *this = ISize{0, 0}; + } + + /** + * Returns true if a is equivalent to b. + */ + friend bool operator==(const ISize& a, const ISize& b) { + return a.width == b.width && a.height == b.height; + } + + /** + * Returns true if a is not equivalent to b. + */ + friend bool operator!=(const ISize& a, const ISize& b) { + return a.width != b.width || a.height != b.height; + } +}; + +/** + * Size holds two 32-bit floating dimensions. + */ +struct Size { + /** + * Span on the x-axis. + */ + float width; + + /** + * Span on the y-axis. 
+ */ + float height; + + static Size Make(float w, float h) { + return {w, h}; + } + + static Size Make(const ISize& src) { + return {static_cast(src.width), static_cast(src.height)}; + } + + Size& operator=(const ISize& src) { + return *this = Size{static_cast(src.width), static_cast(src.height)}; + } + + static Size MakeEmpty() { + return {0, 0}; + } + + void set(float w, float h) { + *this = Size{w, h}; + } + + /** + * Returns true if width == 0 && height == 0 + */ + bool isZero() const { + return 0 == width && 0 == height; + } + + /** + * Returns true if either width or height are <= 0 + */ + bool isEmpty() const { + return width <= 0 || height <= 0; + } + + /** + * Set the width and height to 0. + */ + void setEmpty() { + *this = Size{0, 0}; + } + + /** + * Returns a ISize by rounding of width and height. + */ + ISize toRound() { + return {static_cast(roundf(width)), static_cast(roundf(height))}; + } + + /** + * Returns a ISize by ceiling of width and height. + */ + ISize toCeil() { + return {static_cast(ceilf(width)), static_cast(ceilf(height))}; + } + + /** + * Returns a ISize by flooring of width and height. + */ + ISize toFloor() { + return {static_cast(floorf(width)), static_cast(floorf(height))}; + } + + /** + * Returns true if a is equivalent to b. + */ + friend bool operator==(const Size& a, const Size& b) { + return a.width == b.width && a.height == b.height; + } + + /** + * Returns true if a is not equivalent to b. + */ + friend bool operator!=(const Size& a, const Size& b) { + return a.width != b.width || a.height != b.height; + } +}; +} // namespace tgfx diff --git a/include/tgfx/core/Stroke.h b/include/tgfx/core/Stroke.h new file mode 100644 index 00000000..a8937bba --- /dev/null +++ b/include/tgfx/core/Stroke.h @@ -0,0 +1,96 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * Cap draws at the beginning and end of an open path contour. + */ +enum class LineCap { + /** + * No stroke extension. + */ + Butt, + /** + * Adds circle + */ + Round, + /** + * Adds square + */ + Square +}; + +/** + * Join specifies how corners are drawn when a shape is stroked. Join affects the four corners + * of a stroked rectangle, and the connected segments in a stroked path. Choose miter join to draw + * sharp corners. Choose round join to draw a circle with a radius equal to the stroke width on + * top of the corner. Choose bevel join to minimally connect the thick strokes. + */ +enum class LineJoin { + /** + * Extends to miter limit. + */ + Miter, + /** + * Adds circle. + */ + Round, + /** + * Connects outside edges. 
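// Illustrative usage sketch (editorial example): converting the float Size type to an
// integer ISize with the three rounding policies declared above.
//
//   tgfx::Size size = tgfx::Size::Make(200.6f, 100.2f);
//   tgfx::ISize rounded = size.toRound();  // {201, 100}
//   tgfx::ISize ceiled  = size.toCeil();   // {201, 101}
//   tgfx::ISize floored = size.toFloor();  // {200, 100}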
+ */ + Bevel +}; + +/** + * Stroke controls options applied when stroking geometries (paths, glyphs). + */ +class Stroke { + public: + Stroke() = default; + + /** + * Creates a new Stroke width specified options. + */ + explicit Stroke(float width, LineCap cap = LineCap::Butt, LineJoin join = LineJoin::Miter, + float miterLimit = 4.0f) + : width(width), cap(cap), join(join), miterLimit(miterLimit) { + } + + /** + * The thickness of the pen used to outline the paths or glyphs. + */ + float width = 1.0; + + /** + * The geometry drawn at the beginning and end of strokes. + */ + LineCap cap = LineCap::Butt; + + /** + * The geometry drawn at the corners of strokes. + */ + LineJoin join = LineJoin::Miter; + + /** + * The limit at which a sharp corner is drawn beveled. + */ + float miterLimit = 4.0f; +}; +} // namespace tgfx diff --git a/include/tgfx/core/TextBlob.h b/include/tgfx/core/TextBlob.h new file mode 100644 index 00000000..d79709e2 --- /dev/null +++ b/include/tgfx/core/TextBlob.h @@ -0,0 +1,57 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Font.h" +#include "tgfx/core/Path.h" +#include "tgfx/core/Stroke.h" + +namespace tgfx { +/** + * TextBlob combines multiple glyphs, Font, and positions into an immutable container. + */ +class TextBlob { + public: + /** + * Creates a new TextBlob from the given glyphs, positions and text font. + */ + static std::shared_ptr MakeFrom(const GlyphID glyphIDs[], const Point positions[], + size_t glyphCount, const Font& font); + + virtual ~TextBlob() = default; + + /** + * Returns true if this TextBlob has color glyphs, for example, color emojis. + */ + virtual bool hasColor() const = 0; + + /** + * Returns the bounds of the text blob's glyphs. + */ + virtual Rect getBounds(const Stroke* stroke = nullptr) const = 0; + + /** + * Creates a path corresponding to glyph shapes in the text blob. Copies the glyph fills to the + * path if the stroke is not passed in, otherwise copies the glyph outlines to the path. If text + * font is backed by bitmaps or cannot generate outlines, returns false and leaves the path + * unchanged. + */ + virtual bool getPath(Path* path, const Stroke* stroke = nullptr) const = 0; +}; +} // namespace tgfx diff --git a/include/tgfx/core/TileMode.h b/include/tgfx/core/TileMode.h new file mode 100644 index 00000000..59d05ee5 --- /dev/null +++ b/include/tgfx/core/TileMode.h @@ -0,0 +1,44 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. 
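// Illustrative usage sketch (editorial example): a 3-pixel stroke with round caps and joins,
// applied to a path via Shape::MakeFromStroke from earlier in this patch; 'path' stands for
// an existing tgfx::Path.
//
//   tgfx::Stroke stroke(3.0f, tgfx::LineCap::Round, tgfx::LineJoin::Round);
//   auto outline = tgfx::Shape::MakeFromStroke(path, stroke);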
+// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +enum class TileMode { + /** + * Replicate the edge color if the shader draws outside of its original bounds. + */ + Clamp, + + /** + * Repeat the shader's image horizontally and vertically. + */ + Repeat, + + /** + * Repeat the shader's image horizontally and vertically, alternating mirror images so that + * adjacent images always seam. + */ + Mirror, + + /** + * Only draw within the original domain, return transparent-black everywhere else. + */ + Decal +}; +} // namespace tgfx diff --git a/include/tgfx/core/Typeface.h b/include/tgfx/core/Typeface.h new file mode 100644 index 00000000..ed5c9fd3 --- /dev/null +++ b/include/tgfx/core/Typeface.h @@ -0,0 +1,154 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Data.h" +#include "tgfx/core/FontMetrics.h" +#include "tgfx/core/ImageBuffer.h" +#include "tgfx/core/Path.h" + +namespace tgfx { +/** + * 16 bit unsigned integer to hold a glyph index + */ +typedef uint16_t GlyphID; + +typedef uint32_t FontTableTag; + +/** + * A set of character glyphs and layout information for drawing text. + */ +class Typeface { + public: + /** + * Returns the default typeface reference, which is never nullptr. + */ + static std::shared_ptr MakeDefault(); + + /** + * Returns a typeface reference for the given font family and font style. If the family and + * style cannot be matched identically, a best match is found and returned, which is never + * nullptr. + */ + static std::shared_ptr MakeFromName(const std::string& fontFamily, + const std::string& fontStyle); + + /** + * Creates a new typeface for the given file path and ttc index. Returns nullptr if the typeface + * can't be created. + */ + static std::shared_ptr MakeFromPath(const std::string& fontPath, int ttcIndex = 0); + + /** + * Creates a new typeface for the given file bytes and ttc index. 
Returns nullptr if the typeface + * can't be created. + */ + static std::shared_ptr MakeFromBytes(const void* data, size_t length, int ttcIndex = 0); + + virtual ~Typeface() = default; + + /** + * Returns the uniqueID for the specified typeface. + */ + virtual uint32_t uniqueID() const = 0; + + /** + * Returns the family name of this typeface. + */ + virtual std::string fontFamily() const = 0; + + /** + * Returns the style name of this typeface. + */ + virtual std::string fontStyle() const = 0; + + /** + * Return the number of glyphs in this typeface. + */ + virtual int glyphsCount() const = 0; + + /** + * Returns the number of glyph space units per em for this typeface. + */ + virtual int unitsPerEm() const = 0; + + /** + * Returns true if this typeface has color glyphs, for example, color emojis. + */ + virtual bool hasColor() const = 0; + + /** + * Returns the glyph ID corresponds to the specified glyph name. The glyph name must be in utf-8 + * encoding. Returns 0 if the glyph name is not associated with this typeface. + */ + virtual GlyphID getGlyphID(const std::string& name) const = 0; + + virtual std::shared_ptr getBytes() const = 0; + + /** + * Returns an immutable copy of the requested font table, or nullptr if that table was not found. + */ + virtual std::shared_ptr copyTableData(FontTableTag tag) const = 0; + + protected: + /** + * Returns the FontMetrics associated with this typeface. + */ + virtual FontMetrics getMetrics(float size) const = 0; + + /** + * Returns the bounding box of the specified glyph. The bounds is specified in glyph space + * units. + */ + virtual Rect getBounds(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic) const = 0; + + /** + * Returns the advance for specified glyph. The value is specified in glyph space units. + * @param glyphID The id of specified glyph. + * @param vertical The intended drawing orientation of the glyph. + */ + virtual float getAdvance(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + bool verticalText) const = 0; + + /** + * Creates a path corresponding to glyph outline. If glyph has an outline, copies outline to path + * and returns true. If glyph is described by a bitmap, returns false and ignores path parameter. + * The points in path are specified in glyph space units. + */ + virtual bool getPath(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + Path* path) const = 0; + + /** + * Creates an image buffer capturing the content of the specified glyph. The returned matrix + * should apply to the glyph image when drawing. + */ + virtual std::shared_ptr getGlyphImage(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic, Matrix* matrix) const = 0; + + /** + * Calculates the offset from the default (horizontal) origin to the vertical origin for specified + * glyph. The offset is specified in glyph space units. + */ + virtual Point getVerticalOffset(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic) const = 0; + + friend class Font; +}; +} // namespace tgfx diff --git a/include/tgfx/core/YUVColorSpace.h b/include/tgfx/core/YUVColorSpace.h new file mode 100644 index 00000000..9ea381ad --- /dev/null +++ b/include/tgfx/core/YUVColorSpace.h @@ -0,0 +1,62 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
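// Illustrative usage sketch (editorial example): loading a typeface and looking up a glyph;
// the font path is a placeholder and MakeDefault() is the documented fallback.
//
//   auto typeface = tgfx::Typeface::MakeFromPath("/path/to/font.ttf");
//   if (typeface == nullptr) {
//     typeface = tgfx::Typeface::MakeDefault();  // never returns nullptr, per the docs above
//   }
//   tgfx::GlyphID glyphID = typeface->getGlyphID("A");  // 0 if the glyph is not present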
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * Describes color range of YUV pixels. The color mapping from YUV to RGB varies depending on the + * source. YUV pixels may be generated by JPEG images, standard video streams, or high-definition + * video streams. Each has its own mapping from YUV to RGB. JPEG YUV values encode the full range of + * 0 to 255 for all three components. Video YUV values often range from 16 to 235 for Y and from 16 + * to 240 for U and V (limited). Details of encoding and conversion to RGB are described in YCbCr + * color space. + */ +enum class YUVColorSpace { + /** + * Describes SDTV limited range. + */ + BT601_LIMITED, + /** + * Describes SDTV full range. + */ + BT601_FULL, + /** + * Describes HDTV limited range. + */ + BT709_LIMITED, + /** + * Describes HDTV full range. + */ + BT709_FULL, + /** + * Describes UHDTV limited range. + */ + BT2020_LIMITED, + /** + * Describes UHDTV full range. + */ + BT2020_FULL, + /** + * Describes full range. + */ + JPEG_FULL +}; + +bool IsLimitedYUVColorRange(YUVColorSpace colorSpace); +} // namespace tgfx diff --git a/include/tgfx/core/YUVData.h b/include/tgfx/core/YUVData.h new file mode 100644 index 00000000..e891e47a --- /dev/null +++ b/include/tgfx/core/YUVData.h @@ -0,0 +1,92 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include + +namespace tgfx { +/** + * YUVData represents an array of yuv pixels located in the CPU memory. YUVData holds several planes + * of immutable buffers. Not only is the YUVData immutable, but the internal buffers are guaranteed + * to always be the same for the life of this instance. + */ +class YUVData { + public: + /** + * The number of planes required by the I420 format. + */ + static constexpr size_t I420_PLANE_COUNT = 3; + + /** + * The number of planes required by the NV12 format. 
+ */ + static constexpr size_t NV12_PLANE_COUNT = 2; + + /** + * Function that, if provided, will be called when the YUVData goes out of scope, allowing for + * custom freeing of the YUVData's contents. + */ + typedef void (*ReleaseProc)(void* context, const void** data, size_t planeCount); + + /** + * Creates a YUVData object, taking ownership of the specified data, and using the releaseProc + * to free it. If the releaseProc is nullptr, the caller must ensure that data are valid for the + * lifetime of the returned YUVData. + * @param width The width of the yuv pixels + * @param height The height of the yuv pixels. + * @param data The array of base addresses for the planes. + * @param rowBytes The array of bytes-per-row values for the planes. + * @param planeCount The number of planes. + * @param releaseProc The callback function that gets called when the YUVData is destroyed. + * @param context A pointer to user data identifying the YUVData. This value is passed to your + * release callback function. + */ + static std::shared_ptr MakeFrom(int width, int height, const void** data, + const size_t* rowBytes, size_t planeCount, + ReleaseProc releaseProc = nullptr, + void* context = nullptr); + + virtual ~YUVData() = default; + + /** + * Returns the width of the yuv pixels. + */ + virtual int width() const = 0; + + /** + * Returns the height of the yuv pixels. + */ + virtual int height() const = 0; + + /** + * Returns number of planes in this yuv data. + */ + virtual size_t planeCount() const = 0; + + /** + * Returns the base address of the plane at the specified plane index. + */ + virtual const void* getBaseAddressAt(int planeIndex) const = 0; + + /** + * Returns the number of bytes per row for a plane at the specified plane index. + */ + virtual size_t getRowBytesAt(int planeIndex) const = 0; +}; +} // namespace tgfx diff --git a/include/tgfx/gpu/Backend.h b/include/tgfx/gpu/Backend.h new file mode 100644 index 00000000..527beac0 --- /dev/null +++ b/include/tgfx/gpu/Backend.h @@ -0,0 +1,212 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/metal/MtlTypes.h" +#include "tgfx/opengl/GLTypes.h" + +namespace tgfx { +/** + * Possible GPU backend APIs that may be used by TGFX. + */ +enum class Backend { + /** + * Mock is a backend that does not draw anything. It is used for unit tests and to measure CPU + * overhead. + */ + MOCK, + OPENGL, + METAL, + VULKAN, +}; + +/** + * Wrapper class for passing into and receiving data from TGFX about a backend texture object. + */ +class BackendTexture { + public: + /** + * Creates an invalid backend texture. 
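// Illustrative usage sketch (editorial example): wrapping two existing NV12 planes without
// transferring ownership (no ReleaseProc, so the planes must outlive the YUVData); 'yPlane',
// 'uvPlane' and the row-byte values are placeholders.
//
//   const void* planes[] = {yPlane, uvPlane};
//   const size_t rowBytes[] = {yRowBytes, uvRowBytes};
//   auto yuvData = tgfx::YUVData::MakeFrom(width, height, planes, rowBytes,
//                                          tgfx::YUVData::NV12_PLANE_COUNT);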
+ */ + BackendTexture() : _width(0), _height(0) { + } + + /** + * Creates an OpenGL backend texture. + */ + BackendTexture(const GLTextureInfo& glInfo, int width, int height); + + /** + * Creates a Metal backend texture. + */ + BackendTexture(const MtlTextureInfo& mtlInfo, int width, int height); + + BackendTexture(const BackendTexture& that); + + BackendTexture& operator=(const BackendTexture& that); + + /** + * Returns true if the backend texture has been initialized. + */ + bool isValid() const { + return _width > 0 && _height > 0; + } + + /** + * Returns the width of the texture. + */ + int width() const { + return _width; + } + + /** + * Returns the height of the texture. + */ + int height() const { + return _height; + } + + /** + * Returns the backend API of this texture. + */ + Backend backend() const { + return _backend; + } + + /** + * If the backend API is GL, copies a snapshot of the GLTextureInfo struct into the passed in + * pointer and returns true. Otherwise, returns false if the backend API is not GL. + */ + bool getGLTextureInfo(GLTextureInfo* glTextureInfo) const; + + /** + * If the backend API is Metal, copies a snapshot of the GrMtlTextureInfo struct into the passed + * in pointer and returns true. Otherwise, returns false if the backend API is not Metal. + */ + bool getMtlTextureInfo(MtlTextureInfo* mtlTextureInfo) const; + + private: + Backend _backend = Backend::MOCK; + int _width = 0; + int _height = 0; + + union { + GLTextureInfo glInfo; + MtlTextureInfo mtlInfo; + }; +}; + +/** + * Wrapper class for passing into and receiving data from TGFX about a backend render target object. + */ +class BackendRenderTarget { + public: + /** + * Creates an invalid backend render target. + */ + BackendRenderTarget() : _width(0), _height(0) { + } + + /** + * Creates an OpenGL backend render target. + */ + BackendRenderTarget(const GLFrameBufferInfo& glInfo, int width, int height); + + /** + * Creates an Metal backend render target. + */ + BackendRenderTarget(const MtlTextureInfo& mtlInfo, int width, int height); + + BackendRenderTarget(const BackendRenderTarget& that); + + BackendRenderTarget& operator=(const BackendRenderTarget&); + + /** + * Returns true if the backend texture has been initialized. + */ + bool isValid() const { + return _width > 0 && _height > 0; + } + + /** + * Returns the width of this render target. + */ + int width() const { + return _width; + } + + /** + * Returns the height of this render target. + */ + int height() const { + return _height; + } + + /** + * Returns the backend API of this render target. + */ + Backend backend() const { + return _backend; + } + + /** + * If the backend API is GL, copies a snapshot of the GLFramebufferInfo struct into the passed + * in pointer and returns true. Otherwise, returns false if the backend API is not GL. + */ + bool getGLFramebufferInfo(GLFrameBufferInfo* glFrameBufferInfo) const; + + /** + * If the backend API is Metal, copies a snapshot of the MtlTextureInfo struct into the passed + * in pointer and returns true. Otherwise, returns false if the backend API is not Metal. + */ + bool getMtlTextureInfo(MtlTextureInfo* mtlTextureInfo) const; + + private: + Backend _backend = Backend::MOCK; + int _width = 0; + int _height = 0; + union { + GLFrameBufferInfo glInfo; + MtlTextureInfo mtlInfo; + }; +}; + +/** + * Wrapper class for passing into and receiving data from TGFX about a backend semaphore object. 
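// Illustrative usage sketch (editorial example): wrapping an existing OpenGL texture in a
// BackendTexture and reading the info back; 'textureID', 'width' and 'height' are
// placeholders, and the GLTextureInfo field name is an assumption.
//
//   tgfx::GLTextureInfo glInfo = {};
//   glInfo.id = textureID;
//   tgfx::BackendTexture backendTexture(glInfo, width, height);
//   tgfx::GLTextureInfo copied = {};
//   bool isGL = backendTexture.getGLTextureInfo(&copied);  // true only for the OpenGL backend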
+ */ +class BackendSemaphore { + public: + BackendSemaphore(); + + bool isInitialized() const { + return _isInitialized; + } + + void initGL(void* sync); + + void* glSync() const; + + private: + Backend _backend = Backend::MOCK; + union { + void* _glSync; + }; + bool _isInitialized; +}; +} // namespace tgfx diff --git a/include/tgfx/gpu/Caps.h b/include/tgfx/gpu/Caps.h new file mode 100644 index 00000000..d74c4e22 --- /dev/null +++ b/include/tgfx/gpu/Caps.h @@ -0,0 +1,52 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/gpu/PixelFormat.h" + +namespace tgfx { +class Swizzle; + +class Caps { + public: + virtual ~Caps() = default; + + virtual const Swizzle& getWriteSwizzle(PixelFormat pixelFormat) const = 0; + + virtual bool isFormatRenderable(PixelFormat pixelFormat) const = 0; + + virtual int getSampleCount(int requestedCount, PixelFormat pixelFormat) const = 0; + + virtual int getMaxMipmapLevel(int width, int height) const = 0; + + bool floatIs32Bits = true; + int maxTextureSize = 0; + bool semaphoreSupport = false; + bool multisampleDisableSupport = false; + /** + * The CLAMP_TO_BORDER wrap mode for texture coordinates was added to desktop GL in 1.3, and + * GLES 3.2, but is also available in extensions. Vulkan and Metal always have support. + */ + bool clampToBorderSupport = true; + bool npotTextureTileSupport = true; // Vulkan and Metal always have support. + bool mipMapSupport = true; + bool textureBarrierSupport = false; + bool frameBufferFetchSupport = false; +}; +} // namespace tgfx diff --git a/include/tgfx/gpu/Context.h b/include/tgfx/gpu/Context.h new file mode 100644 index 00000000..d145cdf3 --- /dev/null +++ b/include/tgfx/gpu/Context.h @@ -0,0 +1,195 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/utils/BytesKey.h" +#include "tgfx/core/Color.h" +#include "tgfx/gpu/Backend.h" +#include "tgfx/gpu/Caps.h" +#include "tgfx/gpu/Device.h" + +namespace tgfx { +class ProgramCache; +class ResourceCache; +class DrawingManager; +class Gpu; +class ResourceProvider; +class ProxyProvider; + +class Context { + public: + virtual ~Context(); + + /** + * Returns the associated device. + */ + Device* device() const { + return _device; + } + + /** + * Returns the associated cache that manages the lifetime of all Program instances. + */ + ProgramCache* programCache() const { + return _programCache; + } + + /** + * Returns the associated cache that manages the lifetime of all Resource instances. + */ + ResourceCache* resourceCache() const { + return _resourceCache; + } + + DrawingManager* drawingManager() const { + return _drawingManager; + } + + ResourceProvider* resourceProvider() const { + return _resourceProvider; + } + + ProxyProvider* proxyProvider() const { + return _proxyProvider; + } + + /** + * Returns the number of bytes consumed by internal gpu caches. + */ + size_t memoryUsage() const; + + /** + * Returns the number of bytes held by purgeable resources. + */ + size_t purgeableBytes() const; + + /** + * Returns current cache limits of max gpu memory byte size. + */ + size_t getCacheLimit() const; + + /** + * Sets the cache limit of max gpu memory byte size. + */ + void setCacheLimit(size_t bytesLimit); + + /** + * Purges GPU resources that haven't been used the passed in time. + * @param purgeTime A timestamp previously returned by Clock::Now(). + * @param scratchResourcesOnly If true, the purgeable resources containing unique keys are spared. + * If false, then all purgeable resources will be deleted. + */ + void purgeResourcesNotUsedSince(int64_t purgeTime, bool scratchResourcesOnly = false); + + /** + * Purge unreferenced resources from the cache until the provided bytesLimit has been reached, or + * we have purged all unreferenced resources. Returns true if the total resource bytes is not over + * the specified bytesLimit after purging. + * @param bytesLimit The desired number of bytes after puring. + * @param scratchResourcesOnly If true, the purgeable resources containing unique keys are spared. + * If false, then all purgeable resources will be deleted. + */ + bool purgeResourcesUntilMemoryTo(size_t bytesLimit, bool scratchResourcesOnly = false); + + /** + * Inserts a GPU semaphore that the current GPU-backed API must wait on before executing any more + * commands on the GPU for this surface. Surface will take ownership of the underlying semaphore + * and delete it once it has been signaled and waited on. If this call returns false, then the + * GPU back-end will not wait on the passed semaphore, and the client will still own the + * semaphore. Returns true if GPU is waiting on the semaphore. + */ + bool wait(const BackendSemaphore& waitSemaphore); + + /** + * Apply all pending changes to the render target immediately. After issuing all commands, the + * semaphore will be signaled by the GPU. If the signalSemaphore is not null and uninitialized, + * a new semaphore is created and initializes BackendSemaphore. The caller must delete the + * semaphore returned in signalSemaphore. BackendSemaphore can be deleted as soon as this function + * returns. If the back-end API is OpenGL, only uninitialized backend semaphores are supported. 
+ * If false is returned, the GPU back-end did not create or add a semaphore to signal on the GPU; + * the caller should not instruct the GPU to wait on the semaphore. + */ + bool flush(BackendSemaphore* signalSemaphore = nullptr); + + /** + * Submit outstanding work to the gpu from all previously un-submitted flushes. The return + * value of the submit method will indicate whether the submission to the GPU was successful. + * + * If the call returns true, all previously passed in semaphores in flush calls will have been + * submitted to the GPU and they can safely be waited on. The caller should wait on those + * semaphores or perform some other global synchronization before deleting the semaphores. + * + * If it returns false, then those same semaphores will not have been submitted, and we will not + * try to submit them again. The caller is free to delete the semaphores at any time. + * + * If the syncCpu flag is true, this function will return once the gpu has finished with all + * submitted work. + */ + bool submit(bool syncCpu = false); + + /** + * Call to ensure all drawing to the context has been flushed and submitted to the underlying 3D + * API. This is equivalent to calling Context::flush() followed by Context::submit(syncCpu). + */ + void flushAndSubmit(bool syncCpu = false); + + /** + * Returns the GPU backend of this context. + */ + virtual Backend backend() const = 0; + + /** + * Returns the capability info of this context. + */ + virtual const Caps* caps() const = 0; + + /** + * The Context normally assumes that no outsider is setting state within the underlying GPU API's + * context/device/whatever. This call informs the context that the state was modified and it + * should resend. Shouldn't be called frequently for good performance. + */ + virtual void resetState() = 0; + + Gpu* gpu() { + return _gpu; + } + + protected: + explicit Context(Device* device); + + Gpu* _gpu = nullptr; + + private: + Device* _device = nullptr; + ProgramCache* _programCache = nullptr; + ResourceCache* _resourceCache = nullptr; + DrawingManager* _drawingManager = nullptr; + ResourceProvider* _resourceProvider = nullptr; + ProxyProvider* _proxyProvider = nullptr; + + void releaseAll(bool releaseGPU); + void onLocked(); + void onUnlocked(); + + friend class Device; + + friend class Resource; +}; + +} // namespace tgfx diff --git a/include/tgfx/gpu/Device.h b/include/tgfx/gpu/Device.h new file mode 100644 index 00000000..268ba86f --- /dev/null +++ b/include/tgfx/gpu/Device.h @@ -0,0 +1,70 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
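// Illustrative usage sketch (editorial example): the typical end-of-frame sequence on a
// locked Context, using the methods documented above.
//
//   context->flush();       // record all pending drawing into backend commands
//   context->submit(true);  // submit to the GPU and block until it finishes (syncCpu = true)
//   // or, equivalently: context->flushAndSubmit(true);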
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#pragma once
+
+#include
+#include "tgfx/core/Matrix.h"
+
+namespace tgfx {
+class Context;
+/**
+ * The GPU interface for drawing graphics.
+ */
+class Device {
+ public:
+  virtual ~Device();
+
+  /**
+   * Returns a global unique ID for this device.
+   */
+  uint32_t uniqueID() const {
+    return _uniqueID;
+  }
+
+  /**
+   * Locks the rendering context associated with this device. If another thread has already locked
+   * the device by lockContext(), a call to lockContext() will block execution until the device
+   * is available. The returned context can be used to draw graphics. Returns nullptr if the
+   * context cannot be locked on the calling thread, leaving the device unlocked.
+   */
+  Context* lockContext();
+
+  /**
+   * Unlocks the device. After this method is called, all subsequent calls on the previously
+   * returned context are invalid and may lead to a runtime crash.
+   */
+  void unlock();
+
+ protected:
+  std::mutex locker = {};
+  Context* context = nullptr;
+  std::weak_ptr<Device> weakThis;
+
+  Device();
+  void releaseAll();
+  virtual bool onLockContext();
+  virtual void onUnlockContext();
+
+ private:
+  uint32_t _uniqueID = 0;
+  bool contextLocked = false;
+
+  friend class ResourceCache;
+};
+} // namespace tgfx
diff --git a/include/tgfx/gpu/DoubleBufferedWindow.h b/include/tgfx/gpu/DoubleBufferedWindow.h
new file mode 100644
index 00000000..fddba2b0
--- /dev/null
+++ b/include/tgfx/gpu/DoubleBufferedWindow.h
@@ -0,0 +1,58 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
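// Illustrative usage sketch (editorial example): the lock/unlock pattern required before
// touching GPU resources, using the Device class declared above.
//
//   auto* context = device->lockContext();
//   if (context != nullptr) {
//     // ... create surfaces and draw with this context ...
//     device->unlock();
//   }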
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/gpu/Window.h" + +namespace tgfx { +class DoubleBufferedWindow : public Window { + public: + static std::shared_ptr Make(std::shared_ptr device, + const BackendTexture& frontBackendTexture, + const BackendTexture& backBackendTexture); + + static std::shared_ptr Make(std::shared_ptr device, + HardwareBufferRef frontBuffer, + HardwareBufferRef backBuffer); + + std::shared_ptr getBackSurface() const { + return backSurface; + } + + virtual bool isFront(const BackendTexture&) const { + return false; + } + + virtual bool isFront(HardwareBufferRef) const { + return false; + } + + ~DoubleBufferedWindow() override; + + protected: + explicit DoubleBufferedWindow(std::shared_ptr device); + + void onPresent(Context* context, int64_t presentationTime) override; + + virtual void onSwapSurfaces(Context*) = 0; + + std::shared_ptr frontSurface; + std::shared_ptr backSurface; +}; +} // namespace tgfx diff --git a/include/tgfx/gpu/ImageOrigin.h b/include/tgfx/gpu/ImageOrigin.h new file mode 100644 index 00000000..780c271f --- /dev/null +++ b/include/tgfx/gpu/ImageOrigin.h @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * Textures and Surfaces can be stored such that (0, 0) in texture space may correspond to + * either the top-left or bottom-left content pixel. + */ +enum class ImageOrigin { + /** + * The default origin of the native coordinate system in the GPU backend. For example, the + * SurfaceOrigin::TopLeft is actually the bottom-left origin in the OpenGL coordinate system for + * textures. Textures newly created by the backend API for off-screen rendering usually have a + * SurfaceOrigin::TopLeft origin. + */ + TopLeft, + + /** + * Use this origin to flip the content on the y-axis if the GPU backend has a different origin to + * your system views. It is usually used for on-screen rendering. + */ + BottomLeft +}; +} // namespace tgfx diff --git a/include/tgfx/gpu/PixelFormat.h b/include/tgfx/gpu/PixelFormat.h new file mode 100644 index 00000000..4702e32c --- /dev/null +++ b/include/tgfx/gpu/PixelFormat.h @@ -0,0 +1,56 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * Describes the possible pixel formats of a TextureSampler. + */ +enum class PixelFormat { + /** + * uninitialized. + */ + Unknown, + + /** + * Pixel with 8 bits for alpha. Each pixel is stored on 1 byte. + */ + ALPHA_8, + + /** + * Pixel with 8 bits for grayscale. Each pixel is stored on 1 byte. + */ + GRAY_8, + + /** + * Pixel with 8 bits for red, green. Each pixel is stored on 2 bytes. + */ + RG_88, + + /** + * Pixel with 8 bits for red, green, blue, alpha. Each pixel is stored on 4 bytes. + */ + RGBA_8888, + + /** + * Pixel with 8 bits for blue, green, red, alpha. Each pixel is stored on 4 bytes. + */ + BGRA_8888 +}; +} // namespace tgfx diff --git a/include/tgfx/gpu/Resource.h b/include/tgfx/gpu/Resource.h new file mode 100644 index 00000000..69d5159c --- /dev/null +++ b/include/tgfx/gpu/Resource.h @@ -0,0 +1,111 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/gpu/ResourceCache.h" +#include "tgfx/gpu/ResourceKey.h" + +namespace tgfx { +/** + * The base class for GPU resource. Overrides the onReleaseGPU() method to free all GPU resources. + * No backend API calls should be made during destructuring since there may be no GPU context which + * is current on the calling thread. Note: Resource is not thread safe, do not access any properties + * of a Resource unless its associated device is locked. + */ +class Resource { + public: + template + static std::shared_ptr Wrap(Context* context, T* resource) { + resource->context = context; + static_cast(resource)->computeScratchKey(&resource->scratchKey); + return std::static_pointer_cast(context->resourceCache()->addResource(resource)); + } + + virtual ~Resource() = default; + + /** + * Retrieves the context associated with this Resource. + */ + Context* getContext() const { + return context; + } + + /** + * Retrieves the amount of GPU memory used by this resource in bytes. + */ + virtual size_t memoryUsage() const = 0; + + /** + * Assigns a UniqueKey to the resource. The resource will be findable via this UniqueKey using + * ResourceCache.findUniqueResource(). 
This method is not thread safe, call it only when the + * associated context is locked. + */ + void assignUniqueKey(const UniqueKey& newKey); + + /* + * Removes the UniqueKey from the resource. This method is not thread safe, call it only when + * the associated context is locked. + */ + void removeUniqueKey(); + + /** + * Marks the associated UniqueKey as expired. This method is thread safe. + */ + void markUniqueKeyExpired(); + + /** + * Returns true if the Resource has the same UniqueKey to the newKey and not expired. This method + * is thread safe. + */ + bool hasUniqueKey(const UniqueKey& newKey) const; + + protected: + Context* context = nullptr; + + /** + * Overridden to compute a scratchKey to make this Resource reusable. + */ + virtual void computeScratchKey(BytesKey*) const { + } + + private: + std::weak_ptr weakThis; + ScratchKey scratchKey = {}; + UniqueKey uniqueKey = {}; + std::atomic_uint32_t uniqueKeyGeneration = 0; + std::list::iterator cachedPosition; + int64_t lastUsedTime = 0; + + bool isPurgeable() const { + return weakThis.expired(); + } + + bool hasValidUniqueKey() const { + return !uniqueKey.empty() && !uniqueKey.unique() && uniqueKey.uniqueID() == uniqueKeyGeneration; + } + + /** + * Overridden to free GPU resources in the backend API. + */ + virtual void onReleaseGPU() = 0; + + friend class ResourceCache; +}; +} // namespace tgfx diff --git a/include/tgfx/gpu/ResourceCache.h b/include/tgfx/gpu/ResourceCache.h new file mode 100644 index 00000000..81706f7c --- /dev/null +++ b/include/tgfx/gpu/ResourceCache.h @@ -0,0 +1,137 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include +#include "tgfx/gpu/Context.h" +#include "tgfx/gpu/ResourceKey.h" + +namespace tgfx { +class Resource; + +/** + * Manages the lifetime of all Resource instances. + */ +class ResourceCache { + public: + explicit ResourceCache(Context* context); + + /** + * Returns true if there is no cache at all. + */ + bool empty() const; + + /** + * Returns the number of bytes consumed by resources. + */ + size_t getResourceBytes() const { + return totalBytes; + } + + /** + * Returns the number of bytes held by purgeable resources. + */ + size_t getPurgeableBytes() const { + return purgeableBytes; + } + + /** + * Returns current cache limits of max gpu memory byte size. + */ + size_t getCacheLimit() const { + return maxBytes; + } + + /** + * Sets the cache limits of max gpu memory byte size. + */ + void setCacheLimit(size_t bytesLimit); + + /** + * Returns a scratch resource in the cache by the specified ScratchKey. 
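// Illustrative usage sketch (editorial example): tagging a cached resource so it can be
// looked up again later; 'resource' stands for an existing Resource subclass instance and
// the associated context is assumed to be locked.
//
//   auto key = tgfx::UniqueKey::Next();
//   resource->assignUniqueKey(key);
//   // later, on the same locked context:
//   auto cached = context->resourceCache()->findUniqueResource(key);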
+   */
+  std::shared_ptr<Resource> findScratchResource(const ScratchKey& scratchKey);
+
+  /**
+   * Returns a unique resource in the cache by the specified UniqueKey.
+   */
+  std::shared_ptr<Resource> findUniqueResource(const UniqueKey& uniqueKey);
+
+  /**
+   * Returns true if there is a corresponding unique resource for the specified UniqueKey.
+   */
+  bool hasUniqueResource(const UniqueKey& uniqueKey);
+
+  /**
+   * Purges GPU resources that haven't been used since the passed in time.
+   * @param purgeTime A timestamp previously returned by Clock::Now().
+   * @param scratchResourcesOnly If true, the purgeable resources containing unique keys are spared.
+   * If false, then all purgeable resources will be deleted.
+   */
+  void purgeNotUsedSince(int64_t purgeTime, bool scratchResourcesOnly = false);
+
+  /**
+   * Purges unreferenced resources from the cache until the provided bytesLimit has been reached
+   * or we have purged all unreferenced resources. Returns true if the total resource bytes is not
+   * over the specified bytesLimit after purging.
+   * @param bytesLimit The desired number of bytes after purging.
+   * @param scratchResourcesOnly If true, the purgeable resources containing unique keys are spared.
+   * If false, then all purgeable resources will be deleted.
+   */
+  bool purgeUntilMemoryTo(size_t bytesLimit, bool scratchResourcesOnly = false);
+
+ private:
+  Context* context = nullptr;
+  size_t maxBytes = 0;
+  size_t totalBytes = 0;
+  size_t purgeableBytes = 0;
+  bool purgingResource = false;
+  std::vector> strongReferences = {};
+  std::list nonpurgeableResources = {};
+  std::list purgeableResources = {};
+  ScratchKeyMap> scratchKeyMap = {};
+  std::unordered_map uniqueKeyMap = {};
+  std::mutex removeLocker = {};
+  std::vector pendingPurgeableResources = {};
+
+  static void AddToList(std::list& list, Resource* resource);
+  static void RemoveFromList(std::list& list, Resource* resource);
+  static void NotifyReferenceReachedZero(Resource* resource);
+
+  void attachToCurrentThread();
+  void detachFromCurrentThread();
+  void releaseAll(bool releaseGPU);
+  void processUnreferencedResource(Resource* resource);
+  std::shared_ptr<Resource> wrapResource(Resource* resource);
+  std::shared_ptr<Resource> addResource(Resource* resource);
+  void removeResource(Resource* resource);
+  void purgeResourcesByLRU(bool scratchResourcesOnly,
+                           const std::function& satisfied);
+
+  void changeUniqueKey(Resource* resource, const UniqueKey& uniqueKey);
+  void removeUniqueKey(Resource* resource);
+  Resource* getUniqueResource(const UniqueKey& uniqueKey);
+
+  friend class Resource;
+  friend class Context;
+  friend class PurgeGuard;
+};
+} // namespace tgfx
diff --git a/include/tgfx/gpu/ResourceKey.h b/include/tgfx/gpu/ResourceKey.h
new file mode 100644
index 00000000..6839e825
--- /dev/null
+++ b/include/tgfx/gpu/ResourceKey.h
@@ -0,0 +1,107 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied.
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/utils/BytesKey.h" + +namespace tgfx { +/** + * A key used for scratch resources. There are three important rules about scratch keys: + * + * 1) Multiple resources can share the same scratch key. Therefore, resources assigned the same + * scratch key should be interchangeable with respect to the code that uses them. + * 2) A resource can have at most one scratch key, and it is set at resource creation by the + * resource itself. + * 3) When a scratch resource is referenced, it will not be returned from the cache for a + * subsequent cache request until all refs are released. This facilitates using a scratch key + * for multiple render-to-texture scenarios. + */ +using ScratchKey = BytesKey; + +template +using ScratchKeyMap = BytesKeyMap; + +class UniqueData; + +/** + * A key that allows for exclusive use of a resource for a use case (AKA "domain"). There are three + * rules governing the use of unique keys: + * + * 1) Only one resource can have a given unique key at a time. Hence, "unique". + * 2) A resource can have at most one unique key at a time. + * 3) Unlike scratch keys, multiple requests for a unique key will return the same resource even + * if the resource already has refs. + * + * This key type allows a code path to create cached resources for which it is the exclusive user. + * The code path creates a domain which it sets on its keys. This guarantees that there are no + * cross-domain collisions. Unique keys preempt scratch keys. While a resource has a unique key, it + * is inaccessible via its scratch key. It can become scratch again if the unique key is removed. + */ +class UniqueKey { + public: + /** + * Creates a new valid UniqueKey. + */ + static UniqueKey Next(); + + /** + * Creates an empty UniqueKey. + */ + UniqueKey() = default; + + /** + * Returns a global unique ID for the UniqueKey. + */ + uint32_t uniqueID() const { + return id == nullptr ? 0 : *id; + } + + /** + * Returns true if the UniqueKey is empty. + */ + bool empty() const { + return id == nullptr; + } + + /** + * Returns true if the UniqueKey has only one valid reference. + */ + bool unique() const { + return id.unique(); + } + + /** + * Returns true if a is equivalent to b. + */ + friend bool operator==(const UniqueKey& a, const UniqueKey& b) { + return a.id == b.id; + } + + /** + * Returns true if a is not equivalent to b. + */ + friend bool operator!=(const UniqueKey& a, const UniqueKey& b) { + return a.id != b.id; + } + + private: + std::shared_ptr id = nullptr; +}; +} // namespace tgfx diff --git a/include/tgfx/gpu/Surface.h b/include/tgfx/gpu/Surface.h new file mode 100644 index 00000000..11b5753f --- /dev/null +++ b/include/tgfx/gpu/Surface.h @@ -0,0 +1,227 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Canvas.h" +#include "tgfx/core/ImageInfo.h" +#include "tgfx/gpu/Backend.h" +#include "tgfx/gpu/ImageOrigin.h" +#include "tgfx/gpu/SurfaceOptions.h" + +namespace tgfx { +class Canvas; +class Context; +class RenderTarget; + +/** + * The Surface class is responsible for managing the pixels that a Canvas draws into. The Surface + * takes care of allocating a Canvas that will draw into the surface. Call surface->getCanvas() to + * use that Canvas (but don't delete it, it is owned by the surface). The Surface always has + * non-zero dimensions. If there is a request for a new surface, and either of the requested + * dimensions is zero, then nullptr will be returned. + */ +class Surface { + public: + /** + * Creates a new Surface on GPU indicated by context. Allocates memory for pixels based on the + * width, height, and color type (alpha only). A Surface with MSAA enabled is returned if the + * sample count is greater than 1. Return nullptr if the size is invalid or the alpha-only + * textures are not renderable in the GPU backend. + */ + static std::shared_ptr Make(Context* context, int width, int height, + bool alphaOnly = false, int sampleCount = 1, + bool mipMapped = false, + const SurfaceOptions* options = nullptr); + + /** + * Creates a new Surface on GPU indicated by context. Allocates memory for pixels based on the + * width, height, and colorType. A Surface with MSAA enabled is returned if the sample count is + * greater than 1. Return nullptr if the size is invalid or the colorType is not renderable in + * the GPU backend. + */ + static std::shared_ptr Make(Context* context, int width, int height, ColorType colorType, + int sampleCount = 1, bool mipMapped = false, + const SurfaceOptions* options = nullptr); + + /** + * Wraps a backend render target into Surface. The caller must ensure the renderTarget is valid + * for the lifetime of the returned Surface. Returns nullptr if the context is nullptr or the + * renderTarget is invalid. + */ + static std::shared_ptr MakeFrom(Context* context, + const BackendRenderTarget& renderTarget, + ImageOrigin origin, + const SurfaceOptions* options = nullptr); + + /** + * Wraps a BackendTexture into the Surface. The caller must ensure the texture is valid for the + * lifetime of the returned Surface. If the sampleCount is greater than zero, creates an + * intermediate MSAA Surface which is used for drawing backendTexture. Returns nullptr if the + * context is nullptr or the texture is not renderable in the GPU backend. + */ + static std::shared_ptr MakeFrom(Context* context, const BackendTexture& backendTexture, + ImageOrigin origin, int sampleCount = 1, + const SurfaceOptions* options = nullptr); + + /** + * Creates a Surface from the platform-specific hardware buffer. For example, the hardware buffer + * could be an AHardwareBuffer on the android platform or a CVPixelBufferRef on the apple + * platform. The returned Surface takes a reference to the hardwareBuffer. 
Returns nullptr if the + * context is nullptr or the hardwareBuffer is not renderable. + */ + static std::shared_ptr MakeFrom(Context* context, HardwareBufferRef hardwareBuffer, + int sampleCount = 1, + const SurfaceOptions* options = nullptr); + + virtual ~Surface(); + + /** + * Retrieves the context associated with this Surface. + */ + Context* getContext() const; + + /** + * Returns the SurfaceOptions of the Surface. + */ + const SurfaceOptions* options() const { + return &surfaceOptions; + } + + /** + * Returns the width of this surface. + */ + int width() const; + + /** + * Returns the height of this surface. + */ + int height() const; + + /** + * Returns the origin of this surface, either SurfaceOrigin::TopLeft or SurfaceOrigin::BottomLeft. + */ + ImageOrigin origin() const; + + /** + * Retrieves the backend render target that the surface renders to. The returned + * BackendRenderTarget should be discarded if the Surface is drawn to or deleted. + */ + BackendRenderTarget getBackendRenderTarget(); + + /** + * Retrieves the backend texture that the surface renders to. Returns an invalid BackendTexture if + * the surface has no backend texture. The returned BackendTexture should be discarded if the + * Surface is drawn to or deleted. + */ + BackendTexture getBackendTexture(); + + /** + * Retrieves the backing hardware buffer. This method does not acquire any additional reference to + * the returned hardware buffer. Returns nullptr if the surface is not created from a hardware + * buffer. + */ + HardwareBufferRef getHardwareBuffer(); + + /** + * Returns Canvas that draws into Surface. Subsequent calls return the same Canvas. Canvas + * returned is managed and owned by Surface, and is deleted when Surface is deleted. + */ + Canvas* getCanvas(); + + /** + * Inserts a GPU semaphore that the current GPU-backed API must wait on before executing any more + * commands on the GPU for this surface. Surface will take ownership of the underlying semaphore + * and delete it once it has been signaled and waited on. If this call returns false, then the + * GPU back-end will not wait on the passed semaphore, and the client will still own the + * semaphore. Returns true if GPU is waiting on the semaphore. + */ + bool wait(const BackendSemaphore& waitSemaphore); + + /** + * Apply all pending changes to the render target immediately. After issuing all commands, the + * semaphore will be signaled by the GPU. If the signalSemaphore is not null and uninitialized, + * a new semaphore is created and initializes BackendSemaphore. The caller must delete the + * semaphore returned in signalSemaphore. BackendSemaphore can be deleted as soon as this function + * returns. If the back-end API is OpenGL only uninitialized backend semaphores are supported. + * If false is returned, the GPU back-end did not create or add a semaphore to signal on the GPU; + * the caller should not instruct the GPU to wait on the semaphore. + */ + bool flush(BackendSemaphore* signalSemaphore = nullptr); + + /** + * Call to ensure all reads/writes of the surface have been issued to the underlying 3D API. + * TGFX will correctly order its own draws and pixel operations. This must be used to ensure + * correct ordering when the surface backing store is accessed outside TGFX (e.g. direct use of + * the 3D API or a windowing system). Context has additional flush and submit methods that apply + * to all surfaces and images created from a Context. This is equivalent to calling + * Surface::flush() followed by Context::submit(syncCpu). 
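
For illustration, a minimal sketch of the draw-and-flush cycle described above. It assumes a
valid tgfx::Context* named context obtained elsewhere (for example from a locked GLDevice,
declared later in this diff) and uses Canvas::clear(), which is not shown here, so both are
assumptions.

  auto surface = tgfx::Surface::Make(context, 1280, 720);
  if (surface != nullptr) {
    auto* canvas = surface->getCanvas();  // owned by the Surface; do not delete it
    canvas->clear();                      // assumed Canvas API, not part of this diff
    // ... record drawing commands on the canvas ...
    surface->flushAndSubmit(true);        // issue the pending GPU work and wait for it on the CPU
  }
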
+ */ + void flushAndSubmit(bool syncCpu = false); + + /** + * Returns an Image capturing the Surface contents. Subsequent drawings to the Surface contents + * are not captured. This method would trigger immediate texture copying if the Surface has no + * backing texture or the backing texture was allocated externally. For example, the Surface was + * created from a BackendRenderTarget, a BackendTexture or a HardwareBuffer. + */ + std::shared_ptr makeImageSnapshot(); + + /** + * Returns pixel at (x, y) as unpremultiplied color. Some color precision may be lost in the + * conversion to unpremultiplied color; original pixel data may have additional precision. Returns + * a transparent color if the point (x, y) is not contained by the Surface bounds. + * @param x column index, zero or greater, and less than width() + * @param y row index, zero or greater, and less than height() + * @return pixel converted to unpremultiplied color + */ + Color getColor(int x, int y); + + /** + * Copies a rect of pixels to dstPixels with specified ImageInfo. Copy starts at (srcX, srcY), and + * does not exceed Surface (width(), height()). Pixels are copied only if pixel conversion is + * possible. Returns true if pixels are copied to dstPixels. + */ + bool readPixels(const ImageInfo& dstInfo, void* dstPixels, int srcX = 0, int srcY = 0); + + private: + std::shared_ptr renderTarget = nullptr; + std::shared_ptr texture = nullptr; + SurfaceOptions surfaceOptions = {}; + bool externalTexture = false; + Canvas* canvas = nullptr; + std::shared_ptr cachedImage = nullptr; + + static std::shared_ptr MakeFrom(std::shared_ptr renderTarget, + const SurfaceOptions* options = nullptr); + + static std::shared_ptr MakeFrom(std::shared_ptr texture, int sampleCount = 1, + const SurfaceOptions* options = nullptr); + + Surface(std::shared_ptr renderTarget, std::shared_ptr texture, + const SurfaceOptions* options, bool externalTexture = true); + + std::shared_ptr getTexture(); + + void aboutToDraw(bool discardContent = false); + + friend class DrawingManager; + friend class Canvas; + friend class QGLWindow; +}; +} // namespace tgfx \ No newline at end of file diff --git a/include/tgfx/gpu/SurfaceOptions.h b/include/tgfx/gpu/SurfaceOptions.h new file mode 100644 index 00000000..b547f077 --- /dev/null +++ b/include/tgfx/gpu/SurfaceOptions.h @@ -0,0 +1,65 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * Describes properties and constraints of a given Surface. The rendering engine can parse these + * during drawing, and can sometimes optimize its performance (e.g. disabling an expensive feature). 
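
For illustration, a short sketch of passing these options to Surface::Make(), using the
DisableCacheFlag and DisableAsyncTaskFlag constants defined just below; the tgfx::Context* is
assumed to come from elsewhere.

  tgfx::SurfaceOptions options(tgfx::SurfaceOptions::DisableCacheFlag |
                               tgfx::SurfaceOptions::DisableAsyncTaskFlag);
  auto surface = tgfx::Surface::Make(context, 512, 512, false, 1, false, &options);
  // surface->options()->cacheDisabled() and asyncTaskDisabled() both report true afterwards.
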
+ */ +class SurfaceOptions { + public: + /** + * If this flag is set, the Surface will skip generating new caches to the associated Context + * during drawing. + */ + static constexpr uint32_t DisableCacheFlag = 1 << 0; + + /** + * If this flag is set, the Surface will perform all CPU-side tasks in the current thread rather + * than run them in parallel asynchronously. + */ + static constexpr uint32_t DisableAsyncTaskFlag = 1 << 1; + + SurfaceOptions() = default; + + SurfaceOptions(uint32_t flags) : _flags(flags) { + } + + uint32_t flags() const { + return _flags; + } + + bool cacheDisabled() const { + return _flags & DisableCacheFlag; + } + + bool asyncTaskDisabled() const { + return _flags & DisableAsyncTaskFlag; + } + + bool operator==(const SurfaceOptions& that) const { + return _flags == that._flags; + } + + private: + uint32_t _flags = 0; +}; + +} // namespace tgfx diff --git a/include/tgfx/gpu/Window.h b/include/tgfx/gpu/Window.h new file mode 100644 index 00000000..76f478ff --- /dev/null +++ b/include/tgfx/gpu/Window.h @@ -0,0 +1,61 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/gpu/Context.h" +#include "tgfx/gpu/Surface.h" + +namespace tgfx { +/** + * Window represents a native displayable resource that can be rendered or written to by a Device. + */ +class Window { + public: + virtual ~Window() = default; + + /** + * Returns the Device associated with this Window. + */ + std::shared_ptr getDevice() const { + return device; + } + + /** + * Creates a new surface connected to the window. + */ + std::shared_ptr createSurface(Context* context); + + /** + * Applies all pending graphics changes to the window. The presentationTime will be passed to the + * SurfaceTexture.getTimestamp() on the android platform, ignored by other platforms. It is in + * microseconds. A system timestamp will be used if presentationTime is not set. + */ + void present(Context* context, int64_t presentationTime = INT64_MIN); + + protected: + std::shared_ptr device = nullptr; + + explicit Window(std::shared_ptr device); + virtual std::shared_ptr onCreateSurface(Context* context) = 0; + virtual void onPresent(Context* context, int64_t presentationTime) = 0; + + private: + bool checkContext(Context* context); +}; +} // namespace tgfx diff --git a/include/tgfx/metal/MtlTypes.h b/include/tgfx/metal/MtlTypes.h new file mode 100644 index 00000000..fa64e734 --- /dev/null +++ b/include/tgfx/metal/MtlTypes.h @@ -0,0 +1,32 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. 
+// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * Types for interacting with Metal resources created externally to TGFX. Holds the MTLTexture as a + * const void*. + */ +struct MtlTextureInfo { + /** + * Pointer to MTLTexture. + */ + const void* texture = nullptr; +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/GLDefines.h b/include/tgfx/opengl/GLDefines.h new file mode 100644 index 00000000..14838804 --- /dev/null +++ b/include/tgfx/opengl/GLDefines.h @@ -0,0 +1,906 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +#ifndef GL_TRUE + +// The following constants consist of the intersection of GL constants +// exported by GLES 1.0, GLES 2.0, GLES 3.0, and desktop GL required by the system. 
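
Before the GL definitions, a tiny sketch of filling the MtlTextureInfo struct above with a Metal
texture created outside of TGFX. The mtlTexturePointer value and its lifetime are assumptions; in
Objective-C++ it would typically come from a __bridge cast of an id<MTLTexture>.

  tgfx::MtlTextureInfo textureInfo = {};
  textureInfo.texture = mtlTexturePointer;  // const void* to an MTLTexture the caller keeps alive
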
+ +#define GL_DEPTH_BUFFER_BIT 0x00000100 +#define GL_STENCIL_BUFFER_BIT 0x00000400 +#define GL_COLOR_BUFFER_BIT 0x00004000 + +// Boolean +#define GL_FALSE 0 +#define GL_TRUE 1 + +// BeginMode +#define GL_POINTS 0x0000 +#define GL_LINES 0x0001 +#define GL_LINE_LOOP 0x0002 +#define GL_LINE_STRIP 0x0003 +#define GL_TRIANGLES 0x0004 +#define GL_TRIANGLE_STRIP 0x0005 +#define GL_TRIANGLE_FAN 0x0006 + +// Basic OpenGL blend equations +#define GL_FUNC_ADD 0x8006 +#define GL_BLEND_EQUATION 0x8009 +#define GL_BLEND_EQUATION_RGB 0x8009 /* same as BLEND_EQUATION */ +#define GL_BLEND_EQUATION_ALPHA 0x883D +#define GL_FUNC_SUBTRACT 0x800A +#define GL_FUNC_REVERSE_SUBTRACT 0x800B + +// KHR_blend_equation_advanced +#define GL_SCREEN 0x9295 +#define GL_OVERLAY 0x9296 +#define GL_DARKEN 0x9297 +#define GL_LIGHTEN 0x9298 +#define GL_COLORDODGE 0x9299 +#define GL_COLORBURN 0x929A +#define GL_HARDLIGHT 0x929B +#define GL_SOFTLIGHT 0x929C +#define GL_DIFFERENCE 0x929E +#define GL_EXCLUSION 0x92A0 +#define GL_MULTIPLY 0x9294 +#define GL_HSL_HUE 0x92AD +#define GL_HSL_SATURATION 0x92AE +#define GL_HSL_COLOR 0x92AF +#define GL_HSL_LUMINOSITY 0x92B0 + +// BlendingFactorDest +#define GL_ZERO 0 +#define GL_ONE 1 +#define GL_SRC_COLOR 0x0300 +#define GL_ONE_MINUS_SRC_COLOR 0x0301 +#define GL_SRC_ALPHA 0x0302 +#define GL_ONE_MINUS_SRC_ALPHA 0x0303 +#define GL_DST_ALPHA 0x0304 +#define GL_ONE_MINUS_DST_ALPHA 0x0305 + +// BlendingFactorSrc +#define GL_DST_COLOR 0x0306 +#define GL_ONE_MINUS_DST_COLOR 0x0307 +#define GL_SRC_ALPHA_SATURATE 0x0308 + +// ExtendedBlendFactors +#define GL_SRC1_COLOR 0x88F9 +#define GL_ONE_MINUS_SRC1_COLOR 0x88FA +// SRC1_ALPHA +#define GL_ONE_MINUS_SRC1_ALPHA 0x88FB + +// Separate Blend Functions +#define GL_BLEND_DST_RGB 0x80C8 +#define GL_BLEND_SRC_RGB 0x80C9 +#define GL_BLEND_DST_ALPHA 0x80CA +#define GL_BLEND_SRC_ALPHA 0x80CB +#define GL_CONSTANT_COLOR 0x8001 +#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002 +#define GL_CONSTANT_ALPHA 0x8003 +#define GL_ONE_MINUS_CONSTANT_ALPHA 0x8004 +#define GL_BLEND_COLOR 0x8005 + +// Buffer Objects +#define GL_ARRAY_BUFFER 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define GL_DRAW_INDIRECT_BUFFER 0x8F3F +#define GL_TEXTURE_BUFFER 0x8C2A +#define GL_ARRAY_BUFFER_BINDING 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895 +#define GL_DRAW_INDIRECT_BUFFER_BINDING 0x8F43 +#define GL_VERTEX_ARRAY_BINDING 0x85B5 +#define GL_PIXEL_PACK_BUFFER 0x88EB +#define GL_PIXEL_UNPACK_BUFFER 0x88EC + +#define GL_PIXEL_UNPACK_TRANSFER_BUFFER_CHROMIUM 0x78EC +#define GL_PIXEL_PACK_TRANSFER_BUFFER_CHROMIUM 0x78ED + +#define GL_STREAM_DRAW 0x88E0 +#define GL_STREAM_READ 0x88E1 +#define GL_STATIC_DRAW 0x88E4 +#define GL_STATIC_READ 0x88E5 +#define GL_DYNAMIC_DRAW 0x88E8 +#define GL_DYNAMIC_READ 0x88E9 + +#define GL_BUFFER_SIZE 0x8764 +#define GL_BUFFER_USAGE 0x8765 + +#define GL_CURRENT_VERTEX_ATTRIB 0x8626 + +// CullFaceMode +#define GL_FRONT 0x0404 +#define GL_BACK 0x0405 +#define GL_FRONT_AND_BACK 0x0408 + +// EnableCap +#define GL_TEXTURE_2D 0x0DE1 +#define GL_TEXTURE_RECTANGLE 0x84F5 +#define GL_CULL_FACE 0x0B44 +#define GL_BLEND 0x0BE2 +#define GL_DITHER 0x0BD0 +#define GL_STENCIL_TEST 0x0B90 +#define GL_DEPTH_TEST 0x0B71 +#define GL_SCISSOR_TEST 0x0C11 +#define GL_POLYGON_OFFSET_FILL 0x8037 +#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E +#define GL_SAMPLE_COVERAGE 0x80A0 +#define GL_POLYGON_SMOOTH 0x0B41 +#define GL_POLYGON_STIPPLE 0x0B42 +#define GL_COLOR_LOGIC_OP 0x0BF2 +#define GL_COLOR_TABLE 0x80D0 +#define GL_INDEX_LOGIC_OP 0x0BF1 +#define 
GL_VERTEX_PROGRAM_POINT_SIZE 0x8642 +#define GL_FRAMEBUFFER_SRGB 0x8DB9 +#define GL_SHADER_PIXEL_LOCAL_STORAGE 0x8F64 +#define GL_SAMPLE_SHADING 0x8C36 + +// ErrorCode +#define GL_NO_ERROR 0 +#define GL_INVALID_ENUM 0x0500 +#define GL_INVALID_VALUE 0x0501 +#define GL_INVALID_OPERATION 0x0502 +#define GL_OUT_OF_MEMORY 0x0505 +#define GL_CONTEXT_LOST 0x300E + +// FrontFaceDirection +#define GL_CW 0x0900 +#define GL_CCW 0x0901 + +// GetPName +#define GL_ALIASED_POINT_SIZE_RANGE 0x846D +#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E +#define GL_CULL_FACE_MODE 0x0B45 +#define GL_FRONT_FACE 0x0B46 +#define GL_DEPTH_RANGE 0x0B70 +#define GL_DEPTH_WRITEMASK 0x0B72 +#define GL_DEPTH_CLEAR_VALUE 0x0B73 +#define GL_DEPTH_FUNC 0x0B74 +#define GL_STENCIL_CLEAR_VALUE 0x0B91 +#define GL_STENCIL_FUNC 0x0B92 +#define GL_STENCIL_FAIL 0x0B94 +#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95 +#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96 +#define GL_STENCIL_REF 0x0B97 +#define GL_STENCIL_VALUE_MASK 0x0B93 +#define GL_STENCIL_WRITEMASK 0x0B98 +#define GL_STENCIL_BACK_FUNC 0x8800 +#define GL_STENCIL_BACK_FAIL 0x8801 +#define GL_STENCIL_BACK_PASS_DEPTH_FAIL 0x8802 +#define GL_STENCIL_BACK_PASS_DEPTH_PASS 0x8803 +#define GL_STENCIL_BACK_REF 0x8CA3 +#define GL_STENCIL_BACK_VALUE_MASK 0x8CA4 +#define GL_STENCIL_BACK_WRITEMASK 0x8CA5 +#define GL_VIEWPORT 0x0BA2 +#define GL_SCISSOR_BOX 0x0C10 +#define GL_COLOR_CLEAR_VALUE 0x0C22 +#define GL_COLOR_WRITEMASK 0x0C23 +#define GL_UNPACK_ALIGNMENT 0x0CF5 +#define GL_PACK_ALIGNMENT 0x0D05 +#define GL_PACK_REVERSE_ROW_ORDER 0x93A4 +#define GL_MAX_TEXTURE_SIZE 0x0D33 +#define GL_TEXTURE_MIN_LOD 0x813A +#define GL_TEXTURE_MAX_LOD 0x813B +#define GL_TEXTURE_BASE_LEVEL 0x813C +#define GL_TEXTURE_MAX_LEVEL 0x813D +#define GL_MAX_VIEWPORT_DIMS 0x0D3A +#define GL_SUBPIXEL_BITS 0x0D50 +#define GL_RED_BITS 0x0D52 +#define GL_GREEN_BITS 0x0D53 +#define GL_BLUE_BITS 0x0D54 +#define GL_ALPHA_BITS 0x0D55 +#define GL_DEPTH_BITS 0x0D56 +#define GL_STENCIL_BITS 0x0D57 +#define GL_POLYGON_OFFSET_UNITS 0x2A00 +#define GL_POLYGON_OFFSET_FACTOR 0x8038 +#define GL_TEXTURE_BINDING_2D 0x8069 +#define GL_TEXTURE_BINDING_RECTANGLE 0x84F6 +#define GL_SAMPLE_BUFFERS 0x80A8 +#define GL_SAMPLES 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT 0x80AB +#define GL_RENDERBUFFER_COVERAGE_SAMPLES 0x8CAB +#define GL_RENDERBUFFER_COLOR_SAMPLES 0x8E10 +#define GL_MAX_MULTISAMPLE_COVERAGE_MODES 0x8E11 +#define GL_MULTISAMPLE_COVERAGE_MODES 0x8E12 +#define GL_MAX_TEXTURE_BUFFER_SIZE 0x8C2B + +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3 + +// Compressed Texture Formats +#define GL_COMPRESSED_RGB_S3TC_DXT1_EXT 0x83F0 +#define GL_COMPRESSED_RGBA_S3TC_DXT1_EXT 0x83F1 +#define GL_COMPRESSED_RGBA_S3TC_DXT3_EXT 0x83F2 +#define GL_COMPRESSED_RGBA_S3TC_DXT5_EXT 0x83F3 + +#define GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG 0x8C00 +#define GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG 0x8C01 +#define GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG 0x8C02 +#define GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG 0x8C03 + +#define GL_COMPRESSED_RGBA_PVRTC_2BPPV2_IMG 0x9137 +#define GL_COMPRESSED_RGBA_PVRTC_4BPPV2_IMG 0x9138 + +#define GL_COMPRESSED_ETC1_RGB8 0x8D64 + +#define GL_COMPRESSED_R11_EAC 0x9270 +#define GL_COMPRESSED_SIGNED_R11_EAC 0x9271 +#define GL_COMPRESSED_RG11_EAC 0x9272 +#define GL_COMPRESSED_SIGNED_RG11_EAC 0x9273 + +#define GL_COMPRESSED_RGB8_ETC2 0x9274 +#define GL_COMPRESSED_SRGB8 0x9275 +#define GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1 0x9276 +#define 
GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1 0x9277 +#define GL_COMPRESSED_RGBA8_ETC2 0x9278 +#define GL_COMPRESSED_SRGB8_ALPHA8_ETC2 0x9279 + +#define GL_COMPRESSED_LUMINANCE_LATC1 0x8C70 +#define GL_COMPRESSED_SIGNED_LUMINANCE_LATC1 0x8C71 +#define GL_COMPRESSED_LUMINANCE_ALPHA_LATC2 0x8C72 +#define GL_COMPRESSED_SIGNED_LUMINANCE_ALPHA_LATC2 0x8C73 + +#define GL_COMPRESSED_RED_RGTC1 0x8DBB +#define GL_COMPRESSED_SIGNED_RED_RGTC1 0x8DBC +#define GL_COMPRESSED_RED_GREEN_RGTC2 0x8DBD +#define GL_COMPRESSED_SIGNED_RED_GREEN_RGTC2 0x8DBE + +#define GL_COMPRESSED_3DC_X 0x87F9 +#define GL_COMPRESSED_3DC_XY 0x87FA + +#define GL_COMPRESSED_RGBA_BPTC_UNORM 0x8E8C +#define GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM 0x8E8D +#define GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT 0x8E8E +#define GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT 0x8E8F + +#define GL_COMPRESSED_RGBA_ASTC_4x4 0x93B0 +#define GL_COMPRESSED_RGBA_ASTC_5x4 0x93B1 +#define GL_COMPRESSED_RGBA_ASTC_5x5 0x93B2 +#define GL_COMPRESSED_RGBA_ASTC_6x5 0x93B3 +#define GL_COMPRESSED_RGBA_ASTC_6x6 0x93B4 +#define GL_COMPRESSED_RGBA_ASTC_8x5 0x93B5 +#define GL_COMPRESSED_RGBA_ASTC_8x6 0x93B6 +#define GL_COMPRESSED_RGBA_ASTC_8x8 0x93B7 +#define GL_COMPRESSED_RGBA_ASTC_10x5 0x93B8 +#define GL_COMPRESSED_RGBA_ASTC_10x6 0x93B9 +#define GL_COMPRESSED_RGBA_ASTC_10x8 0x93BA +#define GL_COMPRESSED_RGBA_ASTC_10x10 0x93BB +#define GL_COMPRESSED_RGBA_ASTC_12x10 0x93BC +#define GL_COMPRESSED_RGBA_ASTC_12x12 0x93BD + +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4 0x93D0 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4 0x93D1 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5 0x93D2 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5 0x93D3 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6 0x93D4 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5 0x93D5 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6 0x93D6 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8 0x93D7 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5 0x93D8 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6 0x93D9 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8 0x93DA +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10 0x93DB +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10 0x93DC +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12 0x93DD + +// HintMode +#define GL_DONT_CARE 0x1100 +#define GL_FASTEST 0x1101 +#define GL_NICEST 0x1102 + +// HintTarget +#define GL_GENERATE_MIPMAP_HINT 0x8192 + +// DataType +#define GL_BYTE 0x1400 +#define GL_UNSIGNED_BYTE 0x1401 +#define GL_SHORT 0x1402 +#define GL_UNSIGNED_SHORT 0x1403 +#define GL_INT 0x1404 +#define GL_UNSIGNED_INT 0x1405 +#define GL_FLOAT 0x1406 +#define GL_HALF_FLOAT 0x140B +#define GL_FIXED 0x140C +#define GL_HALF_FLOAT_OES 0x8D61 + +// Lighting +#define GL_LIGHTING 0x0B50 +#define GL_LIGHT0 0x4000 +#define GL_LIGHT1 0x4001 +#define GL_LIGHT2 0x4002 +#define GL_LIGHT3 0x4003 +#define GL_LIGHT4 0x4004 +#define GL_LIGHT5 0x4005 +#define GL_LIGHT6 0x4006 +#define GL_LIGHT7 0x4007 +#define GL_SPOT_EXPONENT 0x1205 +#define GL_SPOT_CUTOFF 0x1206 +#define GL_CONSTANT_ATTENUATION 0x1207 +#define GL_LINEAR_ATTENUATION 0x1208 +#define GL_QUADRATIC_ATTENUATION 0x1209 +#define GL_AMBIENT 0x1200 +#define GL_DIFFUSE 0x1201 +#define GL_SPECULAR 0x1202 +#define GL_SHININESS 0x1601 +#define GL_EMISSION 0x1600 +#define GL_POSITION 0x1203 +#define GL_SPOT_DIRECTION 0x1204 +#define GL_AMBIENT_AND_DIFFUSE 0x1602 +#define GL_COLOR_INDEXES 0x1603 +#define GL_LIGHT_MODEL_TWO_SIDE 0x0B52 +#define GL_LIGHT_MODEL_LOCAL_VIEWER 0x0B51 +#define GL_LIGHT_MODEL_AMBIENT 0x0B53 +#define GL_SHADE_MODEL 0x0B54 +#define GL_FLAT 0x1D00 +#define GL_SMOOTH 0x1D01 
+#define GL_COLOR_MATERIAL 0x0B57 +#define GL_COLOR_MATERIAL_FACE 0x0B55 +#define GL_COLOR_MATERIAL_PARAMETER 0x0B56 +#define GL_NORMALIZE 0x0BA1 + +// Matrix Mode +#define GL_MATRIX_MODE 0x0BA0 +#define GL_MODELVIEW 0x1700 +#define GL_PROJECTION 0x1701 + +// multisample +#define GL_MULTISAMPLE 0x809D +#define GL_SAMPLE_POSITION 0x8E50 + +// Points +#define GL_POINT_SMOOTH 0x0B10 +#define GL_POINT_SIZE 0x0B11 +#define GL_POINT_SIZE_GRANULARITY 0x0B13 +#define GL_POINT_SIZE_RANGE 0x0B12 + +// Lines +#define GL_LINE_SMOOTH 0x0B20 +#define GL_LINE_STIPPLE 0x0B24 +#define GL_LINE_STIPPLE_PATTERN 0x0B25 +#define GL_LINE_STIPPLE_REPEAT 0x0B26 +#define GL_LINE_WIDTH 0x0B21 +#define GL_LINE_WIDTH_GRANULARITY 0x0B23 +#define GL_LINE_WIDTH_RANGE 0x0B22 + +// PolygonMode +#define GL_POINT 0x1B00 +#define GL_LINE 0x1B01 +#define GL_FILL 0x1B02 + +// Unsized formats +#define GL_STENCIL_INDEX 0x1901 +#define GL_DEPTH_COMPONENT 0x1902 +#define GL_DEPTH_STENCIL 0x84F9 +#define GL_RED 0x1903 +#define GL_RED_INTEGER 0x8D94 +#define GL_GREEN 0x1904 +#define GL_BLUE 0x1905 +#define GL_ALPHA 0x1906 +#define GL_LUMINANCE 0x1909 +#define GL_LUMINANCE_ALPHA 0x190A +#define GL_RG_INTEGER 0x8228 +#define GL_RGB 0x1907 +#define GL_RGB_INTEGER 0x8D98 +#define GL_SRGB 0x8C40 +#define GL_RGBA 0x1908 +#define GL_RG 0x8227 +#define GL_SRGB_ALPHA 0x8C42 +#define GL_RGBA_INTEGER 0x8D99 +#define GL_BGRA 0x80E1 + +// Stencil index sized formats +#define GL_STENCIL_INDEX4 0x8D47 +#define GL_STENCIL_INDEX8 0x8D48 +#define GL_STENCIL_INDEX16 0x8D49 + +// Depth component sized formats +#define GL_DEPTH_COMPONENT16 0x81A5 + +// Depth stencil sized formats +#define GL_DEPTH24_STENCIL8 0x88F0 + +// Red sized formats +#define GL_R8 0x8229 +#define GL_R16 0x822A +#define GL_R16F 0x822D +#define GL_R32F 0x822E + +// Red integer sized formats +#define GL_R8I 0x8231 +#define GL_R8UI 0x8232 +#define GL_R16I 0x8233 +#define GL_R16UI 0x8234 +#define GL_R32I 0x8235 +#define GL_R32UI 0x8236 + +// Luminance sized formats +#define GL_LUMINANCE8 0x8040 +#define GL_LUMINANCE8_ALPHA8 0x8045 + +// Alpha sized formats +#define GL_ALPHA8 0x803C +#define GL_ALPHA16 0x803E +#define GL_ALPHA16F 0x881C +#define GL_ALPHA32F 0x8816 + +// Alpha integer sized formats +#define GL_ALPHA8I 0x8D90 +#define GL_ALPHA8UI 0x8D7E +#define GL_ALPHA16I 0x8D8A +#define GL_ALPHA16UI 0x8D78 +#define GL_ALPHA32I 0x8D84 +#define GL_ALPHA32UI 0x8D72 + +// RG sized formats +#define GL_RG8 0x822B +#define GL_RG16 0x822C + +// RG sized integer formats +#define GL_RG8I 0x8237 +#define GL_RG8UI 0x8238 +#define GL_RG16I 0x8239 +#define GL_RG16UI 0x823A +#define GL_RG32I 0x823B +#define GL_RG32UI 0x823C + +// RGB sized formats +#define GL_RGB5 0x8050 +#define GL_RGB565 0x8D62 +#define GL_RGB8 0x8051 +#define GL_SRGB8 0x8C41 + +// RGB integer sized formats +#define GL_RGB8I 0x8D8F +#define GL_RGB8UI 0x8D7D +#define GL_RGB16I 0x8D89 +#define GL_RGB16UI 0x8D77 +#define GL_RGB32I 0x8D83 +#define GL_RGB32UI 0x8D71 + +// RGBA sized formats +#define GL_RGBA4 0x8056 +#define GL_RGB5_A1 0x8057 +#define GL_RGBA8 0x8058 +#define GL_RGB10_A2 0x8059 +#define GL_SRGB8_ALPHA8 0x8C43 +#define GL_RGBA16F 0x881A +#define GL_RGBA32F 0x8814 +#define GL_RG32F 0x8230 + +// RGBA integer sized formats +#define GL_RGBA8I 0x8D8E +#define GL_RGBA8UI 0x8D7C +#define GL_RGBA16I 0x8D88 +#define GL_RGBA16UI 0x8D76 +#define GL_RGBA32I 0x8D82 +#define GL_RGBA32UI 0x8D70 + +// BGRA sized formats +#define GL_BGRA8 0x93A1 + +// PixelType +#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1 
0x8034 +#define GL_UNSIGNED_SHORT_5_6_5 0x8363 +#define GL_UNSIGNED_INT_2_10_10_10_REV 0x8368 + +// Shaders +#define GL_FRAGMENT_SHADER 0x8B30 +#define GL_VERTEX_SHADER 0x8B31 +#define GL_GEOMETRY_SHADER 0x8DD9 +#define GL_MAX_VERTEX_ATTRIBS 0x8869 +#define GL_MAX_VERTEX_UNIFORM_VECTORS 0x8DFB +#define GL_MAX_VARYING_VECTORS 0x8DFC +#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D +#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS 0x8B4C +#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS 0x8C29 +#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872 +#define GL_MAX_FRAGMENT_UNIFORM_VECTORS 0x8DFD +#define GL_SHADER_TYPE 0x8B4F +#define GL_DELETE_STATUS 0x8B80 +#define GL_LINK_STATUS 0x8B82 +#define GL_VALIDATE_STATUS 0x8B83 +#define GL_ATTACHED_SHADERS 0x8B85 +#define GL_ACTIVE_UNIFORMS 0x8B86 +#define GL_ACTIVE_UNIFORM_MAX_LENGTH 0x8B87 +#define GL_ACTIVE_ATTRIBUTES 0x8B89 +#define GL_ACTIVE_ATTRIBUTE_MAX_LENGTH 0x8B8A +#define GL_SHADING_LANGUAGE_VERSION 0x8B8C +#define GL_CURRENT_PROGRAM 0x8B8D +#define GL_MAX_FRAGMENT_UNIFORM_COMPONENTS 0x8B49 +#define GL_MAX_VERTEX_UNIFORM_COMPONENTS 0x8B4A +#define GL_MAX_SHADER_PIXEL_LOCAL_STORAGE_FAST_SIZE 0x8F63 +#define GL_SHADER_BINARY_FORMATS 0x8DF8 + +// StencilFunction +#define GL_NEVER 0x0200 +#define GL_LESS 0x0201 +#define GL_EQUAL 0x0202 +#define GL_LEQUAL 0x0203 +#define GL_GREATER 0x0204 +#define GL_NOTEQUAL 0x0205 +#define GL_GEQUAL 0x0206 +#define GL_ALWAYS 0x0207 + +// StencilOp +#define GL_KEEP 0x1E00 +#define GL_REPLACE 0x1E01 +#define GL_INCR 0x1E02 +#define GL_DECR 0x1E03 +#define GL_INVERT 0x150A +#define GL_INCR_WRAP 0x8507 +#define GL_DECR_WRAP 0x8508 + +// StringName +#define GL_VENDOR 0x1F00 +#define GL_RENDERER 0x1F01 +#define GL_VERSION 0x1F02 +#define GL_EXTENSIONS 0x1F03 + +// StringCounts +#define GL_NUM_EXTENSIONS 0x821D + +// Pixel Mode / Transfer +#define GL_UNPACK_ROW_LENGTH 0x0CF2 +#define GL_PACK_ROW_LENGTH 0x0D02 + +// TextureMagFilter +#define GL_NEAREST 0x2600 +#define GL_LINEAR 0x2601 + +// TextureMinFilter +#define GL_NEAREST_MIPMAP_NEAREST 0x2700 +#define GL_LINEAR_MIPMAP_NEAREST 0x2701 +#define GL_NEAREST_MIPMAP_LINEAR 0x2702 +#define GL_LINEAR_MIPMAP_LINEAR 0x2703 + +// TextureUsage +#define GL_FRAMEBUFFER_ATTACHMENT 0x93A3 + +// TextureParameterName +#define GL_TEXTURE_MAG_FILTER 0x2800 +#define GL_TEXTURE_MIN_FILTER 0x2801 +#define GL_TEXTURE_WRAP_S 0x2802 +#define GL_TEXTURE_WRAP_T 0x2803 +#define GL_TEXTURE_USAGE 0x93A2 + +// TextureTarget +#define GL_TEXTURE 0x1702 +#define GL_TEXTURE_CUBE_MAP 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C + +// TextureUnit +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 +#define GL_TEXTURE3 0x84C3 +#define GL_TEXTURE4 0x84C4 +#define GL_TEXTURE5 0x84C5 +#define GL_TEXTURE6 0x84C6 +#define GL_TEXTURE7 0x84C7 +#define GL_TEXTURE8 0x84C8 +#define GL_TEXTURE9 0x84C9 +#define GL_TEXTURE10 0x84CA +#define GL_TEXTURE11 0x84CB +#define GL_TEXTURE12 0x84CC +#define GL_TEXTURE13 0x84CD +#define GL_TEXTURE14 0x84CE +#define GL_TEXTURE15 0x84CF +#define GL_TEXTURE16 0x84D0 +#define GL_TEXTURE17 0x84D1 +#define GL_TEXTURE18 0x84D2 +#define GL_TEXTURE19 0x84D3 +#define GL_TEXTURE20 0x84D4 +#define GL_TEXTURE21 0x84D5 +#define GL_TEXTURE22 0x84D6 
+#define GL_TEXTURE23 0x84D7 +#define GL_TEXTURE24 0x84D8 +#define GL_TEXTURE25 0x84D9 +#define GL_TEXTURE26 0x84DA +#define GL_TEXTURE27 0x84DB +#define GL_TEXTURE28 0x84DC +#define GL_TEXTURE29 0x84DD +#define GL_TEXTURE30 0x84DE +#define GL_TEXTURE31 0x84DF +#define GL_ACTIVE_TEXTURE 0x84E0 +#define GL_MAX_TEXTURE_UNITS 0x84E2 +#define GL_MAX_TEXTURE_COORDS 0x8871 + +// TextureWrapMode +#define GL_REPEAT 0x2901 +#define GL_CLAMP_TO_EDGE 0x812F +#define GL_MIRRORED_REPEAT 0x8370 +#define GL_CLAMP_TO_BORDER 0x812D + +// Texture Swizzle +#define GL_TEXTURE_SWIZZLE_R 0x8E42 +#define GL_TEXTURE_SWIZZLE_G 0x8E43 +#define GL_TEXTURE_SWIZZLE_B 0x8E44 +#define GL_TEXTURE_SWIZZLE_A 0x8E45 +#define GL_TEXTURE_SWIZZLE_RGBA 0x8E46 + +// Texture mapping +#define GL_TEXTURE_ENV 0x2300 +#define GL_TEXTURE_ENV_MODE 0x2200 +#define GL_TEXTURE_1D 0x0DE0 +#define GL_TEXTURE_ENV_COLOR 0x2201 +#define GL_TEXTURE_GEN_S 0x0C60 +#define GL_TEXTURE_GEN_T 0x0C61 +#define GL_TEXTURE_GEN_R 0x0C62 +#define GL_TEXTURE_GEN_Q 0x0C63 +#define GL_TEXTURE_GEN_MODE 0x2500 +#define GL_TEXTURE_BORDER_COLOR 0x1004 +#define GL_TEXTURE_WIDTH 0x1000 +#define GL_TEXTURE_HEIGHT 0x1001 +#define GL_TEXTURE_BORDER 0x1005 +#define GL_TEXTURE_COMPONENTS 0x1003 +#define GL_TEXTURE_RED_SIZE 0x805C +#define GL_TEXTURE_GREEN_SIZE 0x805D +#define GL_TEXTURE_BLUE_SIZE 0x805E +#define GL_TEXTURE_ALPHA_SIZE 0x805F +#define GL_TEXTURE_LUMINANCE_SIZE 0x8060 +#define GL_TEXTURE_INTENSITY_SIZE 0x8061 +#define GL_TEXTURE_INTERNAL_FORMAT 0x1003 +#define GL_OBJECT_LINEAR 0x2401 +#define GL_OBJECT_PLANE 0x2501 +#define GL_EYE_LINEAR 0x2400 +#define GL_EYE_PLANE 0x2502 +#define GL_SPHERE_MAP 0x2402 +#define GL_DECAL 0x2101 +#define GL_MODULATE 0x2100 +#define GL_CLAMP 0x2900 +#define GL_S 0x2000 +#define GL_T 0x2001 +#define GL_R 0x2002 +#define GL_Q 0x2003 + +// texture_env_combine +#define GL_COMBINE 0x8570 +#define GL_COMBINE_RGB 0x8571 +#define GL_COMBINE_ALPHA 0x8572 +#define GL_SOURCE0_RGB 0x8580 +#define GL_SOURCE1_RGB 0x8581 +#define GL_SOURCE2_RGB 0x8582 +#define GL_SOURCE0_ALPHA 0x8588 +#define GL_SOURCE1_ALPHA 0x8589 +#define GL_SOURCE2_ALPHA 0x858A +#define GL_OPERAND0_RGB 0x8590 +#define GL_OPERAND1_RGB 0x8591 +#define GL_OPERAND2_RGB 0x8592 +#define GL_OPERAND0_ALPHA 0x8598 +#define GL_OPERAND1_ALPHA 0x8599 +#define GL_OPERAND2_ALPHA 0x859A +#define GL_RGB_SCALE 0x8573 +#define GL_ADD_SIGNED 0x8574 +#define GL_INTERPOLATE 0x8575 +#define GL_SUBTRACT 0x84E7 +#define GL_CONSTANT 0x8576 +#define GL_PRIMARY_COLOR 0x8577 +#define GL_PREVIOUS 0x8578 +#define GL_SRC0_RGB 0x8580 +#define GL_SRC1_RGB 0x8581 +#define GL_SRC2_RGB 0x8582 +#define GL_SRC0_ALPHA 0x8588 +#define GL_SRC1_ALPHA 0x8589 +#define GL_SRC2_ALPHA 0x858A + +// Uniform Types +#define GL_FLOAT_VEC2 0x8B50 +#define GL_FLOAT_VEC3 0x8B51 +#define GL_FLOAT_VEC4 0x8B52 +#define GL_INT_VEC2 0x8B53 +#define GL_INT_VEC3 0x8B54 +#define GL_INT_VEC4 0x8B55 +#define GL_BOOL 0x8B56 +#define GL_BOOL_VEC2 0x8B57 +#define GL_BOOL_VEC3 0x8B58 +#define GL_BOOL_VEC4 0x8B59 +#define GL_FLOAT_MAT2 0x8B5A +#define GL_FLOAT_MAT3 0x8B5B +#define GL_FLOAT_MAT4 0x8B5C +#define GL_SAMPLER_2D 0x8B5E +#define GL_SAMPLER_CUBE 0x8B60 + +// Vertex Arrays +#define GL_VERTEX_ATTRIB_ARRAY_ENABLED 0x8622 +#define GL_VERTEX_ATTRIB_ARRAY_SIZE 0x8623 +#define GL_VERTEX_ATTRIB_ARRAY_STRIDE 0x8624 +#define GL_VERTEX_ATTRIB_ARRAY_TYPE 0x8625 +#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED 0x886A +#define GL_VERTEX_ATTRIB_ARRAY_POINTER 0x8645 +#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING 0x889F +#define GL_VERTEX_ARRAY 0x8074 
+#define GL_NORMAL_ARRAY 0x8075 +#define GL_COLOR_ARRAY 0x8076 +#define GL_SECONDARY_COLOR_ARRAY 0x845E +#define GL_INDEX_ARRAY 0x8077 +#define GL_TEXTURE_COORD_ARRAY 0x8078 +#define GL_EDGE_FLAG_ARRAY 0x8079 +#define GL_VERTEX_ARRAY_SIZE 0x807A +#define GL_VERTEX_ARRAY_TYPE 0x807B +#define GL_VERTEX_ARRAY_STRIDE 0x807C +#define GL_NORMAL_ARRAY_TYPE 0x807E +#define GL_NORMAL_ARRAY_STRIDE 0x807F +#define GL_COLOR_ARRAY_SIZE 0x8081 +#define GL_COLOR_ARRAY_TYPE 0x8082 +#define GL_COLOR_ARRAY_STRIDE 0x8083 +#define GL_INDEX_ARRAY_TYPE 0x8085 +#define GL_INDEX_ARRAY_STRIDE 0x8086 +#define GL_TEXTURE_COORD_ARRAY_SIZE 0x8088 +#define GL_TEXTURE_COORD_ARRAY_TYPE 0x8089 +#define GL_TEXTURE_COORD_ARRAY_STRIDE 0x808A +#define GL_EDGE_FLAG_ARRAY_STRIDE 0x808C +#define GL_VERTEX_ARRAY_POINTER 0x808E +#define GL_NORMAL_ARRAY_POINTER 0x808F +#define GL_COLOR_ARRAY_POINTER 0x8090 +#define GL_INDEX_ARRAY_POINTER 0x8091 +#define GL_TEXTURE_COORD_ARRAY_POINTER 0x8092 +#define GL_EDGE_FLAG_ARRAY_POINTER 0x8093 +#define GL_V2F 0x2A20 +#define GL_V3F 0x2A21 +#define GL_C4UB_V2F 0x2A22 +#define GL_C4UB_V3F 0x2A23 +#define GL_C3F_V3F 0x2A24 +#define GL_N3F_V3F 0x2A25 +#define GL_C4F_N3F_V3F 0x2A26 +#define GL_T2F_V3F 0x2A27 +#define GL_T4F_V4F 0x2A28 +#define GL_T2F_C4UB_V3F 0x2A29 +#define GL_T2F_C3F_V3F 0x2A2A +#define GL_T2F_N3F_V3F 0x2A2B +#define GL_T2F_C4F_N3F_V3F 0x2A2C +#define GL_T4F_C4F_N3F_V4F 0x2A2D +#define GL_PRIMITIVE_RESTART_FIXED_INDEX 0x8D69 + +// Buffer Object +#define GL_READ_ONLY 0x88B8 +#define GL_WRITE_ONLY 0x88B9 +#define GL_READ_WRITE 0x88BA +#define GL_BUFFER_MAPPED 0x88BC + +#define GL_MAP_READ_BIT 0x0001 +#define GL_MAP_WRITE_BIT 0x0002 +#define GL_MAP_INVALIDATE_RANGE_BIT 0x0004 +#define GL_MAP_INVALIDATE_BUFFER_BIT 0x0008 +#define GL_MAP_FLUSH_EXPLICIT_BIT 0x0010 +#define GL_MAP_UNSYNCHRONIZED_BIT 0x0020 + +// Read Format +#define GL_IMPLEMENTATION_COLOR_READ_TYPE 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT 0x8B9B + +// Shader Source +#define GL_COMPILE_STATUS 0x8B81 +#define GL_INFO_LOG_LENGTH 0x8B84 +#define GL_SHADER_SOURCE_LENGTH 0x8B88 +#define GL_SHADER_COMPILER 0x8DFA + +// Shader Binary +#define GL_NUM_SHADER_BINARY_FORMATS 0x8DF9 + +// Program Binary +#define GL_NUM_PROGRAM_BINARY_FORMATS 0x87FE + +// Shader Precision-Specified Types +#define GL_LOW_FLOAT 0x8DF0 +#define GL_MEDIUM_FLOAT 0x8DF1 +#define GL_HIGH_FLOAT 0x8DF2 +#define GL_LOW_INT 0x8DF3 +#define GL_MEDIUM_INT 0x8DF4 +#define GL_HIGH_INT 0x8DF5 + +// Queries +#define GL_QUERY_COUNTER_BITS 0x8864 +#define GL_CURRENT_QUERY 0x8865 +#define GL_QUERY_RESULT 0x8866 +#define GL_QUERY_RESULT_AVAILABLE 0x8867 +#define GL_SAMPLES_PASSED 0x8914 +#define GL_ANY_SAMPLES_PASSED 0x8C2F +#define GL_TIME_ELAPSED 0x88BF +#define GL_TIMESTAMP 0x8E28 +#define GL_PRIMITIVES_GENERATED 0x8C87 +#define GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN 0x8C88 + +// Framebuffer Object. 
+#define GL_FRAMEBUFFER 0x8D40 +#define GL_DRAW_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_READ_FRAMEBUFFER 0x8CA8 +#define GL_DRAW_FRAMEBUFFER 0x8CA9 +#define GL_READ_FRAMEBUFFER_BINDING 0x8CAA + +#define GL_RENDERBUFFER 0x8D41 + +#define GL_MAX_SAMPLES 0x8D57 +#define GL_MAX_SAMPLES_IMG 0x9135 + +#define GL_RENDERBUFFER_WIDTH 0x8D42 +#define GL_RENDERBUFFER_HEIGHT 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT 0x8D44 +#define GL_RENDERBUFFER_RED_SIZE 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE 0x8D53 +#define GL_RENDERBUFFER_DEPTH_SIZE 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE 0x8D55 + +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE 0x8CD3 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER 0x8CD4 +#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING 0x8210 +#define GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE 0x8211 +#define GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE 0x8212 +#define GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE 0x8213 +#define GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE 0x8214 +#define GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE 0x8215 +#define GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE 0x8216 +#define GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE 0x8217 + +#define GL_COLOR_ATTACHMENT0 0x8CE0 +#define GL_DEPTH_ATTACHMENT 0x8D00 +#define GL_STENCIL_ATTACHMENT 0x8D20 + +// EXT_discard_framebuffer +#define GL_COLOR 0x1800 +#define GL_DEPTH 0x1801 +#define GL_STENCIL 0x1802 + +#define GL_NONE 0 +#define GL_FRAMEBUFFER_DEFAULT 0x8218 + +#define GL_FRAMEBUFFER_COMPLETE 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS 0x8CD9 +#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD + +#define GL_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_RENDERBUFFER_BINDING 0x8CA7 +#define GL_MAX_RENDERBUFFER_SIZE 0x84E8 + +#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506 + +// OES +#define GL_TEXTURE_EXTERNAL_OES 0x8D65 +#define GL_TEXTURE_BINDING_EXTERNAL_OES 0x8d67 +#define GL_DEPTH_STENCIL_OES 0x84F9 +#define GL_UNSIGNED_INT_24_8_OES 0x84FA +#define GL_DEPTH24_STENCIL8_OES 0x88F0 + +/* ARB_internalformat_query */ +#define GL_NUM_SAMPLE_COUNTS 0x9380 + +/* ARM specific define for MSAA support on framebuffer fetch */ +#define GL_FETCH_PER_SAMPLE_ARM 0x8F65 + +#define GL_SYNC_GPU_COMMANDS_COMPLETE 0x9117 +#define GL_TIMEOUT_IGNORED 0xFFFFFFFFFFFFFFFFull + +#endif +} // namespace tgfx diff --git a/include/tgfx/opengl/GLDevice.h b/include/tgfx/opengl/GLDevice.h new file mode 100644 index 00000000..67292727 --- /dev/null +++ b/include/tgfx/opengl/GLDevice.h @@ -0,0 +1,76 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/gpu/Device.h" + +namespace tgfx { +/** + * The OpenGL interface for drawing graphics. + */ +class GLDevice : public Device { + public: + /** + * Returns the native handle of current OpenGL context on the calling thread. + */ + static void* CurrentNativeHandle(); + + /** + * Returns an GLDevice associated with current OpenGL context. Returns nullptr if there is no + * current OpenGL context on the calling thread. + */ + static std::shared_ptr Current(); + + /** + * Creates a new GLDevice with specified shared OpenGL context. + */ + static std::shared_ptr Make(void* sharedContext = nullptr); + + /** + * Creates a new GLDevice. If the creation fails, it will return a pre-created GLDevice. If that + * also fails, it will return the active GLDevice of the current thread. If all attempts fail, + * nullptr will be returned. + */ + static std::shared_ptr MakeWithFallback(); + + /** + * Returns the GLDevice associated with the specified OpenGL context. + */ + static std::shared_ptr Get(void* nativeHandle); + + ~GLDevice() override; + + /** + * Returns true if the specified native handle is shared context to this GLDevice. + */ + virtual bool sharableWith(void* nativeHandle) const = 0; + + protected: + void* nativeHandle = nullptr; + bool isAdopted = false; + + explicit GLDevice(void* nativeHandle); + bool onLockContext() override; + void onUnlockContext() override; + virtual bool onMakeCurrent() = 0; + virtual void onClearCurrent() = 0; + + friend class GLContext; +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/GLFunctions.h b/include/tgfx/opengl/GLFunctions.h new file mode 100644 index 00000000..1a130c82 --- /dev/null +++ b/include/tgfx/opengl/GLFunctions.h @@ -0,0 +1,364 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include +#include +#include "tgfx/opengl/GLDefines.h" + +namespace tgfx { +#if !defined(GL_FUNCTION_TYPE) +#if defined(_WIN32) && !defined(_WIN32_WCE) && !defined(__SCITECH_SNAP__) +#define GL_FUNCTION_TYPE __stdcall +#else +#define GL_FUNCTION_TYPE +#endif +#endif + +typedef intptr_t GLintptr; +typedef intptr_t GLsizeiptr; + +using GLActiveTexture = void GL_FUNCTION_TYPE(unsigned texture); +using GLAttachShader = void GL_FUNCTION_TYPE(unsigned program, unsigned shader); +using GLBindAttribLocation = void GL_FUNCTION_TYPE(unsigned program, unsigned index, + const char* name); +using GLBindBuffer = void GL_FUNCTION_TYPE(unsigned target, unsigned buffer); +using GLBindVertexArray = void GL_FUNCTION_TYPE(unsigned vertexArray); +using GLBindFramebuffer = void GL_FUNCTION_TYPE(unsigned target, unsigned framebuffer); +using GLBindRenderbuffer = void GL_FUNCTION_TYPE(unsigned target, unsigned renderbuffer); +using GLBindTexture = void GL_FUNCTION_TYPE(unsigned target, unsigned texture); +using GLBlendColor = void GL_FUNCTION_TYPE(float red, float green, float blue, float alpha); +using GLBlendEquation = void GL_FUNCTION_TYPE(unsigned mode); +using GLBlendEquationSeparate = void GL_FUNCTION_TYPE(unsigned modeRGB, unsigned modeAlpha); +using GLBlendFunc = void GL_FUNCTION_TYPE(unsigned sfactor, unsigned dfactor); +using GLBlendFuncSeparate = void GL_FUNCTION_TYPE(unsigned sfactorRGB, unsigned dfactorRGB, + unsigned sfactorAlpha, unsigned dfactorAlpha); +using GLBufferData = void GL_FUNCTION_TYPE(unsigned target, GLsizeiptr size, const void* data, + unsigned usage); +using GLBufferSubData = void GL_FUNCTION_TYPE(unsigned target, GLintptr offset, GLsizeiptr size, + const void* data); +using GLCheckFramebufferStatus = unsigned GL_FUNCTION_TYPE(unsigned target); +using GLClear = void GL_FUNCTION_TYPE(unsigned mask); +using GLClearColor = void GL_FUNCTION_TYPE(float red, float green, float blue, float alpha); +using GLClearDepthf = void GL_FUNCTION_TYPE(float depth); +using GLClearStencil = void GL_FUNCTION_TYPE(int s); +using GLColorMask = void GL_FUNCTION_TYPE(unsigned char red, unsigned char green, + unsigned char blue, unsigned char alpha); +using GLCompileShader = void GL_FUNCTION_TYPE(unsigned shader); +using GLCompressedTexImage2D = void GL_FUNCTION_TYPE(unsigned target, int level, + unsigned internalformat, int width, int height, + int border, int imageSize, const void* data); +using GLCompressedTexSubImage2D = void GL_FUNCTION_TYPE(unsigned target, int level, int xoffset, + int yoffset, int width, int height, + unsigned format, int imageSize, + const void* data); +using GLCopyTexSubImage2D = void GL_FUNCTION_TYPE(unsigned target, int level, int xoffset, + int yoffset, int x, int y, int width, int height); +using GLCreateProgram = unsigned GL_FUNCTION_TYPE(); +using GLCreateShader = unsigned GL_FUNCTION_TYPE(unsigned type); +using GLCullFace = void GL_FUNCTION_TYPE(unsigned mode); +using GLDeleteBuffers = void GL_FUNCTION_TYPE(int n, const unsigned* buffers); +using GLDeleteFramebuffers = void GL_FUNCTION_TYPE(int n, const unsigned* framebuffers); +using GLDeleteProgram = void GL_FUNCTION_TYPE(unsigned program); +using GLDeleteRenderbuffers = void GL_FUNCTION_TYPE(int n, const unsigned* renderbuffers); +using GLDeleteShader = void GL_FUNCTION_TYPE(unsigned shader); +using GLDeleteSync = void GL_FUNCTION_TYPE(void* sync); +using GLDeleteTextures = void 
GL_FUNCTION_TYPE(int n, const unsigned* textures); +using GLDeleteVertexArrays = void GL_FUNCTION_TYPE(int n, const unsigned* vertexArrays); +using GLDepthFunc = void GL_FUNCTION_TYPE(unsigned func); +using GLDepthMask = void GL_FUNCTION_TYPE(unsigned char flag); +using GLDisable = void GL_FUNCTION_TYPE(unsigned cap); +using GLDisableVertexAttribArray = void GL_FUNCTION_TYPE(unsigned index); +using GLDrawArrays = void GL_FUNCTION_TYPE(unsigned mode, int first, int count); +using GLDrawElements = void GL_FUNCTION_TYPE(unsigned mode, int count, unsigned type, + const void* indices); +using GLEnable = void GL_FUNCTION_TYPE(unsigned cap); +using GLIsEnabled = unsigned char GL_FUNCTION_TYPE(unsigned cap); +using GLEnableVertexAttribArray = void GL_FUNCTION_TYPE(unsigned index); +using GLFenceSync = void* GL_FUNCTION_TYPE(unsigned condition, unsigned flags); +using GLFinish = void GL_FUNCTION_TYPE(); +using GLFlush = void GL_FUNCTION_TYPE(); +using GLFramebufferRenderbuffer = void GL_FUNCTION_TYPE(unsigned target, unsigned attachment, + unsigned renderbuffertarget, + unsigned renderbuffer); +using GLFramebufferTexture2D = void GL_FUNCTION_TYPE(unsigned target, unsigned attachment, + unsigned textarget, unsigned texture, + int level); +using GLFramebufferTexture2DMultisample = void GL_FUNCTION_TYPE(unsigned target, + unsigned attachment, + unsigned textarget, + unsigned texture, int level, + int samples); +using GLFrontFace = void GL_FUNCTION_TYPE(unsigned mode); +using GLGenBuffers = void GL_FUNCTION_TYPE(int n, unsigned* buffers); +using GLGenVertexArrays = void GL_FUNCTION_TYPE(int n, unsigned* vertexArrays); +using GLGenFramebuffers = void GL_FUNCTION_TYPE(int n, unsigned* framebuffers); +using GLGenerateMipmap = void GL_FUNCTION_TYPE(unsigned target); +using GLGenRenderbuffers = void GL_FUNCTION_TYPE(int n, unsigned* renderbuffers); +using GLGenTextures = void GL_FUNCTION_TYPE(int n, unsigned* textures); +using GLGetBooleanv = void GL_FUNCTION_TYPE(unsigned pname, unsigned char* data); +using GLGetBufferParameteriv = void GL_FUNCTION_TYPE(unsigned target, unsigned pname, int* params); +using GLGetError = unsigned GL_FUNCTION_TYPE(); +using GLGetFramebufferAttachmentParameteriv = void GL_FUNCTION_TYPE(unsigned target, + unsigned attachment, + unsigned pname, int* params); +using GLGetIntegerv = void GL_FUNCTION_TYPE(unsigned pname, int* params); +using GLGetInternalformativ = void GL_FUNCTION_TYPE(unsigned target, unsigned internalformat, + unsigned pname, int bufSize, int* params); +using GLGetProgramInfoLog = void GL_FUNCTION_TYPE(unsigned program, int bufsize, int* length, + char* infolog); +using GLGetProgramiv = void GL_FUNCTION_TYPE(unsigned program, unsigned pname, int* params); +using GLGetRenderbufferParameteriv = void GL_FUNCTION_TYPE(unsigned target, unsigned pname, + int* params); +using GLGetShaderInfoLog = void GL_FUNCTION_TYPE(unsigned shader, int bufsize, int* length, + char* infolog); +using GLGetShaderiv = void GL_FUNCTION_TYPE(unsigned shader, unsigned pname, int* params); +using GLGetShaderPrecisionFormat = void GL_FUNCTION_TYPE(unsigned shadertype, + unsigned precisiontype, int* range, + int* precision); +using GLGetString = const unsigned char* GL_FUNCTION_TYPE(unsigned name); +using GLGetStringi = const unsigned char* GL_FUNCTION_TYPE(unsigned name, unsigned index); +using GLGetVertexAttribiv = void GL_FUNCTION_TYPE(unsigned index, unsigned pname, int* params); +using GLGetVertexAttribPointerv = void GL_FUNCTION_TYPE(unsigned index, unsigned pname, + void** pointer); 
+using GLGetAttribLocation = int GL_FUNCTION_TYPE(unsigned program, const char* name); +using GLGetUniformLocation = int GL_FUNCTION_TYPE(unsigned program, const char* name); +using GLIsTexture = unsigned char GL_FUNCTION_TYPE(unsigned texture); +using GLLineWidth = void GL_FUNCTION_TYPE(float width); +using GLLinkProgram = void GL_FUNCTION_TYPE(unsigned program); +using GLPixelStorei = void GL_FUNCTION_TYPE(unsigned pname, int param); +using GLReadPixels = void GL_FUNCTION_TYPE(int x, int y, int width, int height, unsigned format, + unsigned type, void* pixels); +using GLRenderbufferStorage = void GL_FUNCTION_TYPE(unsigned target, unsigned internalformat, + int width, int height); +using GLRenderbufferStorageMultisample = void GL_FUNCTION_TYPE(unsigned target, int samples, + unsigned internalformat, int width, + int height); +using GLRenderbufferStorageMultisampleAPPLE = void GL_FUNCTION_TYPE(unsigned target, int samples, + unsigned internalformat, + int width, int height); +using GLRenderbufferStorageMultisampleEXT = void GL_FUNCTION_TYPE(unsigned target, int samples, + unsigned internalformat, + int width, int height); +using GLResolveMultisampleFramebuffer = void GL_FUNCTION_TYPE(); +using GLBlitFramebuffer = void GL_FUNCTION_TYPE(int srcX0, int srcY0, int srcX1, int srcY1, + int dstX0, int dstY0, int dstX1, int dstY1, + unsigned mask, unsigned filter); +using GLScissor = void GL_FUNCTION_TYPE(int x, int y, int width, int height); +using GLShaderSource = void GL_FUNCTION_TYPE(unsigned shader, int count, const char* const* str, + const int* length); +using GLStencilFunc = void GL_FUNCTION_TYPE(unsigned func, int ref, unsigned mask); +using GLStencilFuncSeparate = void GL_FUNCTION_TYPE(unsigned face, unsigned func, int ref, + unsigned mask); +using GLStencilMask = void GL_FUNCTION_TYPE(unsigned mask); +using GLStencilMaskSeparate = void GL_FUNCTION_TYPE(unsigned face, unsigned mask); +using GLStencilOp = void GL_FUNCTION_TYPE(unsigned fail, unsigned zfail, unsigned zpass); +using GLStencilOpSeparate = void GL_FUNCTION_TYPE(unsigned face, unsigned fail, unsigned zfail, + unsigned zpass); +using GLTexImage2D = void GL_FUNCTION_TYPE(unsigned target, int level, int internalformat, + int width, int height, int border, unsigned format, + unsigned type, const void* pixels); +using GLTexParameterf = void GL_FUNCTION_TYPE(unsigned target, unsigned pname, float param); +using GLTexParameterfv = void GL_FUNCTION_TYPE(unsigned target, unsigned pname, + const float* params); +using GLTexParameteri = void GL_FUNCTION_TYPE(unsigned target, unsigned pname, int param); +using GLTexParameteriv = void GL_FUNCTION_TYPE(unsigned target, unsigned pname, const int* params); +using GLTexSubImage2D = void GL_FUNCTION_TYPE(unsigned target, int level, int xoffset, int yoffset, + int width, int height, unsigned format, unsigned type, + const void* pixels); +using GLTextureBarrier = void GL_FUNCTION_TYPE(); +using GLUniform1f = void GL_FUNCTION_TYPE(int location, float v0); +using GLUniform1i = void GL_FUNCTION_TYPE(int location, int v0); +using GLUniform1fv = void GL_FUNCTION_TYPE(int location, int count, const float* v); +using GLUniform1iv = void GL_FUNCTION_TYPE(int location, int count, const int* v); +using GLUniform2f = void GL_FUNCTION_TYPE(int location, float v0, float v1); +using GLUniform2i = void GL_FUNCTION_TYPE(int location, int v0, int v1); +using GLUniform2fv = void GL_FUNCTION_TYPE(int location, int count, const float* v); +using GLUniform2iv = void GL_FUNCTION_TYPE(int location, int count, const 
int* v); +using GLUniform3f = void GL_FUNCTION_TYPE(int location, float v0, float v1, float v2); +using GLUniform3i = void GL_FUNCTION_TYPE(int location, int v0, int v1, int v2); +using GLUniform3fv = void GL_FUNCTION_TYPE(int location, int count, const float* v); +using GLUniform3iv = void GL_FUNCTION_TYPE(int location, int count, const int* v); +using GLUniform4f = void GL_FUNCTION_TYPE(int location, float v0, float v1, float v2, float v3); +using GLUniform4i = void GL_FUNCTION_TYPE(int location, int v0, int v1, int v2, int v3); +using GLUniform4fv = void GL_FUNCTION_TYPE(int location, int count, const float* v); +using GLUniform4iv = void GL_FUNCTION_TYPE(int location, int count, const int* v); +using GLUniformMatrix2fv = void GL_FUNCTION_TYPE(int location, int count, unsigned char transpose, + const float* value); +using GLUniformMatrix3fv = void GL_FUNCTION_TYPE(int location, int count, unsigned char transpose, + const float* value); +using GLUniformMatrix4fv = void GL_FUNCTION_TYPE(int location, int count, unsigned char transpose, + const float* value); +using GLUseProgram = void GL_FUNCTION_TYPE(unsigned program); +using GLVertexAttrib1f = void GL_FUNCTION_TYPE(unsigned indx, float value); +using GLVertexAttrib2fv = void GL_FUNCTION_TYPE(unsigned indx, const float* values); +using GLVertexAttrib3fv = void GL_FUNCTION_TYPE(unsigned indx, const float* values); +using GLVertexAttrib4fv = void GL_FUNCTION_TYPE(unsigned indx, const float* values); +using GLVertexAttribPointer = void GL_FUNCTION_TYPE(unsigned indx, int size, unsigned type, + unsigned char normalized, int stride, + const void* ptr); +using GLViewport = void GL_FUNCTION_TYPE(int x, int y, int width, int height); +using GLWaitSync = void GL_FUNCTION_TYPE(void* sync, unsigned flags, uint64_t timeout); + +class Context; + +/** + * GLFunctions holds the OpenGL function pointers. + */ +class GLFunctions { + public: + /** + * Returns the GLFunctions pointer associated with the specified context. 
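As a usage sketch (not part of this header): once a valid tgfx::Context exists, Get() — declared just below — returns the function table for it, and raw GL calls go through the stored pointers. The helper name ClearToBlack and the assumption that the caller already owns a Context are illustrative only.

    #include "tgfx/opengl/GLFunctions.h"

    // Minimal sketch: clear the currently bound framebuffer through the function table.
    // `context` is assumed to be a valid tgfx::Context obtained elsewhere (e.g. from a GLDevice).
    void ClearToBlack(tgfx::Context* context) {
      auto gl = tgfx::GLFunctions::Get(context);
      if (gl == nullptr) {
        return;
      }
      gl->clearColor(0.0f, 0.0f, 0.0f, 1.0f);
      gl->clear(0x00004000);  // GL_COLOR_BUFFER_BIT; the named constant presumably lives in GLDefines.h
    }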
+ */ + static const GLFunctions* Get(const Context* context); + + GLActiveTexture* activeTexture = nullptr; + GLAttachShader* attachShader = nullptr; + GLBindAttribLocation* bindAttribLocation = nullptr; + GLBindBuffer* bindBuffer = nullptr; + GLBindFramebuffer* bindFramebuffer = nullptr; + GLBindRenderbuffer* bindRenderbuffer = nullptr; + GLBindTexture* bindTexture = nullptr; + GLBindVertexArray* bindVertexArray = nullptr; + GLBlendColor* blendColor = nullptr; + GLBlendEquation* blendEquation = nullptr; + GLBlendEquationSeparate* blendEquationSeparate = nullptr; + GLBlendFunc* blendFunc = nullptr; + GLBlendFuncSeparate* blendFuncSeparate = nullptr; + GLBufferData* bufferData = nullptr; + GLBufferSubData* bufferSubData = nullptr; + GLCheckFramebufferStatus* checkFramebufferStatus = nullptr; + GLClear* clear = nullptr; + GLClearColor* clearColor = nullptr; + GLClearDepthf* clearDepthf = nullptr; + GLClearStencil* clearStencil = nullptr; + GLColorMask* colorMask = nullptr; + GLCompileShader* compileShader = nullptr; + GLCompressedTexImage2D* compressedTexImage2D = nullptr; + GLCompressedTexSubImage2D* compressedTexSubImage2D = nullptr; + GLCopyTexSubImage2D* copyTexSubImage2D = nullptr; + GLCreateProgram* createProgram = nullptr; + GLCreateShader* createShader = nullptr; + GLCullFace* cullFace = nullptr; + GLDeleteBuffers* deleteBuffers = nullptr; + GLDeleteFramebuffers* deleteFramebuffers = nullptr; + GLDeleteProgram* deleteProgram = nullptr; + GLDeleteRenderbuffers* deleteRenderbuffers = nullptr; + GLDeleteShader* deleteShader = nullptr; + GLDeleteSync* deleteSync = nullptr; + GLDeleteTextures* deleteTextures = nullptr; + GLDeleteVertexArrays* deleteVertexArrays = nullptr; + GLDepthFunc* depthFunc = nullptr; + GLDepthMask* depthMask = nullptr; + GLDisable* disable = nullptr; + GLDisableVertexAttribArray* disableVertexAttribArray = nullptr; + GLDrawArrays* drawArrays = nullptr; + GLDrawElements* drawElements = nullptr; + GLEnable* enable = nullptr; + GLIsEnabled* isEnabled = nullptr; + GLEnableVertexAttribArray* enableVertexAttribArray = nullptr; + GLFenceSync* fenceSync = nullptr; + GLFinish* finish = nullptr; + GLFlush* flush = nullptr; + GLFramebufferRenderbuffer* framebufferRenderbuffer = nullptr; + GLFramebufferTexture2D* framebufferTexture2D = nullptr; + GLFramebufferTexture2DMultisample* framebufferTexture2DMultisample = nullptr; + GLFrontFace* frontFace = nullptr; + GLGenBuffers* genBuffers = nullptr; + GLGenFramebuffers* genFramebuffers = nullptr; + GLGenerateMipmap* generateMipmap = nullptr; + GLGenRenderbuffers* genRenderbuffers = nullptr; + GLGenTextures* genTextures = nullptr; + GLGenVertexArrays* genVertexArrays = nullptr; + GLGetBufferParameteriv* getBufferParameteriv = nullptr; + GLGetError* getError = nullptr; + GLGetFramebufferAttachmentParameteriv* getFramebufferAttachmentParameteriv = nullptr; + GLGetIntegerv* getIntegerv = nullptr; + GLGetInternalformativ* getInternalformativ = nullptr; + GLGetBooleanv* getBooleanv = nullptr; + GLGetProgramInfoLog* getProgramInfoLog = nullptr; + GLGetProgramiv* getProgramiv = nullptr; + GLGetRenderbufferParameteriv* getRenderbufferParameteriv = nullptr; + GLGetShaderInfoLog* getShaderInfoLog = nullptr; + GLGetShaderiv* getShaderiv = nullptr; + GLGetShaderPrecisionFormat* getShaderPrecisionFormat = nullptr; + GLGetString* getString = nullptr; + GLGetStringi* getStringi = nullptr; + GLGetVertexAttribiv* getVertexAttribiv = nullptr; + GLGetVertexAttribPointerv* getVertexAttribPointerv = nullptr; + GLGetAttribLocation* getAttribLocation = 
nullptr; + GLGetUniformLocation* getUniformLocation = nullptr; + GLIsTexture* isTexture = nullptr; + GLLineWidth* lineWidth = nullptr; + GLLinkProgram* linkProgram = nullptr; + GLPixelStorei* pixelStorei = nullptr; + GLReadPixels* readPixels = nullptr; + GLRenderbufferStorage* renderbufferStorage = nullptr; + GLRenderbufferStorageMultisample* renderbufferStorageMultisample = nullptr; + GLRenderbufferStorageMultisampleAPPLE* renderbufferStorageMultisampleAPPLE = nullptr; + GLRenderbufferStorageMultisampleEXT* renderbufferStorageMultisampleEXT = nullptr; + GLResolveMultisampleFramebuffer* resolveMultisampleFramebuffer = nullptr; + GLBlitFramebuffer* blitFramebuffer = nullptr; + GLScissor* scissor = nullptr; + GLShaderSource* shaderSource = nullptr; + GLStencilFunc* stencilFunc = nullptr; + GLStencilFuncSeparate* stencilFuncSeparate = nullptr; + GLStencilMask* stencilMask = nullptr; + GLStencilMaskSeparate* stencilMaskSeparate = nullptr; + GLStencilOp* stencilOp = nullptr; + GLStencilOpSeparate* stencilOpSeparate = nullptr; + GLTexImage2D* texImage2D = nullptr; + GLTexParameterf* texParameterf = nullptr; + GLTexParameterfv* texParameterfv = nullptr; + GLTexParameteri* texParameteri = nullptr; + GLTexParameteriv* texParameteriv = nullptr; + GLTexSubImage2D* texSubImage2D = nullptr; + GLTextureBarrier* textureBarrier = nullptr; + GLUniform1f* uniform1f = nullptr; + GLUniform1i* uniform1i = nullptr; + GLUniform1fv* uniform1fv = nullptr; + GLUniform1iv* uniform1iv = nullptr; + GLUniform2f* uniform2f = nullptr; + GLUniform2i* uniform2i = nullptr; + GLUniform2fv* uniform2fv = nullptr; + GLUniform2iv* uniform2iv = nullptr; + GLUniform3f* uniform3f = nullptr; + GLUniform3i* uniform3i = nullptr; + GLUniform3fv* uniform3fv = nullptr; + GLUniform3iv* uniform3iv = nullptr; + GLUniform4f* uniform4f = nullptr; + GLUniform4i* uniform4i = nullptr; + GLUniform4fv* uniform4fv = nullptr; + GLUniform4iv* uniform4iv = nullptr; + GLUniformMatrix2fv* uniformMatrix2fv = nullptr; + GLUniformMatrix3fv* uniformMatrix3fv = nullptr; + GLUniformMatrix4fv* uniformMatrix4fv = nullptr; + GLUseProgram* useProgram = nullptr; + GLVertexAttrib1f* vertexAttrib1f = nullptr; + GLVertexAttrib2fv* vertexAttrib2fv = nullptr; + GLVertexAttrib3fv* vertexAttrib3fv = nullptr; + GLVertexAttrib4fv* vertexAttrib4fv = nullptr; + GLVertexAttribPointer* vertexAttribPointer = nullptr; + GLViewport* viewport = nullptr; + GLWaitSync* waitSync = nullptr; +}; + +} // namespace tgfx \ No newline at end of file diff --git a/include/tgfx/opengl/GLTypes.h b/include/tgfx/opengl/GLTypes.h new file mode 100644 index 00000000..cd4434a2 --- /dev/null +++ b/include/tgfx/opengl/GLTypes.h @@ -0,0 +1,54 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * Types for interacting with GL textures created externally to TGFX. + */ +struct GLTextureInfo { + /** + * the id of this texture. + */ + unsigned id = 0; + /** + * The target of this texture. + */ + unsigned target = 0x0DE1; // GL_TEXTURE_2D; + /** + * The pixel format of this texture. + */ + unsigned format = 0x8058; // GL_RGBA8; +}; + +/** + * Types for interacting with GL frame buffers created externally to tgfx. + */ +struct GLFrameBufferInfo { + /** + * The id of this frame buffer. + */ + unsigned id = 0; + + /** + * The pixel format of this frame buffer. + */ + unsigned format = 0x8058; // GL_RGBA8; +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/cgl/CGLDevice.h b/include/tgfx/opengl/cgl/CGLDevice.h new file mode 100644 index 00000000..ab7d9ce6 --- /dev/null +++ b/include/tgfx/opengl/cgl/CGLDevice.h @@ -0,0 +1,57 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/opengl/GLDevice.h" + +namespace tgfx { +class CGLDevice : public GLDevice { + public: + /** + * Creates an CGL device with adopted CGL context. + */ + static std::shared_ptr MakeAdopted(CGLContextObj cglContext); + + ~CGLDevice() override; + + bool sharableWith(void* nativeHandle) const override; + + CGLContextObj cglContext() const; + + protected: + bool onMakeCurrent() override; + void onClearCurrent() override; + + private: + NSOpenGLContext* glContext = nil; + CGLContextObj oldContext = nil; + CVOpenGLTextureCacheRef textureCache = nil; + + static std::shared_ptr Wrap(CGLContextObj cglContext, bool isAdopted = false); + + explicit CGLDevice(CGLContextObj cglContext); + + CVOpenGLTextureCacheRef getTextureCache(); + + friend class GLDevice; + friend class CGLWindow; + friend class Texture; +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/cgl/CGLWindow.h b/include/tgfx/opengl/cgl/CGLWindow.h new file mode 100644 index 00000000..692869e8 --- /dev/null +++ b/include/tgfx/opengl/cgl/CGLWindow.h @@ -0,0 +1,43 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
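For illustration only: wrapping a texture created outside of tgfx amounts to filling in GLTextureInfo as defined above. The helper below is hypothetical; the texture id is assumed to come from the host's own GL code, and the target/format values simply restate the documented defaults.

    #include "tgfx/opengl/GLTypes.h"

    // Sketch: describe an externally created GL texture so tgfx can wrap it later.
    tgfx::GLTextureInfo DescribeExternalTexture(unsigned textureID) {
      tgfx::GLTextureInfo info = {};
      info.id = textureID;   // created by the host, e.g. via glGenTextures()
      info.target = 0x0DE1;  // GL_TEXTURE_2D, same as the default
      info.format = 0x8058;  // GL_RGBA8, same as the default
      return info;
    }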
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "CGLDevice.h" +#include "tgfx/gpu/Window.h" + +namespace tgfx { +class CGLWindow : public Window { + public: + /** + * Creates a new window from an NSView with specified shared context. + */ + static std::shared_ptr MakeFrom(NSView* view, CGLContextObj sharedContext = nullptr); + + ~CGLWindow() override; + + protected: + std::shared_ptr onCreateSurface(Context* context) override; + void onPresent(Context* context, int64_t presentationTime) override; + + private: + NSView* view = nil; + + CGLWindow(std::shared_ptr device, NSView* view); +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/eagl/EAGLDevice.h b/include/tgfx/opengl/eagl/EAGLDevice.h new file mode 100644 index 00000000..b46c2707 --- /dev/null +++ b/include/tgfx/opengl/eagl/EAGLDevice.h @@ -0,0 +1,66 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#import +#include "tgfx/opengl/GLDevice.h" + +namespace tgfx { +class EAGLDevice : public GLDevice { + public: + /** + * Creates an EAGL device with adopted EAGL context. 
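A minimal Objective-C++ sketch of the adoption path documented here, assuming the host app already owns an EAGLContext; the helper name AdoptHostContext is illustrative, and MakeAdopted() is assumed to return nullptr when the context cannot be wrapped.

    #include "tgfx/opengl/eagl/EAGLDevice.h"

    // Sketch only: wrap a context the host application created and still owns.
    std::shared_ptr<tgfx::GLDevice> AdoptHostContext(EAGLContext* hostContext) {
      auto device = tgfx::EAGLDevice::MakeAdopted(hostContext);
      return device;  // nullptr is assumed to mean the context was rejected
    }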
+ */ + static std::shared_ptr MakeAdopted(EAGLContext* eaglContext); + + ~EAGLDevice() override; + + bool sharableWith(void* nativeHandle) const override; + + EAGLContext* eaglContext() const; + + CVOpenGLESTextureCacheRef getTextureCache(); + + void releaseTexture(CVOpenGLESTextureRef texture); + + protected: + bool onMakeCurrent() override; + void onClearCurrent() override; + + private: + EAGLContext* _eaglContext = nil; + EAGLContext* oldContext = nil; + CVOpenGLESTextureCacheRef textureCache = nil; + size_t cacheArrayIndex = 0; + + static std::shared_ptr Wrap(EAGLContext* eaglContext, bool isAdopted); + static void NotifyReferenceReachedZero(EAGLDevice* device); + + explicit EAGLDevice(EAGLContext* eaglContext); + bool makeCurrent(bool force = false); + void clearCurrent(); + void finish(); + + friend class GLDevice; + friend class EAGLWindow; + + friend void ApplicationWillResignActive(); + friend void ApplicationDidBecomeActive(); +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/eagl/EAGLWindow.h b/include/tgfx/opengl/eagl/EAGLWindow.h new file mode 100644 index 00000000..c41ca1f6 --- /dev/null +++ b/include/tgfx/opengl/eagl/EAGLWindow.h @@ -0,0 +1,46 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "EAGLDevice.h" +#include "tgfx/gpu/Window.h" + +namespace tgfx { +class EAGLWindow : public Window { + public: + /** + * Creates a new window from a CAEAGLLayer with the specified device. + */ + static std::shared_ptr MakeFrom(CAEAGLLayer* layer, + std::shared_ptr device = nullptr); + + ~EAGLWindow() override; + + protected: + std::shared_ptr onCreateSurface(Context* context) override; + void onPresent(Context* context, int64_t presentationTime) override; + + private: + unsigned frameBufferID = 0; + GLuint colorBuffer = 0; + CAEAGLLayer* layer = nil; + + EAGLWindow(std::shared_ptr device, CAEAGLLayer* layer); +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/egl/EGLDevice.h b/include/tgfx/opengl/egl/EGLDevice.h new file mode 100644 index 00000000..dfb7cacb --- /dev/null +++ b/include/tgfx/opengl/egl/EGLDevice.h @@ -0,0 +1,75 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include "tgfx/opengl/GLDevice.h" + +#ifdef None +#undef None +#endif + +namespace tgfx { +class EGLDevice : public GLDevice { + public: + /** + * Creates an EGL device with adopted EGLDisplay, EGLSurface and EGLContext. + */ + static std::shared_ptr MakeAdopted(EGLDisplay eglDisplay, EGLSurface eglSurface, + EGLContext eglContext); + + ~EGLDevice() override; + + EGLDisplay getDisplay() const { + return eglDisplay; + } + + bool sharableWith(void* nativeHandle) const override; + + protected: + bool onMakeCurrent() override; + void onClearCurrent() override; + + private: + EGLDisplay eglDisplay = nullptr; + EGLSurface eglSurface = nullptr; + EGLContext eglContext = nullptr; + EGLContext shareContext = nullptr; + + EGLDisplay oldEglDisplay = nullptr; + EGLContext oldEglContext = nullptr; + EGLSurface oldEglReadSurface = nullptr; + EGLSurface oldEglDrawSurface = nullptr; + + static std::shared_ptr MakeFrom(EGLNativeWindowType nativeWindow, + EGLContext sharedContext = nullptr); + + static std::shared_ptr Wrap(EGLDisplay eglDisplay, EGLSurface eglSurface, + EGLContext eglContext, EGLContext shareContext, + bool isAdopted = false); + + explicit EGLDevice(void* nativeHandle); + void swapBuffers(int64_t timestamp = INT64_MIN); + + friend class GLDevice; + + friend class EGLWindow; +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/egl/EGLGlobals.h b/include/tgfx/opengl/egl/EGLGlobals.h new file mode 100644 index 00000000..3da93442 --- /dev/null +++ b/include/tgfx/opengl/egl/EGLGlobals.h @@ -0,0 +1,38 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
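A sketch of adopting whatever EGL objects are current on the calling thread with the MakeAdopted() overload above; the three egl* queries are standard EGL API, and the helper name AdoptCurrentEGL is illustrative.

    #include <EGL/egl.h>
    #include "tgfx/opengl/egl/EGLDevice.h"

    // Sketch only: wrap the display, draw surface, and context that are current on this thread.
    std::shared_ptr<tgfx::GLDevice> AdoptCurrentEGL() {
      EGLDisplay display = eglGetCurrentDisplay();
      EGLSurface surface = eglGetCurrentSurface(EGL_DRAW);
      EGLContext context = eglGetCurrentContext();
      return tgfx::EGLDevice::MakeAdopted(display, surface, context);
    }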
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include + +namespace tgfx { +class EGLGlobals { + public: + static void Set(const EGLGlobals* globals); + + static const EGLGlobals* Get(); + + EGLDisplay display = nullptr; + EGLConfig windowConfig = nullptr; + EGLConfig pbufferConfig = nullptr; + std::vector windowSurfaceAttributes; + std::vector pbufferSurfaceAttributes; +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/egl/EGLWindow.h b/include/tgfx/opengl/egl/EGLWindow.h new file mode 100644 index 00000000..c2aac189 --- /dev/null +++ b/include/tgfx/opengl/egl/EGLWindow.h @@ -0,0 +1,48 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "EGLDevice.h" +#include "tgfx/gpu/Window.h" + +namespace tgfx { +class EGLWindow : public Window { + public: + /** + * Returns an EGLWindow associated with current EGLSurface. Returns nullptr if there is no current + * EGLSurface on the calling thread. + */ + static std::shared_ptr Current(); + + /** + * Creates a new window from an EGL native window with specified shared context. + */ + static std::shared_ptr MakeFrom(EGLNativeWindowType nativeWindow, + EGLContext sharedContext = nullptr); + + protected: + std::shared_ptr onCreateSurface(Context* context) override; + void onPresent(Context* context, int64_t presentationTime) override; + + private: + EGLNativeWindowType nativeWindow; + + explicit EGLWindow(std::shared_ptr device); +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/qt/QGLDevice.h b/include/tgfx/opengl/qt/QGLDevice.h new file mode 100644 index 00000000..bd440a2a --- /dev/null +++ b/include/tgfx/opengl/qt/QGLDevice.h @@ -0,0 +1,93 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
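A sketch of creating a window for a native handle with the EGLWindow::MakeFrom() overload above; on Android the EGLNativeWindowType is typically an ANativeWindow*, and the handle is assumed to be supplied by the host application.

    #include "tgfx/opengl/egl/EGLWindow.h"

    // Sketch only: build a tgfx window around a native window handle owned by the host.
    std::shared_ptr<tgfx::Window> CreateWindowFor(EGLNativeWindowType nativeWindow) {
      auto window = tgfx::EGLWindow::MakeFrom(nativeWindow);
      return window;  // nullptr is assumed to mean the EGL surface could not be created
    }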
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#ifdef __APPLE__ +#include +#endif +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-copy" +#include +#include +#include +#pragma clang diagnostic pop +#include "tgfx/opengl/GLDevice.h" + +namespace tgfx { +class QGLDevice : public GLDevice { + public: + /** + * Creates a offscreen QT device with specified format and shared context. If format is not + * specified, QSurfaceFormat::defaultFormat() will be used. + * Note: Due to the fact that QOffscreenSurface is backed by a QWindow on some platforms, + * cross-platform applications must ensure that this method is only called on the main (GUI) + * thread. The returned QGLDevice is then safe to be used on other threads after calling + * moveToThread(), but the initialization and destruction must always happen on the main (GUI) + * thread. + */ + static std::shared_ptr Make(QOpenGLContext* sharedContext = nullptr, + QSurfaceFormat* format = nullptr); + + /** + * Creates an QT device with adopted EGLDisplay, EGLSurface and EGLContext. + */ + static std::shared_ptr MakeAdopted(QOpenGLContext* context, QSurface* surface); + + ~QGLDevice() override; + + bool sharableWith(void* nativeHandle) const override; + + /** + * Returns the native OpenGL context. + */ + QOpenGLContext* glContext() const { + return qtContext; + } + + /** + * Changes the thread affinity for this object and its children. + */ + void moveToThread(QThread* targetThread); + + protected: + bool onMakeCurrent() override; + void onClearCurrent() override; + + private: + QThread* ownerThread = nullptr; + QOpenGLContext* qtContext = nullptr; + QSurface* qtSurface = nullptr; + QOpenGLContext* oldContext = nullptr; + QSurface* oldSurface = nullptr; + + static std::shared_ptr Wrap(QOpenGLContext* context, QSurface* surface, + bool isAdopted); + + explicit QGLDevice(void* nativeHandle); + +#ifdef __APPLE__ + CVOpenGLTextureCacheRef textureCache = nil; + CVOpenGLTextureCacheRef getTextureCache(); +#endif + + friend class GLDevice; + friend class QGLWindow; + friend class Texture; +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/qt/QGLWindow.h b/include/tgfx/opengl/qt/QGLWindow.h new file mode 100644 index 00000000..6d515146 --- /dev/null +++ b/include/tgfx/opengl/qt/QGLWindow.h @@ -0,0 +1,73 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
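A sketch of the threading pattern described in the QGLDevice comments above: Make() runs on the main (GUI) thread, then the device is handed to a dedicated render thread. The renderThread parameter is an assumed QThread owned by the host application.

    #include <QThread>
    #include "tgfx/opengl/qt/QGLDevice.h"

    // Sketch only: call this from the main (GUI) thread.
    std::shared_ptr<tgfx::QGLDevice> CreateOffscreenDevice(QThread* renderThread) {
      auto device = tgfx::QGLDevice::Make();  // QSurfaceFormat::defaultFormat() is used
      if (device != nullptr) {
        device->moveToThread(renderThread);   // safe to use on renderThread afterwards
      }
      return device;
    }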
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-copy" +#include +#include +#pragma clang diagnostic pop +#include "QGLDevice.h" +#include "tgfx/gpu/DoubleBufferedWindow.h" + +namespace tgfx { +class Texture; +class GLRenderTarget; + +class QGLWindow : public DoubleBufferedWindow { + public: + ~QGLWindow() override; + + /** + * Creates a new QGLWindow from specified QQuickItem and shared context. + * Note: Due to the fact that QOffscreenSurface is backed by a QWindow on some platforms, + * cross-platform applications must ensure that this method is only called on the main (GUI) + * thread. The returned QGLWindow is then safe to be used on other threads after calling + * moveToThread(), but the initialization and destruction must always happen on the main (GUI) + * thread. + */ + static std::shared_ptr MakeFrom(QQuickItem* quickItem, + QOpenGLContext* sharedContext = nullptr); + + /** + * Changes the thread affinity for this object and its children. + */ + void moveToThread(QThread* targetThread); + + /** + * Returns the current QSGTexture for displaying. + */ + QSGTexture* getTexture(); + + protected: + std::shared_ptr onCreateSurface(Context* context) override; + void onSwapSurfaces(Context*) override; + + private: + std::mutex locker = {}; + bool textureInvalid = true; + QQuickItem* quickItem = nullptr; + QSGTexture* outTexture = nullptr; + std::shared_ptr frontTexture = nullptr; + std::shared_ptr backTexture = nullptr; + + QGLWindow(std::shared_ptr device, QQuickItem* quickItem); + void invalidateTexture(); +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/webgl/WebGLDevice.h b/include/tgfx/opengl/webgl/WebGLDevice.h new file mode 100644 index 00000000..abf9faf4 --- /dev/null +++ b/include/tgfx/opengl/webgl/WebGLDevice.h @@ -0,0 +1,52 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/opengl/GLDevice.h" + +namespace tgfx { +class WebGLDevice : public GLDevice { + public: + /** + * Creates a new WebGLDevice from a canvas. 
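A sketch of MakeFrom(), declared just below: the canvas is identified by an id/selector string. The "#hello" selector and the exact naming convention are assumptions for illustration.

    #include "tgfx/opengl/webgl/WebGLDevice.h"

    // Sketch only: create a WebGL-backed device for an existing <canvas id="hello"> element.
    std::shared_ptr<tgfx::GLDevice> MakeDeviceForCanvas() {
      auto device = tgfx::WebGLDevice::MakeFrom("#hello");
      return device;  // nullptr is assumed to mean context creation failed or no such canvas
    }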
+ */ + static std::shared_ptr MakeFrom(const std::string& canvasID); + + ~WebGLDevice() override; + + bool sharableWith(void* nativeHandle) const override; + + protected: + bool onMakeCurrent() override; + void onClearCurrent() override; + + private: + EMSCRIPTEN_WEBGL_CONTEXT_HANDLE context = 0; + EMSCRIPTEN_WEBGL_CONTEXT_HANDLE oldContext = 0; + + static std::shared_ptr Wrap(EMSCRIPTEN_WEBGL_CONTEXT_HANDLE context, + bool isAdopted = false); + + explicit WebGLDevice(EMSCRIPTEN_WEBGL_CONTEXT_HANDLE nativeHandle); + + friend class GLDevice; + friend class WebGLWindow; +}; +} // namespace tgfx diff --git a/include/tgfx/opengl/webgl/WebGLWindow.h b/include/tgfx/opengl/webgl/WebGLWindow.h new file mode 100644 index 00000000..1acffb39 --- /dev/null +++ b/include/tgfx/opengl/webgl/WebGLWindow.h @@ -0,0 +1,43 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "WebGLDevice.h" +#include "tgfx/gpu/Window.h" + +namespace tgfx { +class WebGLWindow : public Window { + public: + /** + * Creates a new window from a canvas. + */ + static std::shared_ptr MakeFrom(const std::string& canvasID); + + protected: + std::shared_ptr onCreateSurface(Context* context) override; + + void onPresent(Context*, int64_t) override { + } + + private: + std::string canvasID; + + explicit WebGLWindow(std::shared_ptr device); +}; +} // namespace tgfx diff --git a/include/tgfx/platform/HardwareBuffer.h b/include/tgfx/platform/HardwareBuffer.h new file mode 100644 index 00000000..ec041191 --- /dev/null +++ b/include/tgfx/platform/HardwareBuffer.h @@ -0,0 +1,87 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/ImageInfo.h" + +#if defined(__ANDROID__) || defined(ANDROID) +struct AHardwareBuffer; +#elif defined(__APPLE__) +struct __CVBuffer; +#endif + +namespace tgfx { +#if defined(__ANDROID__) || defined(ANDROID) +typedef AHardwareBuffer* HardwareBufferRef; +#elif defined(__APPLE__) +// __CVBuffer == CVPixelBufferRef +typedef __CVBuffer* HardwareBufferRef; +#else +typedef void* HardwareBufferRef; +#endif + +/** + * Returns true if the current platform has hardware buffer support. Otherwise, returns false. + */ +bool HardwareBufferAvailable(); + +/** + * Returns true if the given hardware buffer object is valid and can be bind to a texture. + * Otherwise, returns false. + */ +bool HardwareBufferCheck(HardwareBufferRef buffer); + +/** + * Allocates a hardware buffer for a given size and pixel format (alphaOnly). Returns nullptr if + * allocation fails. The returned buffer has a reference count of 1, and the caller must call + * HardwareBufferRelease() when finished with it. + */ +HardwareBufferRef HardwareBufferAllocate(int width, int height, bool alphaOnly = false); + +/** + * Retains a reference on the given hardware buffer object. This prevents the object from being + * deleted until the last reference is removed. + */ +HardwareBufferRef HardwareBufferRetain(HardwareBufferRef buffer); + +/** + * Removes a reference that was previously acquired with HardwareBufferRetain(). + */ +void HardwareBufferRelease(HardwareBufferRef buffer); + +/** + * Locks and returns the base address of the hardware buffer. Returns nullptr if the lock fails for + * any reason and leave the buffer unchanged. The caller must call HardwareBufferUnlock() when + * finished with the buffer. + */ +void* HardwareBufferLock(HardwareBufferRef buffer); + +/** + * Unlocks the base address of the hardware buffer. Call this to balance a successful call to + * HardwareBufferLock(). + */ +void HardwareBufferUnlock(HardwareBufferRef buffer); + +/** + * Returns an ImageInfo describing the width, height, color type, alpha type, and row bytes of the + * given hardware buffer object. Returns an empty ImageInfo if the buffer is nullptr or not + * recognized. + */ +ImageInfo HardwareBufferGetInfo(HardwareBufferRef buffer); +} // namespace tgfx diff --git a/include/tgfx/platform/NativeImage.h b/include/tgfx/platform/NativeImage.h new file mode 100644 index 00000000..e8503f16 --- /dev/null +++ b/include/tgfx/platform/NativeImage.h @@ -0,0 +1,53 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
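A sketch that strings the buffer functions above together: allocate, lock, zero-fill, unlock, release. The rowBytes() and height() accessors on the returned ImageInfo are assumed from tgfx/core/ImageInfo.h; the helper name is illustrative.

    #include <cstring>
    #include "tgfx/platform/HardwareBuffer.h"

    // Sketch only: allocate a platform buffer and zero it through the CPU mapping.
    void ZeroFillHardwareBuffer(int width, int height) {
      if (!tgfx::HardwareBufferAvailable()) {
        return;
      }
      auto buffer = tgfx::HardwareBufferAllocate(width, height);
      if (buffer == nullptr) {
        return;
      }
      if (void* pixels = tgfx::HardwareBufferLock(buffer)) {
        auto info = tgfx::HardwareBufferGetInfo(buffer);
        std::memset(pixels, 0, info.rowBytes() * static_cast<size_t>(info.height()));
        tgfx::HardwareBufferUnlock(buffer);
      }
      tgfx::HardwareBufferRelease(buffer);  // balances the reference returned by Allocate()
    }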
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#if defined(__EMSCRIPTEN__) +#include +#elif defined(__ANDROID__) || defined(ANDROID) + +class _jobject; + +#elif defined(__APPLE__) + +struct CGImage; + +#endif + +namespace tgfx { +#if defined(__EMSCRIPTEN__) + +typedef emscripten::val NativeImageRef; + +#elif defined(__ANDROID__) || defined(ANDROID) + +typedef _jobject* NativeImageRef; + +#elif defined(__APPLE__) + +typedef CGImage* NativeImageRef; + +#else + +struct NativeImage {}; + +typedef NativeImage* NativeImageRef; + +#endif +} // namespace tgfx \ No newline at end of file diff --git a/include/tgfx/platform/Print.h b/include/tgfx/platform/Print.h new file mode 100644 index 00000000..4b1a4e8f --- /dev/null +++ b/include/tgfx/platform/Print.h @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * Writes an output message pointed by format to the log facility of native platform. + */ +void PrintLog(const char format[], ...); + +/** + * Writes an error message pointed by format to the log facility of native platform. + */ +void PrintError(const char format[], ...); +} // namespace tgfx diff --git a/include/tgfx/platform/android/AndroidBitmap.h b/include/tgfx/platform/android/AndroidBitmap.h new file mode 100644 index 00000000..f7f20994 --- /dev/null +++ b/include/tgfx/platform/android/AndroidBitmap.h @@ -0,0 +1,46 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/platform/android/HardwareBufferJNI.h" + +namespace tgfx { +/** + * AndroidBitmap provides a utility to access an Android Bitmap object. 
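A sketch of the helper declared just below: GetHardwareBuffer() does not add a reference, so a caller that wants the buffer to outlive the Java Bitmap retains it explicitly. The function name PinHardwareBitmap is illustrative.

    #include "tgfx/platform/android/AndroidBitmap.h"

    // Sketch only: returns a retained buffer, or nullptr for non-HARDWARE bitmaps / API level < 30.
    tgfx::HardwareBufferRef PinHardwareBitmap(JNIEnv* env, jobject bitmap) {
      auto buffer = tgfx::AndroidBitmap::GetHardwareBuffer(env, bitmap);
      if (buffer == nullptr) {
        return nullptr;
      }
      // The caller must balance this retain with HardwareBufferRelease().
      return tgfx::HardwareBufferRetain(buffer);
    }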
+ */ +class AndroidBitmap { + public: + /** + * Returns an ImageInfo describing the width, height, color type, alpha type, and row bytes of the + * specified Android Bitmap object. Returns an empty ImageInfo if the config of the Android Bitmap + * is either 'ARGB_4444' or 'HARDWARE'. + */ + static ImageInfo GetInfo(JNIEnv* env, jobject bitmap); + + /** + * Retrieve the hardware buffer object associated with a HARDWARE Bitmap. This method does not + * acquire any additional reference to the returned HardwareBufferRef. To keep the returned + * HardwareBufferRef live after the Java Bitmap object got garbage collected, make sure to use + * HardwareBufferRetain() to acquire an additional reference. Returns nullptr if the config of the + * Bitmap is not 'HARDWARE' or the API level of the current system is less than 30. + */ + static HardwareBufferRef GetHardwareBuffer(JNIEnv* env, jobject bitmap); +}; +} // namespace tgfx diff --git a/include/tgfx/platform/android/Global.h b/include/tgfx/platform/android/Global.h new file mode 100644 index 00000000..e417d456 --- /dev/null +++ b/include/tgfx/platform/android/Global.h @@ -0,0 +1,82 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "JNIEnvironment.h" + +namespace tgfx { +/** + * The Global class manages the lifetime of JNI objects with global references. + */ +template +class Global { + public: + Global() = default; + + Global(const Global& that) = delete; + + Global& operator=(const Global& that) = delete; + + Global& operator=(const T& that) { + reset(that); + return *this; + } + + Global(T ref) { + reset(ref); + } + + ~Global() { + reset(); + } + + void reset() { + reset(nullptr); + } + + void reset(T localRef) { + if (ref == localRef) { + return; + } + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return; + } + if (ref != nullptr) { + env->DeleteGlobalRef(ref); + ref = nullptr; + } + if (localRef != nullptr) { + ref = static_cast(env->NewGlobalRef(localRef)); + } + } + + T get() const { + return ref; + } + + bool isEmpty() const { + return ref == nullptr; + } + + private: + T ref = nullptr; +}; +} // namespace tgfx diff --git a/include/tgfx/platform/android/HardwareBufferJNI.h b/include/tgfx/platform/android/HardwareBufferJNI.h new file mode 100644 index 00000000..dace899f --- /dev/null +++ b/include/tgfx/platform/android/HardwareBufferJNI.h @@ -0,0 +1,37 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. 
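A sketch of how the Global<T> wrapper above is typically held: assigning a local reference promotes it to a JNI global reference, and reset() or destruction deletes it again. The FrameListenerHolder class is purely illustrative.

    #include <jni.h>
    #include "tgfx/platform/android/Global.h"

    // Sketch only: keep a Java listener alive across JNI calls.
    class FrameListenerHolder {
     public:
      void setListener(jobject localListener) {
        listener = localListener;  // operator=() resets the Global to a new global reference
      }

      jobject currentListener() const {
        return listener.get();  // nullptr when nothing has been set
      }

     private:
      tgfx::Global<jobject> listener;
    };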
All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/platform/HardwareBuffer.h" + +namespace tgfx { +/** + * Return the HardwareBufferRef associated with a Java HardwareBuffer object, for interacting with + * it through native code. This method does not acquire any additional reference to the returned + * HardwareBufferRef. To keep the HardwareBufferRef live after the Java HardwareBuffer object got + * garbage collected, make sure to use HardwareBufferRetain() to acquire an additional reference. + */ +HardwareBufferRef HardwareBufferFromJavaObject(JNIEnv* env, jobject hardwareBufferObject); + +/** + * Return a new Java HardwareBuffer object that wraps the passed native HardwareBufferRef object. + */ +jobject HardwareBufferToJavaObject(JNIEnv* env, HardwareBufferRef hardwareBuffer); +} // namespace tgfx diff --git a/include/tgfx/platform/android/JNIEnvironment.h b/include/tgfx/platform/android/JNIEnvironment.h new file mode 100644 index 00000000..d63bf89a --- /dev/null +++ b/include/tgfx/platform/android/JNIEnvironment.h @@ -0,0 +1,65 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include + +namespace tgfx { +/** + * JNIEnvironment is a stack-allocated class that provides convenience method to access the JNIEnv + * pointer of the calling thread. All the JNI local references created with the JNIEnv will be freed + * automatically when the JNIEnvironment instance goes out of scope. + */ +class JNIEnvironment { + public: + /* + * Sets the global Java virtual machine used by the tgfx runtime to retrieve the JNI environment. + * The application should call SetJavaVM() in the JNI_OnLoad() function to initialize the Java VM + * and set it to nullptr in the JNI_OnUnload() function. Once a Java VM is set, it cannot be + * changed to another non-nullptr value afterward. Returns true on success, false otherwise. + */ + static bool SetJavaVM(JavaVM* javaVM); + + /** + * Allocates a JNIEnvironment instance. 
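A sketch of the recommended setup: register the VM once in JNI_OnLoad(), then use a stack-allocated JNIEnvironment (its current() accessor is declared below) wherever a JNIEnv* is needed. The CallIntoJava helper is illustrative.

    #include <jni.h>
    #include "tgfx/platform/android/JNIEnvironment.h"

    extern "C" JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
      tgfx::JNIEnvironment::SetJavaVM(vm);  // must happen before any other tgfx JNI usage
      return JNI_VERSION_1_6;
    }

    void CallIntoJava() {
      tgfx::JNIEnvironment environment;
      JNIEnv* env = environment.current();
      if (env == nullptr) {
        return;  // the calling thread could not be attached
      }
      // Local references created through env are freed when `environment` goes out of scope.
    }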
If the JNIEnv pointer of the calling thread does not + * exist, permanently attach a JNIEnv to the current thread and retrieve it. If successfully + * attached, the JNIEnv will automatically be detached at thread destruction. + */ + JNIEnvironment(); + + ~JNIEnvironment(); + + /** + * Returns the JNIEnv pointer of the calling thread. + */ + JNIEnv* current() const { + return env; + } + + void* operator new(size_t) = delete; + + private: + JNIEnv* env = nullptr; + + static JNIEnv* Current(); + + template + friend class Global; +}; +} // namespace tgfx diff --git a/include/tgfx/platform/android/SurfaceTextureReader.h b/include/tgfx/platform/android/SurfaceTextureReader.h new file mode 100644 index 00000000..1797fcb2 --- /dev/null +++ b/include/tgfx/platform/android/SurfaceTextureReader.h @@ -0,0 +1,66 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/ImageReader.h" + +namespace tgfx { +/** + * The SurfaceTextureReader class allows direct access to image data rendered into a SurfaceTexture + * object on the android platform. SurfaceTextureReader is safe across threads. + */ +class SurfaceTextureReader : public ImageReader { + public: + /** + * Creates a new SurfaceTextureReader with the specified image size and listener. Returns nullptr + * if the image size is zero or the listener is null. + * @param width The width of generated ImageBuffers. + * @param height The height of generated ImageBuffers. + * @param listener A java object that implements the SurfaceTexture.OnFrameAvailableListener + * interface. The implementation should make a native call to the notifyFrameAvailable() of the + * returned SurfaceTextureReader. + */ + static std::shared_ptr Make(int width, int height, jobject listener); + + /** + * Returns the Surface object used as the input to the SurfaceTextureReader. The release() method + * of the returned Surface will be called when the SurfaceTextureReader is released. + */ + jobject getInputSurface() const; + + /** + * Notifies the previously returned ImageBuffer is available for generating textures. The method + * should be called by the listener passed in when creating the reader. + */ + void notifyFrameAvailable(); + + /** + * Acquires the next ImageBuffer from the SurfaceTextureReader after a new image frame has been + * rendered into the associated input Surface instance. The returned ImageBuffer will be blocked + * when generating textures until the SurfaceTextureReader.notifyFrameAvailable() method is + * called. Note that the previously returned image buffers will immediately expire after the newly + * created ImageBuffer is drawn. 
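A sketch of the flow described above, with illustrative sizes and names: `listener` is assumed to be a Java object implementing SurfaceTexture.OnFrameAvailableListener whose callback calls back into notifyFrameAvailable(); the ImageBuffer return type follows the documentation.

    #include "tgfx/platform/android/SurfaceTextureReader.h"

    // Sketch only: create a reader, expose its input Surface to a producer, then pull a frame.
    std::shared_ptr<tgfx::ImageBuffer> ReadOneFrame(jobject listener) {
      auto reader = tgfx::SurfaceTextureReader::Make(1280, 720, listener);
      if (reader == nullptr) {
        return nullptr;
      }
      jobject inputSurface = reader->getInputSurface();
      (void)inputSurface;  // hand this Surface to the producer (camera, MediaCodec, ...)
      // After the Java listener fires and notifyFrameAvailable() has been called:
      return reader->acquireNextBuffer();
    }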
+ */ + std::shared_ptr acquireNextBuffer() override; + + private: + explicit SurfaceTextureReader(std::shared_ptr stream); +}; +} // namespace tgfx diff --git a/include/tgfx/platform/apple/CTTypeface.h b/include/tgfx/platform/apple/CTTypeface.h new file mode 100644 index 00000000..423b35b7 --- /dev/null +++ b/include/tgfx/platform/apple/CTTypeface.h @@ -0,0 +1,43 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/Typeface.h" + +namespace tgfx { + +/** + * NativeTypeface provides convenience functions to create typefaces from platform-specific + * CTFontRef handles and access the CTFontRef handle in a Typeface instance. + */ +class CTTypeface { + public: + /** + * Creates a typeface for the specified CTFontRef. Returns nullptr if the ctFont is nil. + */ + static std::shared_ptr MakeFromCTFont(CTFontRef ctFont); + + /** + * Returns the platform-specific CTFontRef handle for a given Typeface. Note that the returned + * CTFontRef gets released when the source Typeface is destroyed. + */ + static CTFontRef GetCTFont(const Typeface* typeface); +}; +} // namespace tgfx diff --git a/include/tgfx/platform/web/VideoElementReader.h b/include/tgfx/platform/web/VideoElementReader.h new file mode 100644 index 00000000..497ce391 --- /dev/null +++ b/include/tgfx/platform/web/VideoElementReader.h @@ -0,0 +1,55 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/ImageReader.h" + +namespace tgfx { +/** + * The VideoElementReader class allows direct access to image buffers rendered into a + * HTMLVideoElement on the web platform. 
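A sketch for the CTTypeface helpers above (unrelated to the VideoElementReader class that follows): wrap a CoreText font in a tgfx Typeface. The font name is an arbitrary example, and the release call assumes MakeFromCTFont() retains whatever it needs.

    #include <CoreText/CoreText.h>
    #include "tgfx/platform/apple/CTTypeface.h"

    // Sketch only: create a tgfx Typeface from a CoreText font.
    std::shared_ptr<tgfx::Typeface> MakeExampleTypeface() {
      CTFontRef ctFont = CTFontCreateWithName(CFSTR("PingFang SC"), 14.0f, nullptr);
      if (ctFont == nullptr) {
        return nullptr;
      }
      auto typeface = tgfx::CTTypeface::MakeFromCTFont(ctFont);
      CFRelease(ctFont);  // assumed safe: MakeFromCTFont() retains the font it keeps
      return typeface;
    }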
+ */ +class VideoElementReader : public ImageReader { + public: + /** + * Creates a new VideoElementReader from the specified HTMLVideoElement object and the video size. + * Returns nullptr if the video is null or the buffer size is zero. + */ + static std::shared_ptr MakeFrom(emscripten::val video, int width, int height); + + /** + * Acquires the next ImageBuffer from the VideoElementReader after a new image frame has been + * rendered into the associated HTMLVideoElement. Note that the previously returned image + * buffers will immediately expire after the newly created ImageBuffer is drawn. + */ + std::shared_ptr acquireNextBuffer() override; + + /** + * Acquires the next ImageBuffer from the VideoElementReader after a new image frame is about to + * be rendered into the associated HTMLVideoElement. The returned ImageBuffer will call the + * promise.await() method before generating textures. Note that the previously returned image + * buffers will immediately expire after the newly created ImageBuffer is drawn. + */ + std::shared_ptr acquireNextBuffer(emscripten::val promise); + + private: + explicit VideoElementReader(std::shared_ptr stream); +}; +} // namespace tgfx diff --git a/include/tgfx/platform/web/WebCodec.h b/include/tgfx/platform/web/WebCodec.h new file mode 100644 index 00000000..4b36340b --- /dev/null +++ b/include/tgfx/platform/web/WebCodec.h @@ -0,0 +1,44 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * WebImageCodec provides convenience functions to enable/disable the async support for decoding web + * images. + */ +class WebCodec { + public: + /** + * Returns true if the async support for decoding web images is enabled. The default value is + * false. + */ + static bool AsyncSupport(); + + /** + * Enables or disables the async support for decoding web images. If set to true, the ImageBuffers + * generated from the web platform will not be fully decoded buffers. Instead, they will trigger + * promise-awaiting calls before generating textures, which can speed up the process of decoding + * multiple images simultaneously. Do not set it to true if your rendering process may require + * multiple flush() calls to the screen Surface during one single frame. Otherwise, it may result + * in screen tearing. 
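   *
   * A minimal sketch, assuming the app knows it issues only a single flush() per frame:
   *
   *   tgfx::WebCodec::SetAsyncSupport(true);  // let web image decoding overlap across frames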
+ */ + static void SetAsyncSupport(bool enabled); +}; +} // namespace tgfx diff --git a/include/tgfx/utils/Buffer.h b/include/tgfx/utils/Buffer.h new file mode 100644 index 00000000..0af52a28 --- /dev/null +++ b/include/tgfx/utils/Buffer.h @@ -0,0 +1,144 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/Data.h" + +namespace tgfx { +/** + * An object representing a mutable byte buffer in memory. + */ +class Buffer { + public: + /** + * Creates an empty Buffer. + */ + Buffer() = default; + + /** + * Creates a new Buffer with the specified length. The contents are uninitialized. + */ + explicit Buffer(size_t size); + + /** + * Creates a new Buffer by copying the specified data. + */ + Buffer(const void* data, size_t size); + + /** + * Creates a new Buffer by copying the specified Data object. + */ + explicit Buffer(std::shared_ptr data); + + ~Buffer(); + + /** + * Allocates the buffer memory with the specified size. Returns true if the memory is allocated + * successfully. + */ + bool alloc(size_t size); + + /** + * Returns the memory address of the data. + */ + void* data() const { + return _data; + } + + /** + * Returns the memory address of the data, but in this case it is cast to uint8_t*, to make it + * easy to add an offset to it. + */ + uint8_t* bytes() const { + return _data; + } + + /** + * Returns the byte size. + */ + size_t size() const { + return _size; + } + + /** + * Returns true if the Buffer is empty. + */ + bool isEmpty() const { + return _size == 0; + } + + /** + * Sets the Buffer to empty, and deletes the internal byte data. + */ + void reset(); + + /** + * Sets all bytes of the buffer to zero. + */ + void clear(); + + /** + * Copies a range of the buffer bytes into a Data object. If offset or length is out of range, + * they are clamped to the beginning and end of the buffer. Returns nullptr if the clamped length + * is 0. + */ + std::shared_ptr copyRange(size_t offset, size_t length); + + /** + * Writes a byte array into the specified range of the buffer. If offset or length is out of + * range, they are clamped to the beginning and end of the buffer. The buffer will be unchanged if + * the clamped length is 0. + */ + void writeRange(size_t offset, size_t length, const void* bytes); + + /** + * Writes a Data object into the specified offset of the buffer. If offset or data size is out of + * range, they are clamped to the beginning and end of the buffer. The buffer will be unchanged if + * the clamped data size is 0. 
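   *
   * A minimal sketch of filling a Buffer and handing its bytes off as immutable Data ('header'
   * is a placeholder for an existing std::shared_ptr<Data> no larger than the buffer):
   *
   *   Buffer buffer(1024);           // uninitialized storage
   *   buffer.clear();                // zero all bytes
   *   buffer.writeRange(0, header);  // copy the Data object to offset 0
   *   auto data = buffer.release();  // transfer ownership; the Buffer becomes empty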
+ */ + void writeRange(size_t offset, std::shared_ptr data) { + writeRange(offset, data->size(), data->data()); + } + + /** + * Transfers the ownership of the managed bytes to the returned Data object, and set this Buffer + * to empty. Returns nullptr if the Buffer is already empty. + */ + std::shared_ptr release(); + + /** + * Returns one byte. + */ + uint8_t operator[](size_t index) const; + + /** + * Returns the reference of one byte. + */ + uint8_t& operator[](size_t index); + + private: + uint8_t* _data = nullptr; + size_t _size = 0; + + Buffer(Buffer&) = delete; + Buffer& operator=(Buffer&) = delete; + size_t getClampedLength(size_t offset, size_t length) const; +}; +} // namespace tgfx diff --git a/include/tgfx/utils/BytesKey.h b/include/tgfx/utils/BytesKey.h new file mode 100644 index 00000000..c253dd47 --- /dev/null +++ b/include/tgfx/utils/BytesKey.h @@ -0,0 +1,82 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include +#include + +namespace tgfx { +/** + * A key used for hashing a byte stream. + */ +class BytesKey { + public: + /** + * Returns true if this key is valid. + */ + bool isValid() const { + return !values.empty(); + } + + /** + * Writes an uint32 value into the key. + */ + void write(uint32_t value); + + /** + * Writes a pointer value into the key. + */ + void write(const void* value); + + /** + * Writes a uint32 value into the key. + */ + void write(const uint8_t value[4]); + + /** + * Writes a float value into the key. + */ + void write(float value); + + friend bool operator==(const BytesKey& a, const BytesKey& b) { + return a.values == b.values; + } + + bool operator<(const BytesKey& key) const { + return values < key.values; + } + + private: + std::vector values = {}; + + friend struct BytesKeyHasher; +}; + +/** + * The hasher for BytesKey. + */ +struct BytesKeyHasher { + size_t operator()(const BytesKey& key) const; +}; + +template +using BytesKeyMap = std::unordered_map; +} // namespace tgfx diff --git a/include/tgfx/utils/Clock.h b/include/tgfx/utils/Clock.h new file mode 100644 index 00000000..9cb20f8e --- /dev/null +++ b/include/tgfx/utils/Clock.h @@ -0,0 +1,72 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include + +namespace tgfx { +/** + * A utility class used to compute relative time. + */ +class Clock { + public: + /** + * Returns the number of microseconds since the TGFX runtime was initialized. + */ + static int64_t Now(); + + /** + * Creates a new Clock object. + */ + Clock(); + + /** + * Resets the start time of the Clock to Now() and clear all markers. + */ + void reset(); + + /** + * Returns the number of microseconds elapsed since the Clock was initialized or reset. + */ + int64_t elapsedTime(); + + /** + * Creates a timestamp in the Clock with the given name. If the specified name already exists, + * throws an error and leaves the markers unchanged. + */ + void mark(const std::string& name); + + /** + * Returns the number of microseconds from the 'makerFrom' to the 'makerTo'. + * - If the 'makerTo' is empty, returns the number of microseconds from the 'makerFrom' to now. + * - If the 'makerFrom' is empty, returns the number of microseconds from the time when the Clock + * was initialized to the 'makerTo'. + * - If both of the markers are empty, returns the number of microseconds since the Clock was + * initialized. + * - If either of the markers does not exist, throws an error and returns 0. + */ + int64_t measure(const std::string& makerFrom = "", const std::string& markerTo = "") const; + + private: + int64_t startTime = 0; + std::unordered_map markers = {}; +}; +} // namespace tgfx diff --git a/include/tgfx/utils/DataView.h b/include/tgfx/utils/DataView.h new file mode 100644 index 00000000..e304a0a0 --- /dev/null +++ b/include/tgfx/utils/DataView.h @@ -0,0 +1,259 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include + +namespace tgfx { +/** + * ByteOrder describe how computers organize the bytes that make up numbers. + */ +enum class ByteOrder { + /** + * Stores bytes in order of least-to-most-significant, where the least significant byte takes the + * first or lowest address. + */ + LittleEndian, + + /** + * The opposite order to 'LittleEndian'. 
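   *
   * For example, the 32-bit value 0x11223344 is stored as the byte sequence 44 33 22 11 in
   * LittleEndian order and as 11 22 33 44 in BigEndian order.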
+ */ + BigEndian +}; + +/** + * The DataView view provides a low-level interface for reading and writing multiple number types in + * a byte buffer, without having to care about the platform's endianness. DataView does not try to + * manage the lifetime of the byte buffer. The caller must ensure that the byte buffer is valid for + * the lifetime of the DataView. + */ +class DataView { + public: + /** + * Creates an empty DataView without a byte buffer. + */ + DataView() = default; + + /** + * Creates a new DataView with a read-only byte buffer. + */ + DataView(const uint8_t* buffer, size_t length, ByteOrder byteOrder = ByteOrder::LittleEndian); + + /** + * Creates a new DataView with a writable byte buffer. + */ + DataView(uint8_t* buffer, size_t length, ByteOrder byteOrder = ByteOrder::LittleEndian); + + /** + * Sets the DataView to empty, bytes to nullptr, and leave the byte order unchanged. + */ + void reset(); + + /** + * Sets the DataView to a read-only byte buffer, and leave the byte order unchanged. + */ + void reset(const uint8_t* bytes, size_t length); + + /** + * Set the DataView to a writable byte buffer, and leave the byte order unchanged. + */ + void reset(uint8_t* bytes, size_t length); + + /** + * Returns true if the Buffer is empty. + */ + bool isEmpty() const { + return _size == 0; + } + + /** + * Returns the pixel address, the base address corresponding to the pixel origin. + */ + const uint8_t* bytes() const { + return _bytes; + } + + /** + * Returns the writable pixel address, the base address corresponding to the pixel origin. Returns + * nullptr if the Pixmap is constructed from read-only pixels. + */ + uint8_t* writableBytes() const { + return _writableBytes; + } + + /** + * Returns the byte size. + */ + size_t size() const { + return _size; + } + + /** + * Returns the DataView's byte order. The byte order is used when reading or writing multibyte + * values. The default value is LittleEndian. + */ + ByteOrder byteOrder() const { + return _byteOrder; + } + + /** + * Sets the DataView's byte order. + */ + void setByteOrder(ByteOrder order) { + _byteOrder = order; + } + + /** + * Returns a boolean value from the bytes at the specified byte offset. A signed 8-bit integer is + * read, and true is returned if the integer is nonzero, false otherwise. + */ + bool getBoolean(size_t offset) const; + + /** + * Returns a signed 8-bit integer from the bytes at the specified byte offset. + */ + int8_t getInt8(size_t offset) const; + + /** + * Returns an unsigned 8-bit integer from the bytes at the specified byte offset. + */ + uint8_t getUint8(size_t offset) const; + + /** + * Returns a signed 16-bit integer from the bytes at the specified byte offset. + */ + int16_t getInt16(size_t offset) const; + + /** + * Returns an unsigned 16-bit integer from the bytes at the specified byte offset. + */ + uint16_t getUint16(size_t offset) const; + + /** + * Returns a signed 32-bit integer from the bytes at the specified byte offset. + */ + int32_t getInt32(size_t offset) const; + + /** + * Returns an unsigned 32-bit integer from the bytes at the specified byte offset. + */ + uint32_t getUint32(size_t offset) const; + + /** + * Returns a signed 64-bit integer from the bytes at the specified byte offset. + */ + int64_t getInt64(size_t offset) const; + + /** + * Returns an unsigned 64-bit integer from the bytes at the specified byte offset. 
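   *
   * A minimal sketch of reading a big-endian field (the byte values are illustrative only):
   *
   *   uint8_t bytes[] = {0x00, 0x00, 0x01, 0x00};
   *   DataView view(bytes, sizeof(bytes), ByteOrder::BigEndian);
   *   uint32_t length = view.getUint32(0);  // 256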
+ */ + uint64_t getUint64(size_t offset) const; + + /** + * Returns an IEEE 754 single-precision (32-bit) floating-point number from the bytes at the + * specified byte offset. + */ + float getFloat(size_t offset) const; + + /** + * Returns an IEEE 754 double-precision (64-bit) floating-point number from the bytes at the + * specified byte offset. + */ + double getDouble(size_t offset) const; + + /** + * Sets a boolean value to the bytes at the specified byte offset. A signed 8-bit integer is + * written according to the value parameter, either 1 if true or 0 if false. Returns false if the + * offset is out of range or the DataView is constructed from read-only bytes. + */ + bool setBoolean(size_t offset, bool value); + + /** + * Sets an 8-bit signed integer to the bytes at the specified byte offset. Returns false if the + * offset is out of range or the DataView is constructed from read-only bytes. + */ + bool setInt8(size_t offset, int8_t value); + + /** + * Sets an unsigned 8-bit integer to the bytes at the specified byte offset. Returns false if + * the offset is out of range or the DataView is constructed from read-only bytes. + */ + bool setUint8(size_t offset, uint8_t value); + + /** + * Sets a 16-bit signed integer to the bytes at the specified byte offset. Returns false if the + * offset is out of range or the DataView is constructed from read-only bytes. + */ + bool setInt16(size_t offset, int16_t value); + + /** + * Sets an unsigned 16-bit integer to the bytes at the specified byte offset. Returns false if + * the offset is out of range or the DataView is constructed from read-only bytes. + */ + bool setUint16(size_t offset, uint16_t value); + + /** + * Sets a 32-bit signed integer to the bytes at the specified byte offset. Returns false if the + * offset is out of range or the DataView is constructed from read-only bytes. + */ + bool setInt32(size_t offset, int32_t value); + + /** + * Sets an unsigned 32-bit integer to the bytes at the specified byte offset. Returns false if + * the offset is out of range or the DataView is constructed from read-only bytes. + */ + bool setUint32(size_t offset, uint32_t value); + + /** + * Sets a 64-bit signed integer to the bytes at the specified byte offset. Returns false if the + * offset is out of range or the DataView is constructed from read-only bytes. + */ + bool setInt64(size_t offset, int64_t value); + + /** + * Sets an unsigned 64-bit integer to the bytes at the specified byte offset. Returns false if + * the offset is out of range or the DataView is constructed from read-only bytes. + */ + bool setUint64(size_t offset, uint64_t value); + + /** + * Sets an IEEE 754 single-precision (32-bit) floating-point number to the bytes at the + * specified byte offset. Returns false if the offset is out of range or the DataView is + * constructed from read-only bytes. + */ + bool setFloat(size_t offset, float value); + + /** + * Sets an IEEE 754 double-precision (64-bit) floating-point number to the bytes at the specified + * byte offset. Returns false if the offset is out of range or the DataView is constructed from + * read-only bytes. 
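   *
   * A minimal sketch (the first write succeeds on a writable view, the second fails because that
   * view wraps read-only bytes):
   *
   *   uint8_t storage[8] = {};
   *   DataView writable(storage, sizeof(storage));
   *   bool ok = writable.setDouble(0, 1.5);  // true
   *
   *   const uint8_t* readOnly = storage;
   *   DataView view(readOnly, sizeof(storage));
   *   ok = view.setDouble(0, 1.5);           // false, read-only bytes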
+ */ + bool setDouble(size_t offset, double value); + + private: + const uint8_t* _bytes = nullptr; + uint8_t* _writableBytes = nullptr; + size_t _size = 0; + ByteOrder _byteOrder = ByteOrder::LittleEndian; + + bool readData(size_t offset, uint8_t* data, size_t byteSize) const; + bool writeData(size_t offset, const uint8_t* data, size_t byteSize); +}; +} // namespace tgfx diff --git a/include/tgfx/utils/Stream.h b/include/tgfx/utils/Stream.h new file mode 100644 index 00000000..35d2a168 --- /dev/null +++ b/include/tgfx/utils/Stream.h @@ -0,0 +1,68 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include + +namespace tgfx { +/** + * Stream represents a source of bytes. Subclasses can be backed by memory, or a file, or something + * else. Stream is not thread safe. + */ +class Stream { + public: + virtual ~Stream() = default; + + /** + * Attempts to open the specified file as a stream, returns nullptr on failure. + */ + static std::unique_ptr MakeFromFile(const std::string& filePath); + + /** + * Returns the total length of the stream. If this cannot be done, returns 0. + */ + virtual size_t size() const = 0; + + /** + * Seeks to an absolute position in the stream. If this cannot be done, returns false. If an + * attempt is made to seek past the end of the stream, the position will be set to the end of the + * stream. + */ + virtual bool seek(size_t position) = 0; + + /** + * Seeks to an relative offset in the stream. If this cannot be done, returns false. If an attempt + * is made to move to a position outside the stream, the position will be set to the closest point + * within the stream (beginning or end). + */ + virtual bool move(int offset) = 0; + + /** + * Reads size number of bytes. Copy size bytes into buffer, return how many were copied. + */ + virtual size_t read(void* buffer, size_t size) = 0; + + /** + * Rewinds to the beginning of the stream. Returns true if the stream is known to be at the + * beginning after this call returns. + */ + virtual bool rewind() = 0; +}; +} // namespace tgfx diff --git a/include/tgfx/utils/Task.h b/include/tgfx/utils/Task.h new file mode 100644 index 00000000..be84f6ec --- /dev/null +++ b/include/tgfx/utils/Task.h @@ -0,0 +1,83 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include +#include + +namespace tgfx { +class TaskGroup; + +/** + * The Task class manages the concurrent execution of one or more code blocks. + */ +class Task { + public: + /** + * Submits a code block for asynchronous execution immediately and returns a Task wraps the code + * block. Hold a reference to the returned Task if you want to cancel it or wait for it to finish + * execution. Returns nullptr if the block is nullptr. + */ + static std::shared_ptr Run(std::function block); + + /** + * Returns true if the Task is currently executing its code block. + */ + bool executing(); + + /** + * Returns true if the Task has been cancelled + */ + bool cancelled(); + + /** + * Returns true if the Task has finished executing its code block. + */ + bool finished(); + + /** + * Advises the Task that it should stop executing its code block. Cancellation does not affect the + * execution of a Task that has already begun. + */ + void cancel(); + + /** + * Blocks the current thread until the Task finishes its execution. Returns immediately if the + * Task is finished or canceled. The task may be executed on the calling thread if it is not + * cancelled and still in the queue. + */ + void wait(); + + private: + std::mutex locker = {}; + std::condition_variable condition = {}; + bool _executing = true; + bool _cancelled = false; + std::function block = nullptr; + + explicit Task(std::function block); + bool removeTask(); + void execute(); + + friend class TaskGroup; +}; + +} // namespace tgfx diff --git a/include/tgfx/utils/UTF.h b/include/tgfx/utils/UTF.h new file mode 100644 index 00000000..faca171a --- /dev/null +++ b/include/tgfx/utils/UTF.h @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include + +namespace tgfx { +class UTF { + public: + /** + * Given a sequence of UTF-8 bytes, return the number of unicode codepoints. If the sequence is + * invalid UTF-8, return -1. + */ + static int CountUTF8(const char* utf8, size_t byteLength); + + /** + * Given a sequence of UTF-8 bytes, return the first unicode codepoint. 
The pointer will be + * incremented to point at the next codepoint's start. If invalid UTF-8 is encountered, set *ptr + * to end and return -1. + */ + static int32_t NextUTF8(const char** ptr, const char* end); +}; + +} // namespace tgfx diff --git a/resources/readme/TGFX.jpg b/resources/readme/TGFX.jpg new file mode 100644 index 00000000..527243fe --- /dev/null +++ b/resources/readme/TGFX.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b2c8ca890cf5dc247e4daaa4b14872f124812cca7ca4032dd03b99cf7294815c +size 60149 diff --git a/src/codecs/jpeg/JpegCodec.cpp b/src/codecs/jpeg/JpegCodec.cpp new file mode 100644 index 00000000..1dd3c6ff --- /dev/null +++ b/src/codecs/jpeg/JpegCodec.cpp @@ -0,0 +1,249 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "codecs/jpeg/JpegCodec.h" +#include +#include "tgfx/core/Pixmap.h" +#include "tgfx/utils/Buffer.h" +#include "utils/OrientationHelper.h" + +extern "C" { +#include "jerror.h" +#include "jpeglib.h" +} + +namespace tgfx { + +bool JpegCodec::IsJpeg(const std::shared_ptr& data) { + constexpr uint8_t jpegSig[] = {0xFF, 0xD8, 0xFF}; + return data->size() >= 3 && !memcmp(data->bytes(), jpegSig, sizeof(jpegSig)); +} + +const uint32_t kExifHeaderSize = 14; +const uint32_t kExifMarker = JPEG_APP0 + 1; + +static bool is_orientation_marker(jpeg_marker_struct* marker, EncodedOrigin* encodedOrigin) { + if (kExifMarker != marker->marker || marker->data_length < kExifHeaderSize) { + return false; + } + + constexpr uint8_t kExifSig[]{'E', 'x', 'i', 'f', '\0'}; + if (memcmp(marker->data, kExifSig, sizeof(kExifSig)) != 0) { + return false; + } + + // Account for 'E', 'x', 'i', 'f', '\0', ''. 
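  // The identifier is six bytes long ("Exif" followed by a NUL and a pad byte), so the TIFF
  // structure that carries the orientation entry starts at offset 6 of the marker payload.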
+ constexpr size_t kOffset = 6; + return is_orientation_marker(marker->data + kOffset, marker->data_length - kOffset, + encodedOrigin); +} + +static EncodedOrigin get_exif_orientation(jpeg_decompress_struct* dinfo) { + EncodedOrigin origin; + for (jpeg_marker_struct* marker = dinfo->marker_list; marker; marker = marker->next) { + if (is_orientation_marker(marker, &origin)) { + return origin; + } + } + return EncodedOrigin::TopLeft; +} + +struct my_error_mgr { + struct jpeg_error_mgr pub; + jmp_buf setjmp_buffer; +}; + +std::shared_ptr JpegCodec::MakeFrom(const std::string& filePath) { + return MakeFromData(filePath, nullptr); +} + +std::shared_ptr JpegCodec::MakeFrom(std::shared_ptr imageBytes) { + return MakeFromData("", std::move(imageBytes)); +} + +std::shared_ptr JpegCodec::MakeFromData(const std::string& filePath, + std::shared_ptr byteData) { + FILE* infile = nullptr; + if (byteData == nullptr && (infile = fopen(filePath.c_str(), "rb")) == nullptr) { + return nullptr; + } + jpeg_decompress_struct cinfo = {}; + my_error_mgr jerr = {}; + cinfo.err = jpeg_std_error(&jerr.pub); + EncodedOrigin encodedOrigin = EncodedOrigin::TopLeft; + do { + if (setjmp(jerr.setjmp_buffer)) break; + jpeg_create_decompress(&cinfo); + if (infile) { + jpeg_stdio_src(&cinfo, infile); + } else { + jpeg_mem_src(&cinfo, byteData->bytes(), byteData->size()); + } + jpeg_save_markers(&cinfo, kExifMarker, 0xFFFF); + if (jpeg_read_header(&cinfo, TRUE) != JPEG_HEADER_OK) break; + encodedOrigin = get_exif_orientation(&cinfo); + } while (false); + jpeg_destroy_decompress(&cinfo); + if (infile) fclose(infile); + if (cinfo.image_width == 0 || cinfo.image_height == 0) { + return nullptr; + } + return std::shared_ptr(new JpegCodec(static_cast(cinfo.image_width), + static_cast(cinfo.image_height), + encodedOrigin, filePath, std::move(byteData))); +} + +bool JpegCodec::readPixels(const ImageInfo& dstInfo, void* dstPixels) const { + if (dstPixels == nullptr || dstInfo.isEmpty()) { + return false; + } + if (dstInfo.colorType() == ColorType::ALPHA_8) { + memset(dstPixels, 255, dstInfo.rowBytes() * height()); + return true; + } + Bitmap bitmap = {}; + auto outPixels = dstPixels; + auto outRowBytes = dstInfo.rowBytes(); + J_COLOR_SPACE out_color_space; + switch (dstInfo.colorType()) { + case ColorType::RGBA_8888: + out_color_space = JCS_EXT_RGBA; + break; + case ColorType::BGRA_8888: + out_color_space = JCS_EXT_BGRA; + break; + case ColorType::Gray_8: + out_color_space = JCS_GRAYSCALE; + break; + case ColorType::RGB_565: + out_color_space = JCS_RGB565; + break; + default: + auto success = bitmap.allocPixels(dstInfo.width(), dstInfo.height(), false, false); + if (!success) { + return false; + } + out_color_space = JCS_EXT_RGBA; + break; + } + Pixmap pixmap(bitmap); + if (!pixmap.isEmpty()) { + outPixels = pixmap.writablePixels(); + outRowBytes = pixmap.rowBytes(); + } + FILE* infile = nullptr; + if (fileData == nullptr && (infile = fopen(filePath.c_str(), "rb")) == nullptr) { + return false; + } + jpeg_decompress_struct cinfo = {}; + my_error_mgr jerr = {}; + cinfo.err = jpeg_std_error(&jerr.pub); + bool result = false; + do { + if (setjmp(jerr.setjmp_buffer)) break; + jpeg_create_decompress(&cinfo); + if (infile) { + jpeg_stdio_src(&cinfo, infile); + } else { + jpeg_mem_src(&cinfo, fileData->bytes(), fileData->size()); + } + if (jpeg_read_header(&cinfo, TRUE) != JPEG_HEADER_OK) { + break; + } + cinfo.out_color_space = out_color_space; + if (!jpeg_start_decompress(&cinfo)) { + break; + } + JSAMPROW pRow[1]; + int line = 0; + 
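  // Decode one scanline per loop iteration, writing each row either directly into the caller's
  // pixels or into the temporary RGBA bitmap allocated above for unsupported color types.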
JDIMENSION h = height(); + while (cinfo.output_scanline < h) { + pRow[0] = (JSAMPROW)(static_cast(outPixels) + outRowBytes * line); + jpeg_read_scanlines(&cinfo, pRow, 1); + line++; + } + result = jpeg_finish_decompress(&cinfo); + } while (false); + jpeg_destroy_decompress(&cinfo); + if (infile) { + fclose(infile); + } + if (result && !pixmap.isEmpty()) { + pixmap.readPixels(dstInfo, dstPixels); + } + return result; +} + +#ifdef TGFX_USE_JPEG_ENCODE +std::shared_ptr JpegCodec::Encode(const Pixmap& pixmap, int quality) { + auto srcPixels = static_cast(const_cast(pixmap.pixels())); + int srcRowBytes = static_cast(pixmap.rowBytes()); + jpeg_compress_struct cinfo = {}; + jpeg_error_mgr jerr = {}; + JSAMPROW row_pointer[1]; + cinfo.err = jpeg_std_error(&jerr); + jpeg_create_compress(&cinfo); + uint8_t* dstBuffer = nullptr; + unsigned long dstBufferSize = 0; // NOLINT + jpeg_mem_dest(&cinfo, &dstBuffer, &dstBufferSize); + cinfo.image_width = pixmap.width(); + cinfo.image_height = pixmap.height(); + Buffer buffer = {}; + switch (pixmap.colorType()) { + case ColorType::RGBA_8888: + cinfo.in_color_space = JCS_EXT_RGBA; + cinfo.input_components = 4; + break; + case ColorType::BGRA_8888: + cinfo.in_color_space = JCS_EXT_BGRA; + cinfo.input_components = 4; + break; + case ColorType::Gray_8: + cinfo.in_color_space = JCS_GRAYSCALE; + cinfo.input_components = 1; + break; + default: + auto info = ImageInfo::Make(pixmap.width(), pixmap.height(), ColorType::RGBA_8888); + buffer.alloc(info.byteSize()); + if (buffer.isEmpty()) { + return nullptr; + } + srcPixels = buffer.bytes(); + srcRowBytes = static_cast(info.rowBytes()); + Pixmap(info, srcPixels).writePixels(pixmap.info(), pixmap.pixels()); + cinfo.in_color_space = JCS_EXT_RGBA; + cinfo.input_components = 4; + break; + } + jpeg_set_defaults(&cinfo); + cinfo.optimize_coding = TRUE; + jpeg_set_quality(&cinfo, quality, TRUE); + jpeg_start_compress(&cinfo, TRUE); + while (cinfo.next_scanline < cinfo.image_height) { + row_pointer[0] = &(srcPixels)[cinfo.next_scanline * srcRowBytes]; + (void)jpeg_write_scanlines(&cinfo, row_pointer, 1); + } + + /* similar to read file, clean up after we're done compressing */ + jpeg_finish_compress(&cinfo); + jpeg_destroy_compress(&cinfo); + return Data::MakeAdopted(dstBuffer, dstBufferSize, Data::FreeProc); +} +#endif + +} // namespace tgfx \ No newline at end of file diff --git a/src/codecs/jpeg/JpegCodec.h b/src/codecs/jpeg/JpegCodec.h new file mode 100644 index 00000000..0950501f --- /dev/null +++ b/src/codecs/jpeg/JpegCodec.h @@ -0,0 +1,50 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/ImageCodec.h" + +namespace tgfx { +class JpegCodec : public ImageCodec { + public: + static std::shared_ptr MakeFrom(const std::string& filePath); + static std::shared_ptr MakeFrom(std::shared_ptr imageBytes); + static bool IsJpeg(const std::shared_ptr& data); + +#ifdef TGFX_USE_JPEG_ENCODE + static std::shared_ptr Encode(const Pixmap& pixmap, int quality); +#endif + + protected: + bool readPixels(const ImageInfo& dstInfo, void* dstPixels) const override; + + private: + std::shared_ptr fileData; + const std::string filePath; + + static std::shared_ptr MakeFromData(const std::string& filePath, + std::shared_ptr byteData); + explicit JpegCodec(int width, int height, EncodedOrigin origin, std::string filePath, + std::shared_ptr fileData) + : ImageCodec(width, height, origin), fileData(std::move(fileData)), + filePath(std::move(filePath)) { + } +}; + +} // namespace tgfx \ No newline at end of file diff --git a/src/codecs/png/PngCodec.cpp b/src/codecs/png/PngCodec.cpp new file mode 100644 index 00000000..c924396d --- /dev/null +++ b/src/codecs/png/PngCodec.cpp @@ -0,0 +1,306 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "codecs/png/PngCodec.h" +#include "png.h" +#include "tgfx/core/Pixmap.h" +#include "tgfx/utils/Buffer.h" + +namespace tgfx { +std::shared_ptr PngCodec::MakeFrom(const std::string& filePath) { + return MakeFromData(filePath, nullptr); +} + +std::shared_ptr PngCodec::MakeFrom(std::shared_ptr imageBytes) { + return MakeFromData("", std::move(imageBytes)); +} + +bool PngCodec::IsPng(const std::shared_ptr& data) { + constexpr png_byte png_signature[8] = {137, 80, 78, 71, 13, 10, 26, 10}; + return data->size() >= 8 && !memcmp(data->bytes(), &png_signature[0], 8); +} + +typedef struct { + const unsigned char* base; + const unsigned char* end; + const unsigned char* cursor; +} PngReader; + +static void PngReaderReadData(png_structp png_ptr, png_bytep data, png_size_t length) { + auto reader = static_cast(png_get_io_ptr(png_ptr)); + auto avail = static_cast(reader->end - reader->cursor); + if (avail > length) { + avail = length; + } + memcpy(data, reader->cursor, avail); + reader->cursor += avail; +} + +class ReadInfo { + public: + static std::shared_ptr Make(const std::string& filePath, + const std::shared_ptr& fileData) { + auto readInfo = std::make_shared(); + if (!filePath.empty()) { + readInfo->infile = fopen(filePath.c_str(), "rb"); + } + if (fileData == nullptr && readInfo->infile == nullptr) { + return nullptr; + } + readInfo->p = png_create_read_struct(PNG_LIBPNG_VER_STRING, nullptr, nullptr, nullptr); + if (readInfo->p == nullptr) { + return nullptr; + } + readInfo->pi = png_create_info_struct(readInfo->p); + if (readInfo->pi == nullptr) { + return nullptr; + } +#ifdef PNG_SET_OPTION_SUPPORTED + png_set_option(readInfo->p, PNG_MAXIMUM_INFLATE_WINDOW, PNG_OPTION_ON); +#endif + if (setjmp(png_jmpbuf(readInfo->p))) { + return nullptr; + } + if (readInfo->infile) { + png_init_io(readInfo->p, readInfo->infile); + } else { + readInfo->reader = new PngReader(); + readInfo->reader->base = fileData->bytes(); + readInfo->reader->end = fileData->bytes() + fileData->size(); + readInfo->reader->cursor = fileData->bytes(); + png_set_read_fn(readInfo->p, readInfo->reader, PngReaderReadData); + } + png_read_info(readInfo->p, readInfo->pi); + return readInfo; + } + + ~ReadInfo() { + if (p) { + png_destroy_read_struct(&p, &pi, nullptr); + } + if (data) { + free(data); + } + if (rowPtrs) { + free(rowPtrs); + } + if (infile) { + fclose(infile); + } + delete reader; + } + + png_structp p = nullptr; + png_infop pi = nullptr; + FILE* infile = nullptr; + PngReader* reader = nullptr; + unsigned char** rowPtrs = nullptr; + unsigned char* data = nullptr; +}; + +std::shared_ptr PngCodec::MakeFromData(const std::string& filePath, + std::shared_ptr byteData) { + auto readInfo = ReadInfo::Make(filePath, byteData); + if (readInfo == nullptr) { + return nullptr; + } + png_uint_32 w = 0, h = 0; + int colorType = -1; + png_get_IHDR(readInfo->p, readInfo->pi, &w, &h, nullptr, &colorType, nullptr, nullptr, nullptr); + if (w == 0 || h == 0) { + return nullptr; + } + bool isAlphaOnly = false; + if (colorType == PNG_COLOR_TYPE_GRAY_ALPHA) { + png_color_8p sigBits; + if (png_get_sBIT(readInfo->p, readInfo->pi, &sigBits)) { + if (8 == sigBits->alpha && 1 == sigBits->gray) { + isAlphaOnly = true; + } + } + } + return std::shared_ptr(new PngCodec(static_cast(w), static_cast(h), + EncodedOrigin::TopLeft, isAlphaOnly, filePath, + std::move(byteData))); +} + +static void UpdateReadInfo(png_structp p, png_infop pi) { + int 
originalColorType = png_get_color_type(p, pi); + int bitDepth = png_get_bit_depth(p, pi); + if (bitDepth == 16) { + png_set_strip_16(p); + } + if (originalColorType == PNG_COLOR_TYPE_PALETTE) { + png_set_palette_to_rgb(p); + } + if (originalColorType == PNG_COLOR_TYPE_GRAY && bitDepth < 8) { + png_set_expand_gray_1_2_4_to_8(p); + } + if (png_get_valid(p, pi, PNG_INFO_tRNS)) { + png_set_tRNS_to_alpha(p); + } + if (originalColorType == PNG_COLOR_TYPE_RGB || originalColorType == PNG_COLOR_TYPE_GRAY || + originalColorType == PNG_COLOR_TYPE_PALETTE) { + png_set_filler(p, 0xFF, PNG_FILLER_AFTER); + } + if (originalColorType == PNG_COLOR_TYPE_GRAY || originalColorType == PNG_COLOR_TYPE_GRAY_ALPHA) { + png_set_gray_to_rgb(p); + } + png_read_update_info(p, pi); +} + +bool PngCodec::readPixels(const ImageInfo& dstInfo, void* dstPixels) const { + auto readInfo = ReadInfo::Make(filePath, fileData); + if (readInfo == nullptr) { + return false; + } + int w = width(); + int h = height(); + if (h == 0 || w == 0) { + return false; + } + UpdateReadInfo(readInfo->p, readInfo->pi); + readInfo->rowPtrs = (unsigned char**)malloc(sizeof(unsigned char*) * h); + if (readInfo->rowPtrs == nullptr) { + return false; + } + if (dstInfo.colorType() == ColorType::RGBA_8888 && + dstInfo.alphaType() == AlphaType::Unpremultiplied) { + for (int i = 0; i < h; i++) { + readInfo->rowPtrs[i] = static_cast(dstPixels) + (dstInfo.rowBytes() * i); + } + png_read_image(readInfo->p, readInfo->rowPtrs); + return true; + } + ImageInfo info = ImageInfo::Make(w, h, ColorType::RGBA_8888, AlphaType::Unpremultiplied); + readInfo->data = (unsigned char*)malloc(info.byteSize()); + if (readInfo->data == nullptr) { + return false; + } + for (int i = 0; i < h; i++) { + readInfo->rowPtrs[i] = readInfo->data + (info.rowBytes() * i); + } + png_read_image(readInfo->p, readInfo->rowPtrs); + Pixmap pixmap(info, readInfo->data); + return pixmap.readPixels(dstInfo, dstPixels); +} + +bool PngCodec::isAlphaOnly() const { + return _isAlphaOnly; +} + +#ifdef TGFX_USE_PNG_ENCODE +struct PngWriter { + unsigned char* data = nullptr; + size_t length = 0; +}; + +static void png_reader_write_data(png_structp png_ptr, png_bytep data, png_size_t length) { + auto writer = static_cast(png_get_io_ptr(png_ptr)); + size_t nsize = writer->length + length; + if (writer->data) { + writer->data = static_cast(realloc(writer->data, nsize)); + } else { + writer->data = static_cast(malloc(nsize)); + } + memcpy(writer->data + writer->length, data, length); + writer->length += length; +} + +std::shared_ptr PngCodec::Encode(const Pixmap& pixmap, int) { + auto srcPixels = static_cast(const_cast((pixmap.pixels()))); + auto srcRowBytes = static_cast(pixmap.rowBytes()); + uint8_t* alphaPixels = nullptr; + Buffer tempBuffer; + if (pixmap.colorType() == ColorType::ALPHA_8) { + tempBuffer.alloc(pixmap.width() * 2); + alphaPixels = tempBuffer.bytes(); + srcRowBytes = static_cast(tempBuffer.size()); + } else if (pixmap.colorType() != ColorType::RGBA_8888 || + pixmap.alphaType() == AlphaType::Premultiplied) { + auto dstInfo = ImageInfo::Make(pixmap.width(), pixmap.height(), ColorType::RGBA_8888, + AlphaType::Unpremultiplied); + tempBuffer.alloc(dstInfo.byteSize()); + if (!pixmap.readPixels(dstInfo, tempBuffer.data())) { + return nullptr; + } + srcPixels = tempBuffer.bytes(); + srcRowBytes = static_cast(dstInfo.rowBytes()); + } + PngWriter pngWriter; + png_structp png_ptr = nullptr; + png_infop info_ptr = nullptr; + png_ptr = png_create_write_struct(PNG_LIBPNG_VER_STRING, nullptr, nullptr, 
nullptr); + bool encodeSuccess = false; + do { + if (png_ptr == nullptr) { + break; + } + info_ptr = png_create_info_struct(png_ptr); + if (info_ptr == nullptr) { + break; + } + if (setjmp(png_jmpbuf(png_ptr))) { + break; + } + png_color_8 sigBit = {8, 8, 8, 0, 8}; + int colorType = PNG_COLOR_TYPE_RGB_ALPHA; + if (pixmap.colorType() == ColorType::ALPHA_8) { + // We store ALPHA_8 images as GrayAlpha in png. If the gray channel is set to 1, we assume the + // gray channel can be ignored, and we output just alpha. We tried 0 at first, but png doesn't + // like a 0 sigBit for a channel it expects, hence we chose 1. + sigBit = {0, 0, 0, 1, 8}; + colorType = PNG_COLOR_TYPE_GRAY_ALPHA; + } + png_set_IHDR(png_ptr, info_ptr, pixmap.width(), pixmap.height(), 8, colorType, + PNG_INTERLACE_NONE, PNG_COMPRESSION_TYPE_BASE, PNG_FILTER_TYPE_BASE); + png_set_sBIT(png_ptr, info_ptr, &sigBit); + png_set_write_fn(png_ptr, &pngWriter, png_reader_write_data, nullptr); + png_write_info(png_ptr, info_ptr); + for (int i = 0; i < pixmap.height(); ++i) { + if (pixmap.colorType() == ColorType::ALPHA_8) { + // convert alpha8 to gray + for (int j = 0; j < pixmap.width(); j++) { + *(alphaPixels++) = 0; + *(alphaPixels++) = *(srcPixels++); + } + alphaPixels -= srcRowBytes; + srcPixels += (pixmap.rowBytes() - pixmap.width()); + png_write_row(png_ptr, reinterpret_cast(alphaPixels)); + } else { + png_write_row(png_ptr, reinterpret_cast(srcPixels)); + srcPixels += srcRowBytes; + } + } + png_write_end(png_ptr, info_ptr); + encodeSuccess = true; + } while (false); + if (png_ptr) { + png_destroy_read_struct(&png_ptr, &info_ptr, nullptr); + } + if (!encodeSuccess) { + if (pngWriter.data) { + free(pngWriter.data); + } + return nullptr; + } + return Data::MakeAdopted(pngWriter.data, pngWriter.length, Data::FreeProc); +} +#endif + +} // namespace tgfx diff --git a/src/codecs/png/PngCodec.h b/src/codecs/png/PngCodec.h new file mode 100644 index 00000000..4608026a --- /dev/null +++ b/src/codecs/png/PngCodec.h @@ -0,0 +1,53 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/ImageCodec.h" + +namespace tgfx { +class PngCodec : public ImageCodec { + public: + static std::shared_ptr MakeFrom(const std::string& filePath); + static std::shared_ptr MakeFrom(std::shared_ptr imageBytes); + static bool IsPng(const std::shared_ptr& data); + + bool isAlphaOnly() const override; + +#ifdef TGFX_USE_PNG_ENCODE + static std::shared_ptr Encode(const Pixmap& pixmap, int quality); +#endif + + protected: + bool readPixels(const ImageInfo& dstInfo, void* dstPixels) const override; + + private: + static std::shared_ptr MakeFromData(const std::string& filePath, + std::shared_ptr byteData); + + PngCodec(int width, int height, EncodedOrigin origin, bool isAlphaOnly, std::string filePath, + std::shared_ptr fileData) + : ImageCodec(width, height, origin), _isAlphaOnly(isAlphaOnly), fileData(std::move(fileData)), + filePath(std::move(filePath)) { + } + + bool _isAlphaOnly = false; + std::shared_ptr fileData; + std::string filePath; +}; +} // namespace tgfx diff --git a/src/codecs/webp/WebpCodec.cpp b/src/codecs/webp/WebpCodec.cpp new file mode 100644 index 00000000..cdfda6a5 --- /dev/null +++ b/src/codecs/webp/WebpCodec.cpp @@ -0,0 +1,203 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "codecs/webp/WebpCodec.h" +#include "codecs/webp/WebpUtility.h" +#include "tgfx/core/Pixmap.h" +#include "tgfx/utils/Buffer.h" + +namespace tgfx { + +bool WebpCodec::IsWebp(const std::shared_ptr& data) { + const char* bytes = static_cast(data->data()); + return data->size() >= 14 && !memcmp(bytes, "RIFF", 4) && !memcmp(&bytes[8], "WEBPVP", 6); +} + +std::shared_ptr WebpCodec::MakeFrom(const std::string& filePath) { + auto info = WebpUtility::getDecodeInfo(filePath); + if (info.width == 0 || info.height == 0) { + auto data = Data::MakeFromFile(filePath); + info = WebpUtility::getDecodeInfo(data->data(), data->size()); + if (info.width == 0 || info.height == 0) { + return nullptr; + } + } + return std::shared_ptr( + new WebpCodec(info.width, info.height, info.origin, filePath, nullptr)); +} + +std::shared_ptr WebpCodec::MakeFrom(std::shared_ptr imageBytes) { + if (imageBytes == nullptr) { + return nullptr; + } + auto info = WebpUtility::getDecodeInfo(imageBytes->data(), imageBytes->size()); + if (info.width == 0 || info.height == 0) { + return nullptr; + } + return std::shared_ptr( + new WebpCodec(info.width, info.height, info.origin, "", std::move(imageBytes))); +} + +static WEBP_CSP_MODE webp_decode_mode(ColorType dstCT, bool premultiply) { + switch (dstCT) { + case ColorType::BGRA_8888: + return premultiply ? MODE_bgrA : MODE_BGRA; + case ColorType::RGBA_8888: + return premultiply ? MODE_rgbA : MODE_RGBA; + default: + return MODE_LAST; + } +} + +bool WebpCodec::readPixels(const ImageInfo& dstInfo, void* dstPixels) const { + if (dstPixels == nullptr || dstInfo.isEmpty()) { + return false; + } + auto byteData = fileData; + if (byteData == nullptr) { + byteData = Data::MakeFromFile(filePath); + } + if (byteData == nullptr) { + return false; + } + WebPDecoderConfig config; + if (!WebPInitDecoderConfig(&config)) return false; + if (WebPGetFeatures(byteData->bytes(), byteData->size(), &config.input) != VP8_STATUS_OK) { + return false; + } + config.output.is_external_memory = 1; + config.output.colorspace = + webp_decode_mode(dstInfo.colorType(), dstInfo.alphaType() == AlphaType::Premultiplied); + bool decodeSuccess = true; + if (config.output.colorspace == MODE_LAST) { + // decode to RGBA_8888 + auto info = dstInfo.makeColorType(ColorType::RGBA_8888); + config.output.colorspace = + webp_decode_mode(info.colorType(), info.alphaType() == AlphaType::Premultiplied); + config.output.u.RGBA.stride = static_cast(info.rowBytes()); + config.output.u.RGBA.size = info.byteSize(); + Buffer buffer(info.byteSize()); + auto pixels = buffer.bytes(); + if (pixels) { + config.output.u.RGBA.rgba = pixels; + decodeSuccess = WebPDecode(byteData->bytes(), byteData->size(), &config) == VP8_STATUS_OK; + if (decodeSuccess) { + Pixmap pixmap(info, pixels); + decodeSuccess = pixmap.readPixels(dstInfo, dstPixels); + } + } + } else { + config.output.u.RGBA.rgba = reinterpret_cast(dstPixels); + config.output.u.RGBA.stride = static_cast(dstInfo.rowBytes()); + config.output.u.RGBA.size = dstInfo.byteSize(); + auto code = WebPDecode(byteData->bytes(), byteData->size(), &config); + decodeSuccess = (code == VP8_STATUS_OK); + } + WebPFreeDecBuffer(&config.output); + return decodeSuccess; +} + +#ifdef TGFX_USE_WEBP_ENCODE +struct WebpWriter { + unsigned char* data = nullptr; + size_t length = 0; +}; + +static int webp_reader_write_data(const uint8_t* data, size_t data_size, + const WebPPicture* picture) { + auto 
writer = static_cast(picture->custom_ptr); + size_t nsize = writer->length + data_size; + if (writer->data) { + writer->data = static_cast(realloc(writer->data, nsize)); + } else { + writer->data = static_cast(malloc(nsize)); + } + memcpy(writer->data + writer->length, data, data_size); + writer->length += data_size; + return 1; +} + +std::shared_ptr WebpCodec::Encode(const Pixmap& pixmap, int quality) { + const uint8_t* srcPixels = static_cast(const_cast(pixmap.pixels())); + auto srcInfo = pixmap.info(); + Buffer tempBuffer = {}; + if (pixmap.alphaType() == AlphaType::Premultiplied || + (pixmap.colorType() != ColorType::RGBA_8888 && pixmap.colorType() != ColorType::BGRA_8888)) { + auto alphaType = + pixmap.alphaType() == AlphaType::Opaque ? AlphaType::Opaque : AlphaType::Unpremultiplied; + srcInfo = ImageInfo::Make(pixmap.width(), pixmap.height(), ColorType::RGBA_8888, alphaType); + tempBuffer.alloc(srcInfo.byteSize()); + srcPixels = tempBuffer.bytes(); + if (!pixmap.readPixels(srcInfo, tempBuffer.data())) { + return nullptr; + } + } + WebPConfig webp_config; + bool isLossless = false; + if (quality == 100) { + quality = 75; + isLossless = true; + } + if (!WebPConfigPreset(&webp_config, WEBP_PRESET_DEFAULT, static_cast(quality))) { + return nullptr; + } + WebPPicture pic; + WebPPictureInit(&pic); + pic.width = srcInfo.width(); + pic.height = srcInfo.height(); + pic.writer = webp_reader_write_data; + if (isLossless) { + webp_config.lossless = 1; + webp_config.method = 0; + pic.use_argb = 1; + } else { + webp_config.lossless = 0; + webp_config.method = 3; + pic.use_argb = 0; + } + + WebpWriter webpWriter; + pic.custom_ptr = &webpWriter; + auto importProc = WebPPictureImportRGBX; + if (ColorType::RGBA_8888 == srcInfo.colorType()) { + if (AlphaType::Opaque == srcInfo.alphaType()) { + importProc = WebPPictureImportRGBX; + } else { + importProc = WebPPictureImportRGBA; + } + } else if (ColorType::BGRA_8888 == srcInfo.colorType()) { + if (AlphaType::Opaque == srcInfo.alphaType()) { + importProc = WebPPictureImportBGRX; + } else { + importProc = WebPPictureImportBGRA; + } + } + auto rowBytes = static_cast(srcInfo.rowBytes()); + if (!importProc(&pic, srcPixels, rowBytes) || !WebPEncode(&webp_config, &pic)) { + WebPPictureFree(&pic); + if (webpWriter.data) { + free(webpWriter.data); + } + return nullptr; + } + WebPPictureFree(&pic); + return Data::MakeAdopted(webpWriter.data, webpWriter.length, Data::FreeProc); +} +#endif + +} // namespace tgfx diff --git a/src/codecs/webp/WebpCodec.h b/src/codecs/webp/WebpCodec.h new file mode 100644 index 00000000..23f491da --- /dev/null +++ b/src/codecs/webp/WebpCodec.h @@ -0,0 +1,51 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#pragma once
+
+#include "tgfx/core/ImageCodec.h"
+#include "webp/decode.h"
+#include "webp/demux.h"
+#include "webp/encode.h"
+
+namespace tgfx {
+class WebpCodec : public ImageCodec {
+ public:
+  static std::shared_ptr<ImageCodec> MakeFrom(const std::string& filePath);
+  static std::shared_ptr<ImageCodec> MakeFrom(std::shared_ptr<Data> imageBytes);
+  static bool IsWebp(const std::shared_ptr<Data>& data);
+
+#ifdef TGFX_USE_WEBP_ENCODE
+  static std::shared_ptr<Data> Encode(const Pixmap& pixmap, int quality);
+#endif
+
+ protected:
+  bool readPixels(const ImageInfo& dstInfo, void* dstPixels) const override;
+
+ private:
+  std::shared_ptr<Data> fileData;
+  std::string filePath;
+
+  explicit WebpCodec(int width, int height, EncodedOrigin origin, std::string filePath,
+                     std::shared_ptr<Data> fileData)
+      : ImageCodec(width, height, origin), fileData(std::move(fileData)),
+        filePath(std::move(filePath)) {
+  }
+};
+
+}  // namespace tgfx
\ No newline at end of file
diff --git a/src/codecs/webp/WebpUtility.cpp b/src/codecs/webp/WebpUtility.cpp
new file mode 100644
index 00000000..83758b47
--- /dev/null
+++ b/src/codecs/webp/WebpUtility.cpp
@@ -0,0 +1,342 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "codecs/webp/WebpUtility.h"
+#include
+#include
+#include "utils/Log.h"
+#include "utils/OrientationHelper.h"
+#include "webp/decode.h"
+#include "webp/demux.h"
+#include "webp/encode.h"
+
+namespace tgfx {
+
+#define VP8L_MAGIC_BYTE 0x2f  // VP8L signature byte.
+#define VP8L_FRAME_HEADER_SIZE 5  // Size of the VP8L frame header.
+#define VP8_FRAME_HEADER_SIZE 10  // Size of the frame header within VP8 data.
+#define TAG_SIZE 4  // Size of a chunk tag (e.g. "VP8L").
+#define CHUNK_SIZE_BYTES 4  // Size needed to store chunk's size.
+#define CHUNK_HEADER_SIZE 8  // Size of a chunk header.
+#define RIFF_HEADER_SIZE 12  // Size of the RIFF header ("RIFFnnnnWEBP").
+#define ANMF_CHUNK_SIZE 16  // Size of an ANMF chunk.
+#define ANIM_CHUNK_SIZE 6  // Size of an ANIM chunk.
+#define VP8X_CHUNK_SIZE 10  // Size of a VP8X chunk.
+#define MAX_CHUNK_PAYLOAD (~0U - CHUNK_HEADER_SIZE - 1)
+// Create fourcc of the chunk from the chunk tag characters.
+#define MKFOURCC(a, b, c, d) ((a) | (b) << 8 | (c) << 16 | (uint32_t)(d) << 24)
+// maximum number of bits (inclusive) the bit-reader can handle:
+#define VP8L_MAX_NUM_BIT_READ 24
+#define VP8L_LBITS 64  // Number of bits prefetched (= bit-size of vp8l_val_t).
+#define VP8L_MAGIC_BYTE 0x2f  // VP8L signature byte.
+#define VP8L_IMAGE_SIZE_BITS 14  // Number of bits used to store
+#define VP8L_VERSION_BITS 3  // 3 bits reserved for version.
+ +typedef uint64_t vp8l_val_t; // right now, this bit-reader can only use 64bit. + +typedef struct { + vp8l_val_t val_; // pre-fetched bits + const uint8_t* buf_; // input byte buffer + size_t len_; // buffer length + size_t pos_; // byte position in buf_ + int bit_pos_; // current bit-reading position in val_ + int eos_; // true if a bit was read past the end of buffer +} VP8LBitReader; + +struct WebpFile { + FILE* inFile; + size_t _start; + size_t _end; + size_t _riff_end; +}; + +static const uint32_t kBitMask[VP8L_MAX_NUM_BIT_READ + 1] = { + 0, 0x000001, 0x000003, 0x000007, 0x00000f, 0x00001f, 0x00003f, 0x00007f, 0x0000ff, + 0x0001ff, 0x0003ff, 0x0007ff, 0x000fff, 0x001fff, 0x003fff, 0x007fff, 0x00ffff, 0x01ffff, + 0x03ffff, 0x07ffff, 0x0fffff, 0x1fffff, 0x3fffff, 0x7fffff, 0xffffff}; + +static inline int GetLE16(const uint8_t* const data) { + return static_cast((data[0] << 0) | (data[1] << 8)); +} +static inline int GetLE24(const uint8_t* const data) { + return GetLE16(data) | (data[2] << 16); +} +static inline uint32_t GetLE32(const uint8_t* const data) { + return GetLE16(data) | ((uint32_t)GetLE16(data + 2) << 16); +} + +static inline void Skip(WebpFile* const ptr, size_t size) { + ptr->_start += size; +} +static bool VP8CheckSignature(const uint8_t* const data) { + return (data[0] == 0x9d && data[1] == 0x01 && data[2] == 0x2a); +} + +static bool VP8LCheckSignature(const uint8_t* const data) { + return (data[0] == VP8L_MAGIC_BYTE && (data[4] >> 5) == 0); // version +} + +// Return the prefetched bits, so they can be looked up. +static inline uint32_t VP8LPrefetchBits(VP8LBitReader* const br) { + return (uint32_t)(br->val_ >> (br->bit_pos_ & (VP8L_LBITS - 1))); +} + +// Returns true if there was an attempt at reading bit past the end of +// the buffer. Doesn't set br->eos_ flag. +static inline int VP8LIsEndOfStream(const VP8LBitReader* const br) { + ASSERT(br->pos_ <= br->len_); + return br->eos_ || ((br->pos_ == br->len_) && (br->bit_pos_ > VP8L_LBITS)); +} + +static void VP8LSetEndOfStream(VP8LBitReader* const br) { + br->eos_ = 1; + br->bit_pos_ = 0; // To avoid undefined behaviour with shifts. +} + +// If not at EOS, reload up to VP8L_LBITS byte-by-byte +static void ShiftBytes(VP8LBitReader* const br) { + while (br->bit_pos_ >= 8 && br->pos_ < br->len_) { + br->val_ >>= 8; + br->val_ |= ((vp8l_val_t)br->buf_[br->pos_]) << (VP8L_LBITS - 8); + ++br->pos_; + br->bit_pos_ -= 8; + } + if (VP8LIsEndOfStream(br)) { + VP8LSetEndOfStream(br); + } +} + +static void VP8LInitBitReader(VP8LBitReader* const br, const uint8_t* const start, size_t length) { + size_t i; + vp8l_val_t value = 0; + ASSERT(br != NULL); + ASSERT(start != NULL); + ASSERT(length < 0xfffffff8u); // can't happen with a RIFF chunk. + + br->len_ = length; + br->val_ = 0; + br->bit_pos_ = 0; + br->eos_ = 0; + + if (length > sizeof(br->val_)) { + length = sizeof(br->val_); + } + for (i = 0; i < length; ++i) { + value |= (vp8l_val_t)start[i] << (8 * i); + } + br->val_ = value; + br->pos_ = length; + br->buf_ = start; +} + +static uint32_t VP8LReadBits(VP8LBitReader* const br, int n_bits) { + ASSERT(n_bits >= 0); + // Flag an error if end_of_stream or n_bits is more than allowed limit. 
+ if (!br->eos_ && n_bits <= VP8L_MAX_NUM_BIT_READ) { + const uint32_t val = VP8LPrefetchBits(br) & kBitMask[n_bits]; + const int new_bits = br->bit_pos_ + n_bits; + br->bit_pos_ = new_bits; + ShiftBytes(br); + return val; + } else { + VP8LSetEndOfStream(br); + return 0; + } +} +static int ReadImageInfo(VP8LBitReader* const br, int* const width, int* const height, + int* const has_alpha) { + if (VP8LReadBits(br, 8) != VP8L_MAGIC_BYTE) return 0; + *width = VP8LReadBits(br, VP8L_IMAGE_SIZE_BITS) + 1; + *height = VP8LReadBits(br, VP8L_IMAGE_SIZE_BITS) + 1; + *has_alpha = VP8LReadBits(br, 1); + if (VP8LReadBits(br, VP8L_VERSION_BITS) != 0) return 0; + return !br->eos_; +} + +static bool ParseVP8L(WebpFile& webpFile, DecodeInfo& decodeInfo, + const uint32_t chunk_size_padded) { + if (decodeInfo.width <= 0) { + if (chunk_size_padded < VP8L_FRAME_HEADER_SIZE) return false; + fseek(webpFile.inFile, webpFile._start + CHUNK_HEADER_SIZE, SEEK_SET); + auto vp8xHeader = static_cast(malloc(VP8L_FRAME_HEADER_SIZE)); + if (fread(vp8xHeader, 1, VP8L_FRAME_HEADER_SIZE, webpFile.inFile) != VP8L_FRAME_HEADER_SIZE) + return false; + if (!VP8LCheckSignature(vp8xHeader)) return false; + int w, h, a; + VP8LBitReader br; + VP8LInitBitReader(&br, vp8xHeader, 10); + if (!ReadImageInfo(&br, &w, &h, &a)) { + free(vp8xHeader); + return false; + } + decodeInfo.width = w; + decodeInfo.height = h; + free(vp8xHeader); + } + if (chunk_size_padded <= webpFile._end - webpFile._start) { + Skip(&webpFile, chunk_size_padded + CHUNK_HEADER_SIZE); + } else { + return false; + } + return true; +} + +static bool ParseVP8(WebpFile& webpFile, DecodeInfo& decodeInfo, const uint32_t chunk_size_padded) { + if (decodeInfo.width <= 0) { + if (chunk_size_padded < VP8_FRAME_HEADER_SIZE) return false; + fseek(webpFile.inFile, webpFile._start + CHUNK_HEADER_SIZE, SEEK_SET); + auto vp8xHeader = static_cast(malloc(15)); + if (fread(vp8xHeader, 1, VP8_FRAME_HEADER_SIZE, webpFile.inFile) != VP8_FRAME_HEADER_SIZE) + return false; + if (VP8CheckSignature(vp8xHeader)) return false; + decodeInfo.width = ((vp8xHeader[7] << 8) | vp8xHeader[6]) & 0x3fff; + decodeInfo.height = ((vp8xHeader[9] << 8) | vp8xHeader[8]) & 0x3fff; + free(vp8xHeader); + } + if (chunk_size_padded <= webpFile._end - webpFile._start) { + Skip(&webpFile, chunk_size_padded + CHUNK_HEADER_SIZE); + } else { + return false; + } + return true; +} + +static bool ParseVP8X(WebpFile& webpFile, DecodeInfo& decodeInfo, + const uint32_t chunk_size_padded) { + if (decodeInfo.width <= 0) { + fseek(webpFile.inFile, webpFile._start + RIFF_HEADER_SIZE, SEEK_SET); + auto vp8xHeader = static_cast(malloc(6)); + if (fread(vp8xHeader, 1, 6, webpFile.inFile) != 6) return false; + decodeInfo.width = 1 + GetLE24(vp8xHeader); + decodeInfo.height = 1 + GetLE24(vp8xHeader + 3); + free(vp8xHeader); + } + if (chunk_size_padded <= webpFile._end - webpFile._start) { + Skip(&webpFile, chunk_size_padded + CHUNK_HEADER_SIZE); + } else { + return false; + } + return true; +} + +DecodeInfo WebpUtility::getDecodeInfo(const std::string& filePath) { + auto infile = fopen(filePath.c_str(), "rb"); + if (infile == nullptr) return {}; + if (fseek(infile, 0, SEEK_END)) { + fclose(infile); + return {}; + } + DecodeInfo decodeInfo; + size_t fileLength = ftell(infile); + WebpFile webpFile = {infile, 0, fileLength, fileLength}; + webpFile._start = 12; + auto chunkHeader = static_cast(malloc(RIFF_HEADER_SIZE)); + bool foundOrigin = false; + do { + fseek(infile, webpFile._start, SEEK_SET); + if ((fileLength - webpFile._start) < 
RIFF_HEADER_SIZE) break; + if (fread(chunkHeader, 1, RIFF_HEADER_SIZE, infile) != RIFF_HEADER_SIZE) break; + const uint32_t fourcc = GetLE32(chunkHeader); + const uint32_t chunk_size = GetLE32(chunkHeader + 4); + const uint32_t chunk_size_padded = chunk_size + (chunk_size & 1); + if (chunk_size > MAX_CHUNK_PAYLOAD) break; + if (chunk_size_padded > webpFile._riff_end - webpFile._start) break; + bool needBreak = false; + switch (fourcc) { + case MKFOURCC('V', 'P', '8', 'L'): { + needBreak = !ParseVP8L(webpFile, decodeInfo, chunk_size_padded); + break; + } + case MKFOURCC('V', 'P', '8', ' '): { + needBreak = !ParseVP8(webpFile, decodeInfo, chunk_size_padded); + break; + } + case MKFOURCC('V', 'P', '8', 'X'): { + needBreak = !ParseVP8X(webpFile, decodeInfo, chunk_size_padded); + break; + } + case MKFOURCC('E', 'X', 'I', 'F'): { + foundOrigin = true; + fseek(infile, webpFile._start + CHUNK_HEADER_SIZE, SEEK_SET); + auto exifData = static_cast(malloc(chunk_size)); + if (fread(exifData, 1, chunk_size, infile) != chunk_size) { + needBreak = true; + break; + } + is_orientation_marker(exifData, chunk_size, &decodeInfo.origin); + if (chunk_size_padded <= webpFile._end - webpFile._start) { + Skip(&webpFile, chunk_size_padded + CHUNK_HEADER_SIZE); + } else { + needBreak = true; + } + break; + } + default: { + if (chunk_size_padded <= webpFile._end - webpFile._start) { + Skip(&webpFile, chunk_size_padded + CHUNK_HEADER_SIZE); + } else { + needBreak = true; + break; + } + } + } + if (needBreak) break; + } while (!foundOrigin && (fileLength - webpFile._start) >= RIFF_HEADER_SIZE); + free(chunkHeader); + fclose(infile); + return decodeInfo; +} + +DecodeInfo WebpUtility::getDecodeInfo(const void* fileBytes, size_t byteLength) { + if (fileBytes == nullptr || byteLength == 0) return {}; + WebPData webpData = {static_cast(fileBytes), byteLength}; + WebPDemuxState state; + auto demux = WebPDemuxPartial(&webpData, &state); + switch (state) { + case WEBP_DEMUX_PARSE_ERROR: + case WEBP_DEMUX_PARSING_HEADER: + return {}; + case WEBP_DEMUX_PARSED_HEADER: + case WEBP_DEMUX_DONE: + break; + } + const int width = WebPDemuxGetI(demux, WEBP_FF_CANVAS_WIDTH); + const int height = WebPDemuxGetI(demux, WEBP_FF_CANVAS_HEIGHT); + // Sanity check for image size that's about to be decoded. + { + const int64_t size = (int64_t)width * (int64_t)height; + // now check that if we are 4-bytes per pixel, we also don't overflow + if (!static_cast(size) || static_cast(size) > (0x7FFFFFFF >> 2)) { + WebPDemuxDelete(demux); + return {}; + } + } + + EncodedOrigin origin = EncodedOrigin::TopLeft; + { + WebPChunkIterator chunkIterator; + if (WebPDemuxGetChunk(demux, "EXIF", 1, &chunkIterator)) { + is_orientation_marker(chunkIterator.chunk.bytes, chunkIterator.chunk.size, &origin); + } + WebPDemuxReleaseChunkIterator(&chunkIterator); + } + WebPDemuxDelete(demux); + return {width, height, origin}; +} + +} // namespace tgfx \ No newline at end of file diff --git a/src/codecs/webp/WebpUtility.h b/src/codecs/webp/WebpUtility.h new file mode 100644 index 00000000..d8fb9581 --- /dev/null +++ b/src/codecs/webp/WebpUtility.h @@ -0,0 +1,39 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/EncodedOrigin.h" + +namespace tgfx { + +struct DecodeInfo { + int width = 0; + int height = 0; + EncodedOrigin origin = EncodedOrigin::TopLeft; +}; + +class WebpUtility { + public: + static DecodeInfo getDecodeInfo(const std::string& filePath); + + static DecodeInfo getDecodeInfo(const void* fileBytes, size_t byteLength); +}; + +} // namespace tgfx diff --git a/src/core/Bitmap.cpp b/src/core/Bitmap.cpp new file mode 100644 index 00000000..02e135a3 --- /dev/null +++ b/src/core/Bitmap.cpp @@ -0,0 +1,133 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Bitmap.h" +#include "core/PixelRef.h" +#include "tgfx/core/ImageCodec.h" +#include "tgfx/core/Pixmap.h" + +namespace tgfx { +Bitmap::Bitmap(int width, int height, bool alphaOnly, bool tryHardware) { + allocPixels(width, height, alphaOnly, tryHardware); +} + +Bitmap::Bitmap(const Bitmap& src) : _info(src._info), pixelRef(src.pixelRef) { +} + +Bitmap::Bitmap(Bitmap&& src) : _info(src._info), pixelRef(std::move(src.pixelRef)) { +} + +Bitmap::Bitmap(HardwareBufferRef hardwareBuffer) { + auto pixelBuffer = PixelBuffer::MakeFrom(hardwareBuffer); + pixelRef = PixelRef::Wrap(pixelBuffer); + if (pixelRef != nullptr) { + _info = pixelRef->info(); + } +} + +Bitmap& Bitmap::operator=(const Bitmap& src) { + if (this != &src) { + _info = src._info; + pixelRef = src.pixelRef; + } + return *this; +} + +Bitmap& Bitmap::operator=(Bitmap&& src) { + if (this != &src) { + _info = src._info; + pixelRef = std::move(src.pixelRef); + } + return *this; +} + +bool Bitmap::allocPixels(int width, int height, bool alphaOnly, bool tryHardware) { + pixelRef = PixelRef::Make(width, height, alphaOnly, tryHardware); + if (pixelRef != nullptr) { + _info = pixelRef->info(); + } + return pixelRef != nullptr; +} + +void* Bitmap::lockPixels() { + if (pixelRef == nullptr) { + return nullptr; + } + auto pixels = pixelRef->lockWritablePixels(); + if (pixels != nullptr) { + pixelRef->markContentDirty(Rect::MakeWH(width(), height())); + } + return pixels; +} + +const void* Bitmap::lockPixels() const { + if (pixelRef == nullptr) { + return nullptr; + } + return pixelRef->lockPixels(); +} + +void Bitmap::unlockPixels() const { + if (pixelRef != nullptr) { + pixelRef->unlockPixels(); + } +} + +bool Bitmap::isHardwareBacked() const { + return pixelRef ? pixelRef->isHardwareBacked() : false; +} + +HardwareBufferRef Bitmap::getHardwareBuffer() const { + return pixelRef ? pixelRef->getHardwareBuffer() : nullptr; +} + +std::shared_ptr Bitmap::encode(EncodedFormat format, int quality) const { + Pixmap pixmap(*this); + return ImageCodec::Encode(pixmap, format, quality); +} + +Color Bitmap::getColor(int x, int y) const { + return Pixmap(*this).getColor(x, y); +} + +bool Bitmap::readPixels(const ImageInfo& dstInfo, void* dstPixels, int srcX, int srcY) const { + return Pixmap(*this).readPixels(dstInfo, dstPixels, srcX, srcY); +} + +bool Bitmap::writePixels(const ImageInfo& srcInfo, const void* srcPixels, int dstX, int dstY) { + auto success = Pixmap(*this).writePixels(srcInfo, srcPixels, dstX, dstY); + if (success) { + pixelRef->markContentDirty(Rect::MakeXYWH(dstX, dstY, srcInfo.width(), srcInfo.height())); + } + return success; +} + +void Bitmap::clear() { + if (pixelRef == nullptr) { + return; + } + pixelRef->clear(); +} + +std::shared_ptr Bitmap::makeBuffer() const { + if (pixelRef == nullptr) { + return nullptr; + } + return pixelRef->makeBuffer(); +} +} // namespace tgfx diff --git a/src/core/Canvas.cpp b/src/core/Canvas.cpp new file mode 100644 index 00000000..d3bf3ce5 --- /dev/null +++ b/src/core/Canvas.cpp @@ -0,0 +1,748 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Canvas.h" +#include +#include "CanvasState.h" +#include "gpu/GpuPaint.h" +#include "gpu/SurfaceDrawContext.h" +#include "gpu/ops/ClearOp.h" +#include "gpu/ops/FillRectOp.h" +#include "gpu/ops/RRectOp.h" +#include "gpu/ops/TriangulatingPathOp.h" +#include "gpu/processors/AARectEffect.h" +#include "gpu/processors/ConstColorProcessor.h" +#include "gpu/processors/DeviceSpaceTextureEffect.h" +#include "gpu/processors/TextureEffect.h" +#include "tgfx/core/BlendMode.h" +#include "tgfx/core/Mask.h" +#include "tgfx/core/PathEffect.h" +#include "tgfx/core/TextBlob.h" +#include "tgfx/gpu/Surface.h" +#include "utils/MathExtra.h" + +namespace tgfx { +static uint32_t NextClipID() { + static const uint32_t kFirstUnreservedClipID = 1; + static std::atomic_uint32_t nextID{kFirstUnreservedClipID}; + uint32_t id; + do { + id = nextID++; + } while (id < kFirstUnreservedClipID); + return id; +} + +Canvas::Canvas(Surface* surface) : surface(surface), clipID(kDefaultClipID) { + drawContext = new SurfaceDrawContext(surface); + state = std::make_shared(); + state->clip.addRect(0, 0, static_cast(surface->width()), + static_cast(surface->height())); + state->clipID = NextClipID(); +} + +Canvas::~Canvas() { + delete drawContext; +} + +void Canvas::save() { + auto canvasState = std::make_shared(); + *canvasState = *state; + savedStateList.push_back(canvasState); +} + +void Canvas::restore() { + if (savedStateList.empty()) { + return; + } + state = savedStateList.back(); + savedStateList.pop_back(); +} + +Matrix Canvas::getMatrix() const { + return state->matrix; +} + +void Canvas::setMatrix(const Matrix& matrix) { + state->matrix = matrix; +} + +void Canvas::resetMatrix() { + state->matrix.reset(); +} + +void Canvas::concat(const Matrix& matrix) { + state->matrix.preConcat(matrix); +} + +float Canvas::getAlpha() const { + return state->alpha; +} + +void Canvas::setAlpha(float newAlpha) { + state->alpha = newAlpha; +} + +BlendMode Canvas::getBlendMode() const { + return state->blendMode; +} + +void Canvas::setBlendMode(BlendMode blendMode) { + state->blendMode = blendMode; +} + +Path Canvas::getTotalClip() const { + return state->clip; +} + +void Canvas::clipPath(const Path& path) { + auto clipPath = path; + clipPath.transform(state->matrix); + state->clip.addPath(clipPath, PathOp::Intersect); + state->clipID = NextClipID(); +} + +void Canvas::clear(const Color& color) { + auto oldBlend = getBlendMode(); + setBlendMode(BlendMode::Src); + Paint paint; + paint.setColor(color); + auto rect = Rect::MakeWH(surface->width(), surface->height()); + drawRect(rect, paint); + setBlendMode(oldBlend); +} + +static bool AffectsAlpha(const ColorFilter* cf) { + return cf && !cf->isAlphaUnchanged(); +} + +bool Canvas::nothingToDraw(const Paint& paint) const { + switch (getBlendMode()) { + case BlendMode::SrcOver: + case BlendMode::SrcATop: + case BlendMode::DstOut: + 
case BlendMode::DstOver: + case BlendMode::Plus: + if (0 == getAlpha() || 0 == paint.getAlpha()) { + return !AffectsAlpha(paint.getColorFilter().get()) && paint.getImageFilter() == nullptr; + } + break; + case BlendMode::Dst: + return true; + default: + break; + } + return false; +} + +void Canvas::drawRect(const Rect& rect, const Paint& paint) { + if (nothingToDraw(paint)) { + return; + } + Path path = {}; + path.addRect(rect); + drawPath(path, paint); +} + +static Paint CleanPaintForDrawImage(const Paint* paint) { + Paint cleaned; + if (paint) { + cleaned = *paint; + cleaned.setStyle(PaintStyle::Fill); + } + return cleaned; +} + +void Canvas::flush() { + surface->flush(); +} + +Context* Canvas::getContext() const { + return surface->getContext(); +} + +Surface* Canvas::getSurface() const { + return surface; +} + +const SurfaceOptions* Canvas::surfaceOptions() const { + return surface->options(); +} + +static bool PaintToGLPaint(Context* context, uint32_t surfaceFlags, const Paint& paint, float alpha, + std::unique_ptr shaderProcessor, GpuPaint* glPaint) { + FPArgs args(context, surfaceFlags); + glPaint->context = context; + glPaint->color = paint.getColor().makeOpaque(); + std::unique_ptr shaderFP; + if (shaderProcessor) { + shaderFP = std::move(shaderProcessor); + } else if (auto shader = paint.getShader()) { + shaderFP = shader->asFragmentProcessor(args); + if (shaderFP == nullptr) { + return false; + } + } + alpha *= paint.getAlpha(); + if (shaderFP) { + glPaint->colorFragmentProcessors.emplace_back(std::move(shaderFP)); + if (alpha != 1.f) { + glPaint->colorFragmentProcessors.emplace_back( + ConstColorProcessor::Make(Color{alpha, alpha, alpha, alpha}, InputMode::ModulateRGBA)); + } + } else { + glPaint->color.alpha = alpha; + glPaint->color = glPaint->color.premultiply(); + } + if (auto colorFilter = paint.getColorFilter()) { + if (auto processor = colorFilter->asFragmentProcessor()) { + glPaint->colorFragmentProcessors.emplace_back(std::move(processor)); + } else { + return false; + } + } + if (auto maskFilter = paint.getMaskFilter()) { + if (auto processor = maskFilter->asFragmentProcessor(args)) { + glPaint->coverageFragmentProcessors.emplace_back(std::move(processor)); + } + } + return true; +} + +static bool PaintToGLPaintWithImage(Context* context, uint32_t surfaceFlags, const Paint& paint, + float alpha, std::unique_ptr fp, + bool imageIsAlphaOnly, GpuPaint* glPaint) { + std::unique_ptr shaderFP; + if (imageIsAlphaOnly) { + if (auto shader = paint.getShader()) { + shaderFP = shader->asFragmentProcessor(FPArgs(context, surfaceFlags)); + if (!shaderFP) { + return false; + } + std::unique_ptr fpSeries[] = {std::move(shaderFP), std::move(fp)}; + shaderFP = FragmentProcessor::RunInSeries(fpSeries, 2); + } else { + shaderFP = std::move(fp); + } + } else { + shaderFP = FragmentProcessor::MulChildByInputAlpha(std::move(fp)); + } + return PaintToGLPaint(context, surfaceFlags, paint, alpha, std::move(shaderFP), glPaint); +} + +std::shared_ptr Canvas::getClipTexture() { + if (clipID != state->clipID) { + _clipSurface = nullptr; + } + if (_clipSurface == nullptr) { + _clipSurface = Surface::Make(getContext(), surface->width(), surface->height(), true); + if (_clipSurface == nullptr) { + _clipSurface = Surface::Make(getContext(), surface->width(), surface->height()); + } + } + if (_clipSurface == nullptr) { + return nullptr; + } + if (clipID != state->clipID) { + auto clipCanvas = _clipSurface->getCanvas(); + clipCanvas->clear(); + Paint paint = {}; + paint.setColor(Color::Black()); + 
clipCanvas->drawPath(state->clip, paint); + clipID = state->clipID; + } + return _clipSurface->getTexture(); +} + +static constexpr float BOUNDS_TO_LERANCE = 1e-3f; + +/** + * Returns true if the given rect counts as aligned with pixel boundaries. + */ +static bool IsPixelAligned(const Rect& rect) { + return fabsf(roundf(rect.left) - rect.left) <= BOUNDS_TO_LERANCE && + fabsf(roundf(rect.top) - rect.top) <= BOUNDS_TO_LERANCE && + fabsf(roundf(rect.right) - rect.right) <= BOUNDS_TO_LERANCE && + fabsf(roundf(rect.bottom) - rect.bottom) <= BOUNDS_TO_LERANCE; +} + +void FlipYIfNeeded(Rect* rect, const Surface* surface) { + if (surface->origin() == ImageOrigin::BottomLeft) { + auto height = rect->height(); + rect->top = static_cast(surface->height()) - rect->bottom; + rect->bottom = rect->top + height; + } +} + +std::pair, bool> Canvas::getClipRect() { + auto rect = Rect::MakeEmpty(); + if (state->clip.asRect(&rect)) { + FlipYIfNeeded(&rect, surface); + if (IsPixelAligned(rect)) { + rect.round(); + if (rect != Rect::MakeWH(surface->width(), surface->height())) { + return {rect, true}; + } else { + return {Rect::MakeEmpty(), false}; + } + } else { + return {rect, false}; + } + } + return {{}, false}; +} + +std::unique_ptr Canvas::getClipMask(const Rect& deviceBounds, + Rect* scissorRect) { + const auto& clipPath = state->clip; + if (clipPath.contains(deviceBounds)) { + return nullptr; + } + auto [rect, useScissor] = getClipRect(); + if (rect.has_value()) { + if (useScissor) { + *scissorRect = *rect; + } else if (!rect->isEmpty()) { + return AARectEffect::Make(*rect); + } + return nullptr; + } else { + return FragmentProcessor::MulInputByChildAlpha( + DeviceSpaceTextureEffect::Make(getClipTexture(), surface->origin())); + } +} + +Rect Canvas::clipLocalBounds(Rect localBounds) { + auto deviceBounds = state->matrix.mapRect(localBounds); + auto clipBounds = state->clip.getBounds(); + clipBounds.roundOut(); + auto clippedDeviceBounds = deviceBounds; + if (!clippedDeviceBounds.intersect(clipBounds)) { + return Rect::MakeEmpty(); + } + auto clippedLocalBounds = localBounds; + if (state->matrix.getSkewX() == 0 && state->matrix.getSkewY() == 0 && + clippedDeviceBounds != deviceBounds) { + Matrix inverse = Matrix::I(); + state->matrix.invert(&inverse); + clippedLocalBounds = inverse.mapRect(clippedDeviceBounds); + } + return clippedLocalBounds; +} + +void Canvas::drawPath(const Path& path, const Paint& paint) { + if (nothingToDraw(paint)) { + return; + } + if (paint.getStyle() == PaintStyle::Fill) { + fillPath(path, paint); + return; + } + auto strokePath = path; + auto strokeEffect = PathEffect::MakeStroke(paint.getStroke()); + if (strokeEffect == nullptr) { + return; + } + strokeEffect->applyTo(&strokePath); + fillPath(strokePath, paint); +} + +void Canvas::drawShape(std::shared_ptr shape, const Paint& paint) { + if (shape == nullptr || nothingToDraw(paint)) { + return; + } + GpuPaint glPaint; + if (!PaintToGLPaint(getContext(), surface->options()->flags(), paint, state->alpha, nullptr, + &glPaint)) { + return; + } + auto bounds = shape->getBounds(); + if (!state->matrix.isIdentity()) { + state->matrix.mapRect(&bounds); + } + auto clipBounds = state->clip.getBounds(); + clipBounds.roundOut(); + if (!clipBounds.intersect(bounds)) { + return; + } + auto op = shape->makeOp(&glPaint, state->matrix, surface->options()->flags()); + if (op == nullptr) { + return; + } + draw(std::move(op), std::move(glPaint)); +} + +void Canvas::drawImage(std::shared_ptr image, float left, float top, const Paint* paint) { + 
drawImage(image, Matrix::MakeTrans(left, top), paint); +} + +void Canvas::drawImage(std::shared_ptr image, const Matrix& matrix, const Paint* paint) { + auto oldMatrix = getMatrix(); + concat(matrix); + drawImage(std::move(image), paint); + setMatrix(oldMatrix); +} + +void Canvas::drawImage(std::shared_ptr image, const Paint* paint) { + if (image == nullptr) { + return; + } + auto mipMapMode = image->hasMipmaps() ? tgfx::MipMapMode::Linear : tgfx::MipMapMode::None; + tgfx::SamplingOptions sampling(tgfx::FilterMode::Linear, mipMapMode); + drawImage(std::move(image), sampling, paint); +} + +void Canvas::drawImage(std::shared_ptr image, SamplingOptions sampling, const Paint* paint) { + if (image == nullptr) { + return; + } + auto realPaint = CleanPaintForDrawImage(paint); + if (nothingToDraw(realPaint)) { + return; + } + auto oldMatrix = getMatrix(); + auto imageFilter = realPaint.getImageFilter(); + if (imageFilter != nullptr) { + realPaint.setImageFilter(nullptr); + auto clipBounds = state->clip.getBounds(); + Matrix inverted = Matrix::I(); + if (!oldMatrix.invert(&inverted)) { + return; + } + inverted.mapRect(&clipBounds); + ImageFilterContext context(getContext(), oldMatrix, clipBounds, image); + auto [outImage, offset] = imageFilter->filterImage(context); + image = outImage; + concat(Matrix::MakeTrans(offset.x, offset.y)); + } + drawImage(std::move(image), sampling, realPaint); + setMatrix(oldMatrix); +} + +void Canvas::drawImage(std::shared_ptr image, SamplingOptions sampling, const Paint& paint) { + if (image == nullptr) { + return; + } + auto localBounds = clipLocalBounds(Rect::MakeWH(image->width(), image->height())); + if (localBounds.isEmpty()) { + return; + } + auto processor = image->asFragmentProcessor(getContext(), surface->options()->flags(), sampling); + if (processor == nullptr) { + return; + } + GpuPaint glPaint; + if (!PaintToGLPaintWithImage(getContext(), surface->options()->flags(), paint, state->alpha, + std::move(processor), image->isAlphaOnly(), &glPaint)) { + return; + } + auto op = FillRectOp::Make(glPaint.color, localBounds, state->matrix); + draw(std::move(op), std::move(glPaint), true); +} + +static std::unique_ptr MakeSimplePathOp(const Path& path, const GpuPaint& glPaint, + const Matrix& viewMatrix) { + auto rect = Rect::MakeEmpty(); + if (path.asRect(&rect)) { + return FillRectOp::Make(glPaint.color, rect, viewMatrix); + } + RRect rRect; + if (path.asRRect(&rRect)) { + return RRectOp::Make(glPaint.color, rRect, viewMatrix); + } + return nullptr; +} + +void Canvas::fillPath(const Path& path, const Paint& paint) { + if (path.isEmpty()) { + return; + } + GpuPaint glPaint; + if (!PaintToGLPaint(getContext(), surface->options()->flags(), paint, state->alpha, nullptr, + &glPaint)) { + return; + } + auto bounds = path.getBounds(); + auto localBounds = clipLocalBounds(bounds); + if (localBounds.isEmpty()) { + return; + } + if (drawAsClear(path, glPaint)) { + return; + } + auto op = MakeSimplePathOp(path, glPaint, state->matrix); + if (op) { + draw(std::move(op), std::move(glPaint)); + return; + } + auto localMatrix = Matrix::I(); + if (!state->matrix.invert(&localMatrix)) { + return; + } + auto tempPath = path; + tempPath.transform(state->matrix); + op = TriangulatingPathOp::Make(glPaint.color, tempPath, state->clip.getBounds(), localMatrix); + if (op) { + save(); + resetMatrix(); + draw(std::move(op), std::move(glPaint)); + restore(); + return; + } + auto deviceBounds = state->matrix.mapRect(localBounds); + auto width = ceilf(deviceBounds.width()); + auto height = 
ceilf(deviceBounds.height()); + auto mask = Mask::Make(static_cast(width), static_cast(height)); + if (!mask) { + return; + } + auto totalMatrix = state->matrix; + auto matrix = Matrix::MakeTrans(-deviceBounds.x(), -deviceBounds.y()); + matrix.postScale(width / deviceBounds.width(), height / deviceBounds.height()); + totalMatrix.postConcat(matrix); + mask->setMatrix(totalMatrix); + mask->fillPath(path); + auto texture = Texture::MakeFrom(getContext(), mask->makeBuffer()); + drawMask(deviceBounds, std::move(texture), std::move(glPaint)); +} + +void Canvas::drawMask(const Rect& bounds, std::shared_ptr mask, GpuPaint glPaint) { + if (mask == nullptr) { + return; + } + auto localMatrix = Matrix::I(); + auto maskLocalMatrix = Matrix::I(); + auto invert = Matrix::I(); + if (!state->matrix.invert(&invert)) { + return; + } + localMatrix.postConcat(invert); + if (!localMatrix.invert(&invert)) { + return; + } + maskLocalMatrix.postConcat(invert); + maskLocalMatrix.postTranslate(-bounds.x(), -bounds.y()); + maskLocalMatrix.postScale(static_cast(mask->width()) / bounds.width(), + static_cast(mask->height()) / bounds.height()); + auto oldMatrix = state->matrix; + resetMatrix(); + glPaint.coverageFragmentProcessors.emplace_back(FragmentProcessor::MulInputByChildAlpha( + TextureEffect::Make(std::move(mask), SamplingOptions(), &maskLocalMatrix))); + auto op = FillRectOp::Make(glPaint.color, bounds, state->matrix, localMatrix); + draw(std::move(op), std::move(glPaint)); + setMatrix(oldMatrix); +} + +void Canvas::drawGlyphs(const GlyphID glyphIDs[], const Point positions[], size_t glyphCount, + const Font& font, const Paint& paint) { + if (nothingToDraw(paint)) { + return; + } + auto scaleX = state->matrix.getScaleX(); + auto skewY = state->matrix.getSkewY(); + auto scale = std::sqrt(scaleX * scaleX + skewY * skewY); + auto scaledFont = font.makeWithSize(font.getSize() * scale); + auto scaledPaint = paint; + scaledPaint.setStrokeWidth(paint.getStrokeWidth() * scale); + std::vector scaledPositions; + for (size_t i = 0; i < glyphCount; ++i) { + scaledPositions.push_back(Point::Make(positions[i].x * scale, positions[i].y * scale)); + } + save(); + concat(Matrix::MakeScale(1.f / scale)); + if (scaledFont.getTypeface()->hasColor()) { + drawColorGlyphs(glyphIDs, &scaledPositions[0], glyphCount, scaledFont, scaledPaint); + restore(); + return; + } + auto textBlob = TextBlob::MakeFrom(glyphIDs, &scaledPositions[0], glyphCount, scaledFont); + if (textBlob) { + drawMaskGlyphs(textBlob.get(), scaledPaint); + } + restore(); +} + +void Canvas::drawColorGlyphs(const GlyphID glyphIDs[], const Point positions[], size_t glyphCount, + const Font& font, const Paint& paint) { + for (size_t i = 0; i < glyphCount; ++i) { + const auto& glyphID = glyphIDs[i]; + const auto& position = positions[i]; + + auto glyphMatrix = Matrix::I(); + auto glyphBuffer = font.getGlyphImage(glyphID, &glyphMatrix); + if (glyphBuffer == nullptr) { + continue; + } + glyphMatrix.postTranslate(position.x, position.y); + save(); + concat(glyphMatrix); + auto image = Image::MakeFrom(glyphBuffer); + drawImage(std::move(image), &paint); + restore(); + } +} + +void Canvas::drawMaskGlyphs(TextBlob* textBlob, const Paint& paint) { + if (textBlob == nullptr) { + return; + } + GpuPaint glPaint; + if (!PaintToGLPaint(getContext(), surface->options()->flags(), paint, state->alpha, nullptr, + &glPaint)) { + return; + } + auto stroke = paint.getStyle() == PaintStyle::Stroke ? 
paint.getStroke() : nullptr; + auto localBounds = clipLocalBounds(textBlob->getBounds(stroke)); + if (localBounds.isEmpty()) { + return; + } + auto deviceBounds = state->matrix.mapRect(localBounds); + auto width = ceilf(deviceBounds.width()); + auto height = ceilf(deviceBounds.height()); + auto mask = Mask::Make(static_cast(width), static_cast(height)); + if (mask == nullptr) { + return; + } + auto totalMatrix = state->matrix; + auto matrix = Matrix::I(); + matrix.postTranslate(-deviceBounds.x(), -deviceBounds.y()); + matrix.postScale(width / deviceBounds.width(), height / deviceBounds.height()); + totalMatrix.postConcat(matrix); + mask->setMatrix(totalMatrix); + mask->fillText(textBlob, stroke); + auto texture = Texture::MakeFrom(getContext(), mask->makeBuffer()); + drawMask(deviceBounds, std::move(texture), std::move(glPaint)); +} + +void Canvas::drawAtlas(std::shared_ptr atlas, const Matrix matrix[], const Rect tex[], + const Color colors[], size_t count, SamplingOptions sampling) { + if (atlas == nullptr || count == 0) { + return; + } + auto totalMatrix = getMatrix(); + std::vector> ops; + FillRectOp* op = nullptr; + for (size_t i = 0; i < count; ++i) { + concat(matrix[i]); + auto width = static_cast(tex[i].width()); + auto height = static_cast(tex[i].height()); + auto localBounds = clipLocalBounds(Rect::MakeWH(width, height)); + if (localBounds.isEmpty()) { + setMatrix(totalMatrix); + continue; + } + auto localMatrix = Matrix::MakeTrans(tex[i].x(), tex[i].y()); + auto color = colors ? std::optional(colors[i].premultiply()) : std::nullopt; + if (op == nullptr) { + ops.emplace_back(FillRectOp::Make(color, localBounds, state->matrix, localMatrix)); + op = ops.back().get(); + } else { + if (!op->add(color, localBounds, state->matrix, localMatrix)) { + ops.emplace_back(FillRectOp::Make(color, localBounds, state->matrix, localMatrix)); + op = ops.back().get(); + } + } + setMatrix(totalMatrix); + } + if (ops.empty()) { + return; + } + for (auto& rectOp : ops) { + auto processor = + atlas->asFragmentProcessor(getContext(), surface->options()->flags(), sampling); + if (colors) { + processor = FragmentProcessor::MulInputByChildAlpha(std::move(processor)); + } + if (processor == nullptr) { + return; + } + GpuPaint glPaint; + glPaint.colorFragmentProcessors.emplace_back(std::move(processor)); + draw(std::move(rectOp), std::move(glPaint), false); + } +} + +bool Canvas::drawAsClear(const Path& path, const GpuPaint& paint) { + if (!paint.colorFragmentProcessors.empty() || !paint.coverageFragmentProcessors.empty() || + !state->matrix.rectStaysRect() || drawContext == nullptr) { + return false; + } + auto color = paint.color; + if (getBlendMode() == BlendMode::Clear) { + color = Color::Transparent(); + } else if (getBlendMode() != BlendMode::Src) { + if (!color.isOpaque()) { + return false; + } + } + auto bounds = Rect::MakeEmpty(); + if (!path.asRect(&bounds)) { + return false; + } + state->matrix.mapRect(&bounds); + if (!IsPixelAligned(bounds)) { + return false; + } + surface->aboutToDraw(true); + color = surface->renderTarget->writeSwizzle().applyTo(color); + auto [rect, useScissor] = getClipRect(); + if (rect.has_value()) { + if (useScissor) { + FlipYIfNeeded(&bounds, surface); + rect->intersect(bounds); + drawContext->addOp(ClearOp::Make(color, *rect)); + return true; + } else if (rect->isEmpty()) { + FlipYIfNeeded(&bounds, surface); + drawContext->addOp(ClearOp::Make(color, bounds)); + return true; + } + } + return false; +} + +void Canvas::draw(std::unique_ptr op, GpuPaint paint, bool aa) { + 
if (drawContext == nullptr) { + return; + } + auto aaType = AAType::None; + if (surface->renderTarget->sampleCount() > 1) { + aaType = AAType::MSAA; + } else if (aa && !IsPixelAligned(op->bounds())) { + aaType = AAType::Coverage; + } else { + const auto& matrix = state->matrix; + auto rotation = std::round(RadiansToDegrees(atan2f(matrix.getSkewX(), matrix.getScaleX()))); + if (static_cast(rotation) % 90 != 0) { + aaType = AAType::Coverage; + } + } + auto masks = std::move(paint.coverageFragmentProcessors); + Rect scissorRect = Rect::MakeEmpty(); + auto clipMask = getClipMask(op->bounds(), &scissorRect); + if (clipMask) { + masks.push_back(std::move(clipMask)); + } + op->setScissorRect(scissorRect); + op->setBlendMode(state->blendMode); + op->setAA(aaType); + op->setColors(std::move(paint.colorFragmentProcessors)); + op->setMasks(std::move(masks)); + surface->aboutToDraw(false); + drawContext->addOp(std::move(op)); +} +} // namespace tgfx diff --git a/src/core/CanvasState.h b/src/core/CanvasState.h new file mode 100644 index 00000000..4087df6e --- /dev/null +++ b/src/core/CanvasState.h @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/BlendMode.h" +#include "tgfx/core/Path.h" + +namespace tgfx { +static constexpr uint32_t kDefaultClipID = 0; +struct CanvasState { + float alpha = 1.0f; + BlendMode blendMode = BlendMode::SrcOver; + Matrix matrix = Matrix::I(); + Path clip = {}; + uint32_t clipID = kDefaultClipID; +}; +} // namespace tgfx diff --git a/src/core/Color.cpp b/src/core/Color.cpp new file mode 100644 index 00000000..0954cd52 --- /dev/null +++ b/src/core/Color.cpp @@ -0,0 +1,73 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Color.h" +#include "utils/Log.h" + +namespace tgfx { +const Color& Color::Transparent() { + static const Color color = {0.0f, 0.0f, 0.0f, 0.0f}; + return color; +} + +const Color& Color::Black() { + static const Color color = {0.0f, 0.0f, 0.0f, 1.0f}; + return color; +} + +const Color& Color::White() { + static const Color color = {1.0f, 1.0f, 1.0f, 1.0f}; + return color; +} + +Color Color::FromRGBA(uint8_t r, uint8_t g, uint8_t b, uint8_t a) { + auto alpha = a == 255 ? 1.0f : static_cast(a) / 255.0f; + return {static_cast(r) / 255.0f, static_cast(g) / 255.0f, + static_cast(b) / 255.0f, alpha}; +} + +float Color::operator[](int index) const { + DEBUG_ASSERT(index >= 0 && index < 4); + return (&red)[index]; +} + +float& Color::operator[](int index) { + DEBUG_ASSERT(index >= 0 && index < 4); + return (&red)[index]; +} + +bool Color::isValid() const { + return red >= 0.0f && red <= 1.0f && green >= 0.0f && green <= 1.0f && blue >= 0.0f && + blue <= 1.0f && alpha >= 0.0f && alpha <= 1.0f; +} + +bool Color::isOpaque() const { + DEBUG_ASSERT(alpha <= 1.0f && alpha >= 0.0f); + return alpha == 1.0f; +} + +Color Color::unpremultiply() const { + if (alpha == 0.0f) { + return {0, 0, 0, 0}; + } else { + float invAlpha = 1 / alpha; + return {red * invAlpha, green * invAlpha, blue * invAlpha, alpha}; + } +} + +} // namespace tgfx diff --git a/src/core/Data.cpp b/src/core/Data.cpp new file mode 100644 index 00000000..c73b6a27 --- /dev/null +++ b/src/core/Data.cpp @@ -0,0 +1,95 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Data.h" +#include +#include "tgfx/utils/Stream.h" + +namespace tgfx { +std::shared_ptr Data::MakeFromFile(const std::string& filePath) { + auto stream = Stream::MakeFromFile(filePath); + if (stream == nullptr) { + return nullptr; + } + auto buffer = new (std::nothrow) uint8_t[stream->size()]; + if (buffer == nullptr) { + return nullptr; + } + stream->read(buffer, stream->size()); + auto data = new Data(buffer, stream->size(), Data::DeleteProc, nullptr); + return std::shared_ptr(data); +} + +std::shared_ptr Data::MakeWithCopy(const void* bytes, size_t length) { + if (length == 0) { + return MakeEmpty(); + } + auto data = new (std::nothrow) uint8_t[length]; + if (data == nullptr) { + return nullptr; + } + memcpy(data, bytes, length); + auto byteData = new Data(data, length, Data::DeleteProc, nullptr); + return std::shared_ptr(byteData); +} + +std::shared_ptr Data::MakeWithoutCopy(const void* data, size_t length) { + if (length == 0) { + return MakeEmpty(); + } + auto byteData = new Data(data, length, nullptr, nullptr); + return std::shared_ptr(byteData); +} + +void Data::DeleteProc(const void* data, void*) { + if (data != nullptr) { + delete[] reinterpret_cast(data); + } +} + +void Data::FreeProc(const void* data, void*) { + if (data != nullptr) { + free(const_cast(data)); + } +} + +std::shared_ptr Data::MakeAdopted(const void* data, size_t length, ReleaseProc releaseProc, + void* context) { + if (length == 0) { + return MakeEmpty(); + } + auto byteData = new Data(data, length, releaseProc, context); + return std::shared_ptr(byteData); +} + +std::shared_ptr Data::MakeEmpty() { + static auto emptyData = std::shared_ptr(new Data(nullptr, 0, nullptr, nullptr)); + return emptyData; +} + +Data::Data(const void* data, size_t length, ReleaseProc releaseProc, void* context) + : _data(data), _size(length), releaseProc(releaseProc), releaseContext(context) { +} + +Data::~Data() { + if (releaseProc) { + releaseProc(_data, releaseContext); + } +} + +} // namespace tgfx diff --git a/src/core/EncodeOrigin.cpp b/src/core/EncodeOrigin.cpp new file mode 100644 index 00000000..c1f32173 --- /dev/null +++ b/src/core/EncodeOrigin.cpp @@ -0,0 +1,45 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/EncodedOrigin.h" + +namespace tgfx { +Matrix EncodedOriginToMatrix(EncodedOrigin origin, int width, int height) { + auto w = static_cast(width); + auto h = static_cast(height); + switch (origin) { + case EncodedOrigin::TopLeft: + return Matrix::I(); + case EncodedOrigin::TopRight: + return Matrix::MakeAll(-1, 0, w, 0, 1, 0); + case EncodedOrigin::BottomRight: + return Matrix::MakeAll(-1, 0, w, 0, -1, h); + case EncodedOrigin::BottomLeft: + return Matrix::MakeAll(1, 0, 0, 0, -1, h); + case EncodedOrigin::LeftTop: + return Matrix::MakeAll(0, 1, 0, 1, 0, 0); + case EncodedOrigin::RightTop: + return Matrix::MakeAll(0, -1, h, 1, 0, 0); + case EncodedOrigin::RightBottom: + return Matrix::MakeAll(0, -1, h, -1, 0, w); + case EncodedOrigin::LeftBottom: + return Matrix::MakeAll(0, 1, 0, -1, 0, w); + } + return Matrix::I(); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/core/Font.cpp b/src/core/Font.cpp new file mode 100644 index 00000000..91fa399b --- /dev/null +++ b/src/core/Font.cpp @@ -0,0 +1,51 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Font.h" + +namespace tgfx { +Font::Font() : Font(Typeface::MakeDefault()) { +} + +Font::Font(std::shared_ptr tf, float textSize) { + setTypeface(std::move(tf)); + setSize(textSize); +} + +Font Font::makeWithSize(float newSize) const { + auto newFont = *this; + newFont.setSize(newSize); + return newFont; +} + +void Font::setTypeface(std::shared_ptr newTypeface) { + if (newTypeface == nullptr) { + typeface = Typeface::MakeDefault(); + } else { + typeface = std::move(newTypeface); + } +} + +void Font::setSize(float newSize) { + if (newSize <= 0) { + size = 12.0f; + } else { + size = newSize; + } +} +} // namespace tgfx \ No newline at end of file diff --git a/src/core/Image.cpp b/src/core/Image.cpp new file mode 100644 index 00000000..2f080572 --- /dev/null +++ b/src/core/Image.cpp @@ -0,0 +1,264 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Image.h" +#include "gpu/Texture.h" +#include "gpu/processors/TextureEffect.h" +#include "gpu/processors/TiledTextureEffect.h" +#include "images/ImageSource.h" +#include "images/RGBAAAImage.h" +#include "images/RasterBuffer.h" +#include "images/RasterGenerator.h" +#include "images/SubsetImage.h" +#include "tgfx/core/ImageCodec.h" +#include "tgfx/core/Pixmap.h" + +namespace tgfx { +std::shared_ptr Image::MakeFromFile(const std::string& filePath) { + auto codec = ImageCodec::MakeFrom(filePath); + if (codec == nullptr) { + return nullptr; + } + auto source = ImageSource::MakeFrom(UniqueKey::Next(), codec); + return MakeFrom(source, codec->origin()); +} + +std::shared_ptr Image::MakeFromEncoded(std::shared_ptr encodedData) { + auto codec = ImageCodec::MakeFrom(std::move(encodedData)); + if (codec == nullptr) { + return nullptr; + } + auto source = ImageSource::MakeFrom(UniqueKey::Next(), codec); + return MakeFrom(source, codec->origin()); +} + +std::shared_ptr Image::MakeFrom(NativeImageRef nativeImage) { + auto codec = ImageCodec::MakeFrom(nativeImage); + return MakeFrom(std::move(codec)); +} + +std::shared_ptr Image::MakeFrom(std::shared_ptr generator) { + auto source = ImageSource::MakeFrom(UniqueKey::Next(), std::move(generator)); + return MakeFrom(std::move(source)); +} + +std::shared_ptr Image::MakeFrom(const ImageInfo& info, std::shared_ptr pixels) { + auto imageBuffer = RasterBuffer::MakeFrom(info, pixels); + if (imageBuffer != nullptr) { + return MakeFrom(std::move(imageBuffer)); + } + auto imageGenerator = RasterGenerator::MakeFrom(info, std::move(pixels)); + return MakeFrom(std::move(imageGenerator)); +} + +std::shared_ptr Image::MakeFrom(const Bitmap& bitmap) { + return MakeFrom(bitmap.makeBuffer()); +} + +std::shared_ptr Image::MakeFrom(HardwareBufferRef hardwareBuffer, YUVColorSpace colorSpace) { + auto buffer = ImageBuffer::MakeFrom(hardwareBuffer, colorSpace); + return MakeFrom(std::move(buffer)); +} + +std::shared_ptr Image::MakeI420(std::shared_ptr yuvData, YUVColorSpace colorSpace) { + auto buffer = ImageBuffer::MakeI420(std::move(yuvData), colorSpace); + return MakeFrom(std::move(buffer)); +} + +std::shared_ptr Image::MakeNV12(std::shared_ptr yuvData, YUVColorSpace colorSpace) { + auto buffer = ImageBuffer::MakeNV12(std::move(yuvData), colorSpace); + return MakeFrom(std::move(buffer)); +} + +std::shared_ptr Image::MakeFrom(std::shared_ptr imageBuffer) { + auto source = ImageSource::MakeFrom(UniqueKey::Next(), std::move(imageBuffer)); + return MakeFrom(std::move(source)); +} + +std::shared_ptr Image::MakeFrom(Context* context, const BackendTexture& backendTexture, + ImageOrigin origin) { + auto texture = Texture::MakeFrom(context, backendTexture, origin); + return MakeFrom(std::move(texture)); +} + +std::shared_ptr Image::MakeAdopted(Context* context, const BackendTexture& backendTexture, + ImageOrigin origin) { + auto texture = Texture::MakeAdopted(context, backendTexture, origin); + return MakeFrom(std::move(texture)); +} + 
+std::shared_ptr Image::MakeFrom(std::shared_ptr texture) { + auto source = ImageSource::MakeFrom(UniqueKey::Next(), std::move(texture)); + return MakeFrom(std::move(source)); +} + +std::shared_ptr Image::MakeFrom(std::shared_ptr source, EncodedOrigin origin) { + if (source == nullptr) { + return nullptr; + } + std::shared_ptr image = nullptr; + if (origin != EncodedOrigin::TopLeft) { + image = std::shared_ptr(new SubsetImage(std::move(source), origin)); + } else { + image = std::shared_ptr(new Image(std::move(source))); + } + image->weakThis = image; + return image; +} + +Image::Image(std::shared_ptr source) : source(std::move(source)) { +} + +int Image::width() const { + return source->width(); +} + +int Image::height() const { + return source->height(); +} + +bool Image::hasMipmaps() const { + return source->hasMipmaps(); +} + +bool Image::isAlphaOnly() const { + return source->isAlphaOnly(); +} + +bool Image::isRGBAAA() const { + return false; +} + +bool Image::isLazyGenerated() const { + return source->isLazyGenerated(); +} + +bool Image::isTextureBacked() const { + return source->isTextureBacked(); +} + +BackendTexture Image::getBackendTexture() const { + return source->getBackendTexture(); +} + +std::shared_ptr Image::makeTextureImage(Context* context) const { + auto textureSource = source->makeTextureSource(context); + if (textureSource == source) { + return weakThis.lock(); + } + if (textureSource == nullptr) { + return nullptr; + } + return cloneWithSource(std::move(textureSource)); +} + +std::shared_ptr Image::onCloneWith(std::shared_ptr newSource) const { + return std::shared_ptr(new Image(std::move(newSource))); +} + +std::shared_ptr Image::makeSubset(const Rect& subset) const { + auto rect = subset; + rect.round(); + auto bounds = Rect::MakeWH(width(), height()); + if (bounds == rect) { + return weakThis.lock(); + } + if (!bounds.contains(rect)) { + return nullptr; + } + auto subsetImage = onMakeSubset(rect); + subsetImage->weakThis = subsetImage; + return subsetImage; +} + +std::shared_ptr Image::onMakeSubset(const Rect& subset) const { + return std::shared_ptr(new SubsetImage(source, subset)); +} + +std::shared_ptr Image::makeDecoded(Context* context) const { + auto decodedSource = source->makeDecoded(context); + if (decodedSource == source) { + return weakThis.lock(); + } + return cloneWithSource(std::move(decodedSource)); +} + +std::shared_ptr Image::makeMipMapped() const { + auto mipMappedSource = source->makeMipMapped(); + if (mipMappedSource == source) { + return weakThis.lock(); + } + return cloneWithSource(std::move(mipMappedSource)); +} + +std::shared_ptr Image::makeRGBAAA(int displayWidth, int displayHeight, int alphaStartX, + int alphaStartY) { + if (alphaStartX == 0 && alphaStartY == 0) { + return makeSubset(Rect::MakeWH(displayWidth, displayHeight)); + } + auto image = onMakeRGBAAA(displayWidth, displayHeight, alphaStartX, alphaStartY); + if (image != nullptr) { + image->weakThis = image; + } + return image; +} + +std::shared_ptr Image::onMakeRGBAAA(int displayWidth, int displayHeight, int alphaStartX, + int alphaStartY) const { + if (isAlphaOnly() || alphaStartX + displayWidth > source->width() || + alphaStartY + displayHeight > source->height()) { + return nullptr; + } + return std::shared_ptr( + new RGBAAAImage(source, displayWidth, displayHeight, alphaStartX, alphaStartY)); +} + +std::shared_ptr Image::applyOrigin(EncodedOrigin origin) const { + if (origin == EncodedOrigin::TopLeft) { + return weakThis.lock(); + } + auto image = onApplyOrigin(origin); + if 
(image != nullptr) { + image->weakThis = image; + } + return image; +} + +std::shared_ptr Image::onApplyOrigin(EncodedOrigin encodedOrigin) const { + return std::shared_ptr(new SubsetImage(std::move(source), encodedOrigin)); +} + +std::unique_ptr Image::asFragmentProcessor( + Context* context, uint32_t surfaceFlags, TileMode tileModeX, TileMode tileModeY, + const SamplingOptions& sampling, const Matrix* localMatrix) { + return TiledTextureEffect::Make(source->lockTextureProxy(context, surfaceFlags), tileModeX, + tileModeY, sampling, localMatrix); +} + +std::unique_ptr Image::asFragmentProcessor(Context* context, + uint32_t surfaceFlags, + const SamplingOptions& sampling) { + return asFragmentProcessor(context, surfaceFlags, TileMode::Clamp, TileMode::Clamp, sampling); +} + +std::shared_ptr Image::cloneWithSource(std::shared_ptr newSource) const { + auto decodedImage = onCloneWith(std::move(newSource)); + decodedImage->weakThis = decodedImage; + return decodedImage; +} +} // namespace tgfx diff --git a/src/core/ImageBuffer.cpp b/src/core/ImageBuffer.cpp new file mode 100644 index 00000000..2233dac7 --- /dev/null +++ b/src/core/ImageBuffer.cpp @@ -0,0 +1,50 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/ImageBuffer.h" +#include "images/RasterBuffer.h" +#include "images/RasterGenerator.h" +#include "images/RasterYUVBuffer.h" + +namespace tgfx { +std::shared_ptr ImageBuffer::MakeFrom(const ImageInfo& info, + std::shared_ptr pixels) { + auto buffer = RasterBuffer::MakeFrom(info, pixels); + if (buffer != nullptr) { + return buffer; + } + auto generator = RasterGenerator::MakeFrom(info, std::move(pixels)); + return generator->makeBuffer(); +} + +std::shared_ptr ImageBuffer::MakeI420(std::shared_ptr yuvData, + YUVColorSpace colorSpace) { + if (yuvData == nullptr || yuvData->planeCount() != YUVData::I420_PLANE_COUNT) { + return nullptr; + } + return std::make_shared(std::move(yuvData), YUVPixelFormat::I420, colorSpace); +} + +std::shared_ptr ImageBuffer::MakeNV12(std::shared_ptr yuvData, + YUVColorSpace colorSpace) { + if (yuvData == nullptr || yuvData->planeCount() != YUVData::NV12_PLANE_COUNT) { + return nullptr; + } + return std::make_shared(std::move(yuvData), YUVPixelFormat::NV12, colorSpace); +} +} // namespace tgfx diff --git a/src/core/ImageCodec.cpp b/src/core/ImageCodec.cpp new file mode 100644 index 00000000..860d00ba --- /dev/null +++ b/src/core/ImageCodec.cpp @@ -0,0 +1,147 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. 
+// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/ImageCodec.h" +#include "core/PixelBuffer.h" +#include "tgfx/core/ImageInfo.h" +#include "tgfx/core/Pixmap.h" +#include "tgfx/utils/Buffer.h" +#include "tgfx/utils/Stream.h" +#include "utils/USE.h" + +#if defined(TGFX_USE_WEBP_DECODE) || defined(TGFX_USE_WEBP_ENCODE) +#include "codecs/webp/WebpCodec.h" +#endif + +#if defined(TGFX_USE_PNG_DECODE) || defined(TGFX_USE_PNG_ENCODE) + +#include "codecs/png/PngCodec.h" + +#endif + +#if defined(TGFX_USE_JPEG_DECODE) || defined(TGFX_USE_JPEG_ENCODE) +#include "codecs/jpeg/JpegCodec.h" +#endif + +namespace tgfx { +std::shared_ptr ImageCodec::MakeFrom(const std::string& filePath) { + std::shared_ptr codec = nullptr; + auto stream = Stream::MakeFromFile(filePath); + if (stream == nullptr || stream->size() <= 14) { + return nullptr; + } + Buffer buffer(14); + if (stream->read(buffer.data(), 14) < 14) { + return nullptr; + } + auto data = buffer.release(); +#ifdef TGFX_USE_WEBP_DECODE + if (WebpCodec::IsWebp(data)) { + codec = WebpCodec::MakeFrom(filePath); + } +#endif + +#ifdef TGFX_USE_PNG_DECODE + if (PngCodec::IsPng(data)) { + codec = PngCodec::MakeFrom(filePath); + } +#endif + +#ifdef TGFX_USE_JPEG_DECODE + if (JpegCodec::IsJpeg(data)) { + codec = JpegCodec::MakeFrom(filePath); + } +#endif + if (codec == nullptr) { + codec = MakeNativeCodec(filePath); + } + if (codec && !ImageInfo::IsValidSize(codec->width(), codec->height())) { + codec = nullptr; + } + return codec; +} + +std::shared_ptr ImageCodec::MakeFrom(std::shared_ptr imageBytes) { + if (imageBytes == nullptr || imageBytes->size() == 0) { + return nullptr; + } + std::shared_ptr codec = nullptr; +#ifdef TGFX_USE_WEBP_DECODE + if (WebpCodec::IsWebp(imageBytes)) { + codec = WebpCodec::MakeFrom(imageBytes); + } +#endif +#ifdef TGFX_USE_PNG_DECODE + if (PngCodec::IsPng(imageBytes)) { + codec = PngCodec::MakeFrom(imageBytes); + } +#endif +#ifdef TGFX_USE_JPEG_DECODE + if (JpegCodec::IsJpeg(imageBytes)) { + codec = JpegCodec::MakeFrom(imageBytes); + } +#endif + if (codec == nullptr) { + codec = MakeNativeCodec(imageBytes); + } + if (codec && !ImageInfo::IsValidSize(codec->width(), codec->height())) { + codec = nullptr; + } + return codec; +} + +std::shared_ptr ImageCodec::Encode(const Pixmap& pixmap, EncodedFormat format, int quality) { + if (pixmap.isEmpty()) { + return nullptr; + } + USE(format); + if (quality > 100) { + quality = 100; + } + if (quality < 0) { + quality = 0; + } +#ifdef TGFX_USE_JPEG_ENCODE + if (format == EncodedFormat::JPEG) { + return JpegCodec::Encode(pixmap, quality); + } +#endif +#ifdef TGFX_USE_WEBP_ENCODE + if (format == EncodedFormat::WEBP) { + return WebpCodec::Encode(pixmap, quality); + } +#endif +#ifdef TGFX_USE_PNG_ENCODE + if (format == EncodedFormat::PNG) { + return PngCodec::Encode(pixmap, quality); + } +#endif + 
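+  // No compiled-in encoder matched the requested format, so encoding is not supported here.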
return nullptr; +} + +std::shared_ptr ImageCodec::onMakeBuffer(bool tryHardware) const { + auto pixelBuffer = PixelBuffer::Make(width(), height(), isAlphaOnly(), tryHardware); + if (pixelBuffer == nullptr) { + return nullptr; + } + auto pixels = pixelBuffer->lockPixels(); + auto result = readPixels(pixelBuffer->info(), pixels); + pixelBuffer->unlockPixels(); + return result ? pixelBuffer : nullptr; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/core/ImageFilter.cpp b/src/core/ImageFilter.cpp new file mode 100644 index 00000000..802c3930 --- /dev/null +++ b/src/core/ImageFilter.cpp @@ -0,0 +1,42 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/ImageFilter.h" + +namespace tgfx { +bool ImageFilter::applyCropRect(const Rect& srcRect, Rect* dstRect, const Rect* clipRect) const { + *dstRect = onFilterNodeBounds(srcRect); + if (!cropRect.isEmpty()) { + dstRect->intersect(cropRect); + } + if (clipRect) { + return dstRect->intersect(*clipRect); + } + return true; +} + +Rect ImageFilter::filterBounds(const Rect& rect) const { + auto dstBounds = Rect::MakeEmpty(); + applyCropRect(rect, &dstBounds); + return dstBounds; +} + +Rect ImageFilter::onFilterNodeBounds(const Rect& srcRect) const { + return srcRect; +} +} // namespace tgfx diff --git a/src/core/ImageInfo.cpp b/src/core/ImageInfo.cpp new file mode 100644 index 00000000..fe15f113 --- /dev/null +++ b/src/core/ImageInfo.cpp @@ -0,0 +1,110 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/ImageInfo.h" + +namespace tgfx { +static constexpr int MaxDimension = INT32_MAX >> 2; + +bool ImageInfo::IsValidSize(int width, int height) { + return width > 0 && width <= MaxDimension && height > 0 && height <= MaxDimension; +} + +int ImageInfo::GetBytesPerPixel(ColorType colorType) { + switch (colorType) { + case ColorType::ALPHA_8: + case ColorType::Gray_8: + return 1; + case ColorType::RGB_565: + return 2; + case ColorType::RGBA_8888: + case ColorType::BGRA_8888: + case ColorType::RGBA_1010102: + return 4; + case ColorType::RGBA_F16: + return 8; + default: + return 0; + } +} + +ImageInfo ImageInfo::Make(int width, int height, ColorType colorType, AlphaType alphaType, + size_t rowBytes) { + static ImageInfo emptyInfo = {}; + if (colorType == ColorType::Unknown || alphaType == AlphaType::Unknown || + !IsValidSize(width, height)) { + return emptyInfo; + } + auto bytesPerPixel = GetBytesPerPixel(colorType); + if (rowBytes > 0) { + if (rowBytes < static_cast(width) * bytesPerPixel) { + return emptyInfo; + } + } else { + rowBytes = width * bytesPerPixel; + } + if (colorType == ColorType::ALPHA_8) { + alphaType = AlphaType::Premultiplied; + } else if (colorType == ColorType::RGB_565) { + alphaType = AlphaType::Opaque; + } + return {width, height, colorType, alphaType, rowBytes}; +} + +int ImageInfo::bytesPerPixel() const { + return GetBytesPerPixel(_colorType); +} + +ImageInfo ImageInfo::makeIntersect(int x, int y, int targetWidth, int targetHeight) const { + int left = std::max(0, x); + int right = std::min(_width, x + targetWidth); + int top = std::max(0, y); + int bottom = std::min(_height, y + targetHeight); + return makeWH(right - left, bottom - top); +} + +static int Clamp(int value, int max) { + if (value < 0) { + value = 0; + } + if (value > max) { + value = max; + } + return value; +} + +const void* ImageInfo::computeOffset(const void* pixels, int x, int y) const { + if (pixels == nullptr || isEmpty()) { + return pixels; + } + x = Clamp(x, _width - 1); + y = Clamp(y, _height - 1); + auto offset = y * _rowBytes + x * bytesPerPixel(); + return reinterpret_cast(pixels) + offset; +} + +void* ImageInfo::computeOffset(void* pixels, int x, int y) const { + if (pixels == nullptr || isEmpty()) { + return pixels; + } + x = Clamp(x, _width - 1); + y = Clamp(y, _height - 1); + auto offset = y * _rowBytes + x * bytesPerPixel(); + return reinterpret_cast(pixels) + offset; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/core/ImageReader.cpp b/src/core/ImageReader.cpp new file mode 100644 index 00000000..c6ec1de6 --- /dev/null +++ b/src/core/ImageReader.cpp @@ -0,0 +1,146 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/ImageReader.h" +#include "core/ImageStream.h" +#include "core/PixelRef.h" +#include "gpu/Texture.h" +#include "utils/Log.h" + +namespace tgfx { +class ImageReaderBuffer : public ImageBuffer { + public: + explicit ImageReaderBuffer(std::shared_ptr imageReader, uint64_t bufferVersion) + : imageReader(std::move(imageReader)), contentVersion(bufferVersion) { + } + + int width() const override { + return imageReader->width(); + } + + int height() const override { + return imageReader->height(); + } + + bool isAlphaOnly() const override { + return imageReader->stream->isAlphaOnly(); + } + + bool expired() const override { + return imageReader->checkExpired(contentVersion); + } + + protected: + std::shared_ptr onMakeTexture(Context* context, bool mipMapped) const override { + return imageReader->readTexture(contentVersion, context, mipMapped); + } + + private: + std::shared_ptr imageReader = nullptr; + uint64_t contentVersion = 0; +}; + +std::shared_ptr ImageReader::MakeFrom(const Bitmap& bitmap) { + return ImageReader::MakeFrom(bitmap.pixelRef); +} + +std::shared_ptr ImageReader::MakeFrom(std::shared_ptr mask) { + return ImageReader::MakeFrom(mask->getImageStream()); +} + +std::shared_ptr ImageReader::MakeFrom(std::shared_ptr imageStream) { + if (imageStream == nullptr) { + return nullptr; + } + auto imageReader = std::shared_ptr(new ImageReader(std::move(imageStream))); + imageReader->weakThis = imageReader; + return imageReader; +} + +ImageReader::ImageReader(std::shared_ptr imageStream) + : stream(std::move(imageStream)) { + stream->attachToStream(this); + dirtyBounds = Rect::MakeWH(stream->width(), stream->height()); +} + +ImageReader::~ImageReader() { + stream->detachFromStream(this); +} + +int ImageReader::width() const { + return stream->width(); +} + +int ImageReader::height() const { + return stream->height(); +} + +std::shared_ptr ImageReader::acquireNextBuffer() { + std::lock_guard autoLock(locker); + DEBUG_ASSERT(!weakThis.expired()); + if (!hasPendingChanges) { + return nullptr; + } + hasPendingChanges = false; + bufferVersion++; + return std::make_shared(weakThis.lock(), bufferVersion); +} + +bool ImageReader::checkExpired(uint64_t contentVersion) { + std::lock_guard autoLock(locker); + return contentVersion != textureVersion && contentVersion < bufferVersion; +} + +std::shared_ptr ImageReader::readTexture(uint64_t contentVersion, Context* context, + bool mipMapped) { + std::lock_guard autoLock(locker); + if (contentVersion == textureVersion) { + return texture; + } + if (contentVersion < bufferVersion) { + LOGE( + "ImageReader::readTexture(): Failed to read texture, the target ImageBuffer is already " + "expired!"); + return nullptr; + } + bool success = true; + if (texture == nullptr) { + texture = stream->onMakeTexture(context, mipMapped); + success = texture != nullptr; + } else if (!stream->isHardwareBacked()) { + success = stream->onUpdateTexture(texture, dirtyBounds); + } + if (success) { + dirtyBounds.setEmpty(); + texture->markUniqueKeyExpired(); + textureVersion = contentVersion; + } + return texture; +} + +void ImageReader::onContentDirty(const Rect& bounds) { + std::lock_guard autoLock(locker); + hasPendingChanges = true; + dirtyBounds.join(bounds); + if (stream->isHardwareBacked() && texture != nullptr) { + texture->markUniqueKeyExpired(); + 
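+    // A hardware-backed stream shares memory with the texture, so the GPU-visible content has
+    // already changed: bumping bufferVersion expires any previously acquired ImageBuffer, and
+    // clearing textureVersion keeps readTexture() from treating the cached texture as current
+    // for an older version.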
textureVersion = 0; + bufferVersion++; + } +} +} // namespace tgfx \ No newline at end of file diff --git a/src/core/ImageStream.cpp b/src/core/ImageStream.cpp new file mode 100644 index 00000000..b326d5b4 --- /dev/null +++ b/src/core/ImageStream.cpp @@ -0,0 +1,42 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "core/ImageStream.h" +#include "tgfx/core/ImageReader.h" + +namespace tgfx { +void ImageStream::markContentDirty(const Rect& bounds) { + std::lock_guard autoLock(locker); + for (auto& reader : readers) { + reader->onContentDirty(bounds); + } +} + +void ImageStream::attachToStream(ImageReader* imageReader) { + std::lock_guard autoLock(locker); + readers.push_back(imageReader); +} + +void ImageStream::detachFromStream(ImageReader* imageReader) { + std::lock_guard autoLock(locker); + auto result = std::find(readers.begin(), readers.end(), imageReader); + if (result != readers.end()) { + readers.erase(result); + } +} +} // namespace tgfx diff --git a/src/core/ImageStream.h b/src/core/ImageStream.h new file mode 100644 index 00000000..6ce85ab7 --- /dev/null +++ b/src/core/ImageStream.h @@ -0,0 +1,91 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include +#include "tgfx/core/Rect.h" + +namespace tgfx { +class ImageReader; +class Texture; +class Context; + +/** + * ImageStream represents a writable pixel buffer that can continuously generate ImageBuffer + * objects, which can be directly accessed by The ImageReader class. ImageStream is an abstract + * class. Use its subclasses instead. + */ +class ImageStream { + public: + virtual ~ImageStream() = default; + + /** + * Returns the width of the ImageStream. + */ + virtual int width() const = 0; + + /** + * Returns the height of the ImageStream. 
+ */ + virtual int height() const = 0; + + /** + * Returns true if pixels represent transparency only. If true, each pixel is packed in 8 bits as + * defined by ColorType::ALPHA_8. + */ + virtual bool isAlphaOnly() const = 0; + + /** + * Returns true if the ImageStream is backed by a platform-specified hardware buffer. Hardware + * buffers allow sharing memory across CPU and GPU, which can be used to speed up the texture + * uploading. + */ + virtual bool isHardwareBacked() const = 0; + + /** + * Marks the specified bounds as dirty. The next time the texture is updated, the pixels in + * the specified bounds will be uploaded to the texture. + */ + void markContentDirty(const Rect& bounds); + + protected: + /** + * Creates a new Texture capturing the pixels in the TextureBuffer. The mipMapped parameter + * specifies whether created texture must allocate mip map levels. + */ + virtual std::shared_ptr onMakeTexture(Context* context, bool mipMapped) = 0; + + /** + * Updates the specified bounds of the texture with the pixels in the TextureBuffer. + */ + virtual bool onUpdateTexture(std::shared_ptr texture, const Rect& bounds) = 0; + + private: + std::mutex locker = {}; + std::vector readers = {}; + + void attachToStream(ImageReader* imageReader); + + void detachFromStream(ImageReader* imageReader); + + friend class ImageReader; +}; +} // namespace tgfx diff --git a/src/core/Mask.cpp b/src/core/Mask.cpp new file mode 100644 index 00000000..80ea0cb6 --- /dev/null +++ b/src/core/Mask.cpp @@ -0,0 +1,56 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Mask.h" +#include "core/ImageStream.h" +#include "tgfx/core/PathEffect.h" + +namespace tgfx { +void Mask::fillPath(const Path& path, const Stroke* stroke) { + if (path.isEmpty()) { + return; + } + auto effect = PathEffect::MakeStroke(stroke); + if (effect != nullptr) { + auto newPath = path; + effect->applyTo(&newPath); + onFillPath(newPath, matrix); + } else { + onFillPath(path, matrix); + } +} + +bool Mask::fillText(const TextBlob* textBlob, const Stroke* stroke) { + if (textBlob == nullptr || textBlob->hasColor()) { + return false; + } + if (onFillText(textBlob, stroke, matrix)) { + return true; + } + Path path = {}; + if (textBlob->getPath(&path, stroke)) { + onFillPath(path, matrix, true); + return true; + } + return false; +} + +bool Mask::onFillText(const TextBlob*, const Stroke*, const Matrix&) { + return false; +} +} // namespace tgfx diff --git a/src/core/Matrix.cpp b/src/core/Matrix.cpp new file mode 100644 index 00000000..79361781 --- /dev/null +++ b/src/core/Matrix.cpp @@ -0,0 +1,438 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Matrix.h" +#include +#include "utils/MathExtra.h" + +namespace tgfx { + +void Matrix::reset() { + values[SCALE_X] = values[SCALE_Y] = 1; + values[SKEW_X] = values[SKEW_Y] = values[TRANS_X] = values[TRANS_Y] = 0; +} + +bool operator==(const Matrix& a, const Matrix& b) { + const float* ma = a.values; + const float* mb = b.values; + return ma[0] == mb[0] && ma[1] == mb[1] && ma[2] == mb[2] && ma[3] == mb[3] && ma[4] == mb[4] && + ma[5] == mb[5]; +} + +void Matrix::setAll(float scaleX, float skewX, float transX, float skewY, float scaleY, + float transY) { + values[SCALE_X] = scaleX; + values[SKEW_X] = skewX; + values[TRANS_X] = transX; + values[SKEW_Y] = skewY; + values[SCALE_Y] = scaleY; + values[TRANS_Y] = transY; +} + +void Matrix::setAffine(float a, float b, float c, float d, float tx, float ty) { + values[SCALE_X] = a; + values[SKEW_X] = c; + values[TRANS_X] = tx; + values[SKEW_Y] = b; + values[SCALE_Y] = d; + values[TRANS_Y] = ty; +} + +void Matrix::get9(float buffer[9]) const { + memcpy(buffer, values, 6 * sizeof(float)); + buffer[6] = buffer[7] = 0.0f; + buffer[8] = 1.0f; +} + +void Matrix::setTranslate(float tx, float ty) { + if ((tx != 0) | (ty != 0)) { + values[TRANS_X] = tx; + values[TRANS_Y] = ty; + + values[SCALE_X] = values[SCALE_Y] = 1; + values[SKEW_X] = values[SKEW_Y] = 0; + } else { + this->reset(); + } +} + +static inline float sdot(float a, float b, float c, float d) { + return a * b + c * d; +} + +void Matrix::preTranslate(float tx, float ty) { + values[TRANS_X] += sdot(values[SCALE_X], tx, values[SKEW_X], ty); + values[TRANS_Y] += sdot(values[SKEW_Y], tx, values[SCALE_Y], ty); +} + +void Matrix::postTranslate(float tx, float ty) { + values[TRANS_X] += tx; + values[TRANS_Y] += ty; +} + +void Matrix::setScaleTranslate(float sx, float sy, float tx, float ty) { + values[SCALE_X] = sx; + values[SKEW_X] = 0; + values[TRANS_X] = tx; + values[SKEW_Y] = 0; + values[SCALE_Y] = sy; + values[TRANS_Y] = ty; +} + +void Matrix::setScale(float sx, float sy, float px, float py) { + if (1 == sx && 1 == sy) { + this->reset(); + } else { + values[SCALE_X] = sx; + values[SKEW_X] = 0; + values[TRANS_X] = 0; + values[SKEW_Y] = 0; + values[SCALE_Y] = sy; + values[TRANS_Y] = 0; + this->setScaleTranslate(sx, sy, px - sx * px, py - sy * py); + } +} + +void Matrix::setScale(float sx, float sy) { + if (1 == sx && 1 == sy) { + this->reset(); + } else { + values[SCALE_X] = sx; + values[SCALE_Y] = sy; + values[TRANS_X] = values[TRANS_Y] = values[SKEW_X] = values[SKEW_Y] = 0; + } +} + +void Matrix::preScale(float sx, float sy, float px, float py) { + if (1 == sx && 1 == sy) { + return; + } + + Matrix m = {}; + m.setScale(sx, sy, px, py); + this->preConcat(m); +} + +void Matrix::preScale(float sx, float sy) { + if (1 == sx && 1 == sy) { + return; + } + values[SCALE_X] *= sx; + values[SKEW_Y] *= sx; + values[SKEW_X] *= sy; + values[SCALE_Y] *= sy; +} + +void Matrix::postScale(float sx, float sy, float px, float py) { + if (1 == sx && 1 == sy) { + return; + } + Matrix m = {}; + m.setScale(sx, sy, px, py); + this->postConcat(m); +} + +void Matrix::postScale(float sx, float sy) { + if (1 == sx && 1 == sy) { + return; + } + Matrix m = {}; + m.setScale(sx, sy); + this->postConcat(m); +} + +void Matrix::setSinCos(float sinV, float cosV, float px, float py) { + const float oneMinusCosV = 1 - cosV; + values[SCALE_X] = cosV; + values[SKEW_X] = -sinV; + values[TRANS_X] = sdot(sinV, py, 
oneMinusCosV, px); + values[SKEW_Y] = sinV; + values[SCALE_Y] = cosV; + values[TRANS_Y] = sdot(-sinV, px, oneMinusCosV, py); +} + +void Matrix::setSinCos(float sinV, float cosV) { + values[SCALE_X] = cosV; + values[SKEW_X] = -sinV; + values[TRANS_X] = 0; + values[SKEW_Y] = sinV; + values[SCALE_Y] = cosV; + values[TRANS_Y] = 0; +} + +void Matrix::setRotate(float degrees, float px, float py) { + float rad = DegreesToRadians(degrees); + this->setSinCos(SinSnapToZero(rad), CosSnapToZero(rad), px, py); +} + +void Matrix::setRotate(float degrees) { + float rad = DegreesToRadians(degrees); + this->setSinCos(SinSnapToZero(rad), CosSnapToZero(rad)); +} + +void Matrix::preRotate(float degrees, float px, float py) { + Matrix m = {}; + m.setRotate(degrees, px, py); + this->preConcat(m); +} + +void Matrix::preRotate(float degrees) { + Matrix m = {}; + m.setRotate(degrees); + this->preConcat(m); +} + +void Matrix::postRotate(float degrees, float px, float py) { + Matrix m = {}; + m.setRotate(degrees, px, py); + this->postConcat(m); +} + +void Matrix::postRotate(float degrees) { + Matrix m = {}; + m.setRotate(degrees); + this->postConcat(m); +} + +void Matrix::setSkew(float sx, float sy, float px, float py) { + values[SCALE_X] = 1; + values[SKEW_X] = sx; + values[TRANS_X] = -sx * py; + values[SKEW_Y] = sy; + values[SCALE_Y] = 1; + values[TRANS_Y] = -sy * px; +} + +void Matrix::setSkew(float sx, float sy) { + values[SCALE_X] = 1; + values[SKEW_X] = sx; + values[TRANS_X] = 0; + values[SKEW_Y] = sy; + values[SCALE_Y] = 1; + values[TRANS_Y] = 0; +} + +void Matrix::preSkew(float sx, float sy, float px, float py) { + Matrix m = {}; + m.setSkew(sx, sy, px, py); + this->preConcat(m); +} + +void Matrix::preSkew(float sx, float sy) { + Matrix m = {}; + m.setSkew(sx, sy); + this->preConcat(m); +} + +void Matrix::postSkew(float sx, float sy, float px, float py) { + Matrix m = {}; + m.setSkew(sx, sy, px, py); + this->postConcat(m); +} + +void Matrix::postSkew(float sx, float sy) { + Matrix m = {}; + m.setSkew(sx, sy); + this->postConcat(m); +} + +void Matrix::setConcat(const Matrix& first, const Matrix& second) { + auto& matA = first.values; + auto& matB = second.values; + auto a = matB[SCALE_X] * matA[SCALE_X]; + auto b = 0.0; + auto c = 0.0; + auto d = matB[SCALE_Y] * matA[SCALE_Y]; + auto tx = matB[TRANS_X] * matA[SCALE_X] + matA[TRANS_X]; + auto ty = matB[TRANS_Y] * matA[SCALE_Y] + matA[TRANS_Y]; + + if (matB[SKEW_Y] != 0.0 || matB[SKEW_X] != 0.0 || matA[SKEW_Y] != 0.0 || matA[SKEW_X] != 0.0) { + a += matB[SKEW_Y] * matA[SKEW_X]; + d += matB[SKEW_X] * matA[SKEW_Y]; + b += matB[SCALE_X] * matA[SKEW_Y] + matB[SKEW_Y] * matA[SCALE_Y]; + c += matB[SKEW_X] * matA[SCALE_X] + matB[SCALE_Y] * matA[SKEW_X]; + tx += matB[TRANS_Y] * matA[SKEW_X]; + ty += matB[TRANS_X] * matA[SKEW_Y]; + } + setAffine(a, b, c, d, tx, ty); +} + +void Matrix::preConcat(const Matrix& mat) { + // check for identity first, so we don't do a needless copy of ourselves + // to ourselves inside setConcat() + if (!mat.isIdentity()) { + this->setConcat(*this, mat); + } +} + +void Matrix::postConcat(const Matrix& mat) { + // check for identity first, so we don't do a needless copy of ourselves + // to ourselves inside setConcat() + if (!mat.isIdentity()) { + this->setConcat(mat, *this); + } +} + +bool Matrix::invertible() const { + float determinant = values[SCALE_X] * values[SCALE_Y] - values[SKEW_Y] * values[SKEW_X]; + return !(FloatNearlyZero(determinant, FLOAT_NEARLY_ZERO * FLOAT_NEARLY_ZERO * FLOAT_NEARLY_ZERO)); +} + +bool 
Matrix::invertNonIdentity(Matrix* inverse) const { + auto a = values[SCALE_X]; + auto d = values[SCALE_Y]; + auto c = values[SKEW_X]; + auto b = values[SKEW_Y]; + auto tx = values[TRANS_X]; + auto ty = values[TRANS_Y]; + + if (b == 0 && c == 0) { + if (a == 0 || d == 0) { + return false; + } + a = 1 / a; + d = 1 / d; + tx = -a * tx; + ty = -d * ty; + inverse->setAffine(a, b, c, d, tx, ty); + return true; + } + float determinant = a * d - b * c; + if (FloatNearlyZero(determinant, FLOAT_NEARLY_ZERO * FLOAT_NEARLY_ZERO * FLOAT_NEARLY_ZERO)) { + return false; + } + determinant = 1 / determinant; + a = d * determinant; + b = -b * determinant; + c = -c * determinant; + d = values[SCALE_X] * determinant; + tx = -(a * values[TRANS_X] + c * values[TRANS_Y]); + ty = -(b * values[TRANS_X] + d * values[TRANS_Y]); + inverse->setAffine(a, b, c, d, tx, ty); + return true; +} + +void Matrix::mapPoints(Point dst[], const Point src[], int count) const { + auto tx = values[TRANS_X]; + auto ty = values[TRANS_Y]; + auto sx = values[SCALE_X]; + auto sy = values[SCALE_Y]; + auto kx = values[SKEW_X]; + auto ky = values[SKEW_Y]; + for (int i = 0; i < count; i++) { + auto x = src[i].x * sx + src[i].y * kx + tx; + auto y = src[i].x * ky + src[i].y * sy + ty; + dst[i].set(x, y); + } +} + +void Matrix::mapXY(float x, float y, Point* result) const { + auto tx = values[TRANS_X]; + auto ty = values[TRANS_Y]; + auto sx = values[SCALE_X]; + auto sy = values[SCALE_Y]; + auto kx = values[SKEW_X]; + auto ky = values[SKEW_Y]; + result->set(x * sx + y * kx + tx, x * ky + y * sy + ty); +} + +bool Matrix::rectStaysRect() const { + float m00 = values[SCALE_X]; + float m01 = values[SKEW_X]; + float m10 = values[SKEW_Y]; + float m11 = values[SCALE_Y]; + if (m01 || m10) { + return m00 == 0 && m11 == 0 && m10 != 0 && m01 != 0; + } else { + return m00 != 0 && m11 != 0; + } +} + +void Matrix::mapRect(Rect* dst, const Rect& src) const { + Point quad[4]; + quad[0].set(src.left, src.top); + quad[1].set(src.right, src.top); + quad[2].set(src.right, src.bottom); + quad[3].set(src.left, src.bottom); + mapPoints(quad, quad, 4); + dst->setBounds(quad, 4); +} + +float Matrix::getMinScale() const { + float results[2]; + if (getMinMaxScaleFactors(results)) { + return results[0]; + } + return -1.0f; +} + +float Matrix::getMaxScale() const { + float results[2]; + if (getMinMaxScaleFactors(results)) { + return results[1]; + } + return -1.0f; +} + +bool Matrix::getMinMaxScaleFactors(float* results) const { + float a = sdot(values[SCALE_X], values[SCALE_X], values[SKEW_Y], values[SKEW_Y]); + float b = sdot(values[SCALE_X], values[SKEW_X], values[SCALE_Y], values[SKEW_Y]); + float c = sdot(values[SKEW_X], values[SKEW_X], values[SCALE_Y], values[SCALE_Y]); + float bSqd = b * b; + if (bSqd <= FLOAT_NEARLY_ZERO * FLOAT_NEARLY_ZERO) { + results[0] = a; + results[1] = c; + if (results[0] > results[1]) { + using std::swap; + swap(results[0], results[1]); + } + } else { + float aminusc = a - c; + float apluscdiv2 = (a + c) * 0.5f; + float x = sqrtf(aminusc * aminusc + 4 * bSqd) * 0.5f; + results[0] = apluscdiv2 - x; + results[1] = apluscdiv2 + x; + } + auto isFinite = (results[0] * 0 == 0); + if (!isFinite) { + return false; + } + if (results[0] < 0) { + results[0] = 0; + } + results[0] = sqrtf(results[0]); + isFinite = (results[1] * 0 == 0); + if (!isFinite) { + return false; + } + if (results[1] < 0) { + results[1] = 0; + } + results[1] = sqrtf(results[1]); + return true; +} + +bool Matrix::isFinite() const { + return FloatsAreFinite(values, 6); +} + +const 
Matrix& Matrix::I() { + static const Matrix identity = Matrix::MakeAll(1, 0, 0, 0, 1, 0); + return identity; +} +} // namespace tgfx diff --git a/src/core/Paint.cpp b/src/core/Paint.cpp new file mode 100644 index 00000000..ce04027c --- /dev/null +++ b/src/core/Paint.cpp @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Paint.h" + +namespace tgfx { +void Paint::reset() { + style = PaintStyle::Fill; + color = Color::Black(); + stroke = Stroke(0); + shader = nullptr; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/core/Path.cpp b/src/core/Path.cpp new file mode 100644 index 00000000..2c0199cb --- /dev/null +++ b/src/core/Path.cpp @@ -0,0 +1,413 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Path.h" +#include "core/PathRef.h" +#include "utils/MathExtra.h" + +namespace tgfx { +using namespace pk; + +static SkPathDirection ToSkDirection(bool reversed) { + return reversed ? SkPathDirection::kCCW : SkPathDirection::kCW; +} + +#define DistanceToControlPoint(angleStep) (4 * tanf((angleStep)*0.25f) / 3) + +class PointIterator { + public: + PointIterator(const std::vector& points, bool reversed, unsigned startIndex) + : points(points), index(startIndex % points.size()), + advance(reversed ? 
points.size() - 1 : 1) { + } + + const SkPoint& current() const { + return *reinterpret_cast(&points[index]); + } + + const SkPoint& next() { + index = (index + advance) % points.size(); + return this->current(); + } + + protected: + std::vector points = {}; + + private: + size_t index = 0; + size_t advance = 0; +}; + +static SkRect ToSkRect(const Rect& rect) { + return {rect.left, rect.top, rect.right, rect.bottom}; +} + +Path::Path() : pathRef(std::make_shared()) { +} + +bool operator==(const Path& a, const Path& b) { + return a.pathRef->path == b.pathRef->path; +} + +bool operator!=(const Path& a, const Path& b) { + return a.pathRef->path != b.pathRef->path; +} + +PathFillType Path::getFillType() const { + auto fillType = pathRef->path.getFillType(); + switch (fillType) { + case SkPathFillType::kEvenOdd: + return PathFillType::EvenOdd; + case SkPathFillType::kInverseEvenOdd: + return PathFillType::InverseEvenOdd; + case SkPathFillType::kInverseWinding: + return PathFillType::InverseWinding; + default: + return PathFillType::Winding; + } +} + +void Path::setFillType(PathFillType fillType) { + SkPathFillType type; + switch (fillType) { + case PathFillType::EvenOdd: + type = SkPathFillType::kEvenOdd; + break; + case PathFillType::InverseEvenOdd: + type = SkPathFillType::kInverseEvenOdd; + break; + case PathFillType::InverseWinding: + type = SkPathFillType::kInverseWinding; + break; + default: + type = SkPathFillType::kWinding; + break; + } + writableRef()->path.setFillType(type); +} + +bool Path::isInverseFillType() const { + auto fillType = pathRef->path.getFillType(); + return fillType == SkPathFillType::kInverseWinding || fillType == SkPathFillType::kInverseEvenOdd; +} + +void Path::toggleInverseFillType() { + auto& path = writableRef()->path; + switch (path.getFillType()) { + case SkPathFillType::kWinding: + path.setFillType(SkPathFillType::kInverseWinding); + break; + case SkPathFillType::kEvenOdd: + path.setFillType(SkPathFillType::kInverseEvenOdd); + break; + case SkPathFillType::kInverseWinding: + path.setFillType(SkPathFillType::kWinding); + break; + case SkPathFillType::kInverseEvenOdd: + path.setFillType(SkPathFillType::kEvenOdd); + break; + } +} + +bool Path::asRect(Rect* rect) const { + SkRect skRect = {}; + if (!pathRef->path.isRect(&skRect)) { + return false; + } + if (rect) { + rect->setLTRB(skRect.fLeft, skRect.fTop, skRect.fRight, skRect.fBottom); + } + return true; +} + +bool Path::asRRect(RRect* rRect) const { + SkRRect skRRect = {}; + SkRect skRect = {}; + if (pathRef->path.isOval(&skRect)) { + skRRect.setOval(skRect); + } else if (pathRef->path.isRRect(&skRRect)) { + if (skRRect.isComplex() || skRRect.isNinePatch()) { + return false; + } + } else { + return false; + } + if (rRect) { + const auto& rect = skRRect.rect(); + rRect->rect.setLTRB(rect.fLeft, rect.fTop, rect.fRight, rect.fBottom); + auto radii = skRRect.getSimpleRadii(); + rRect->radii.set(radii.fX, radii.fY); + } + return true; +} + +bool Path::isLine(Point line[2]) const { + return pathRef->path.isLine(reinterpret_cast(line)); +} + +Rect Path::getBounds() const { + // Internally, SkPath lazily computes bounds. Use this function instead of path.getBounds() + // for thread safety. 
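+  // Copying the points and computing the bounds locally costs O(n) per call, but it avoids
+  // touching the lazily computed (and cached) bounds inside the shared SkPath.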
+ const auto& path = pathRef->path; + auto count = path.countPoints(); + auto points = new SkPoint[count]; + path.getPoints(points, count); + auto rect = SkRect::MakeEmpty(); + rect.setBounds(points, count); + delete[] points; + return {rect.fLeft, rect.fTop, rect.fRight, rect.fBottom}; +} + +bool Path::isEmpty() const { + return pathRef->path.isEmpty(); +} + +bool Path::contains(float x, float y) const { + return pathRef->path.contains(x, y); +} + +bool Path::contains(const Rect& rect) const { + return pathRef->path.conservativelyContainsRect(ToSkRect(rect)); +} + +void Path::moveTo(float x, float y) { + writableRef()->path.moveTo(x, y); +} + +void Path::moveTo(const Point& point) { + moveTo(point.x, point.y); +} + +void Path::lineTo(float x, float y) { + writableRef()->path.lineTo(x, y); +} + +void Path::lineTo(const Point& point) { + lineTo(point.x, point.y); +} + +void Path::quadTo(float controlX, float controlY, float x, float y) { + writableRef()->path.quadTo(controlX, controlY, x, y); +} + +void Path::quadTo(const Point& control, const Point& point) { + quadTo(control.x, control.y, point.x, point.y); +} + +void Path::cubicTo(float controlX1, float controlY1, float controlX2, float controlY2, float x, + float y) { + writableRef()->path.cubicTo(controlX1, controlY1, controlX2, controlY2, x, y); +} + +void Path::cubicTo(const Point& control1, const Point& control2, const Point& point) { + cubicTo(control1.x, control1.y, control2.x, control2.y, point.x, point.y); +} + +void Path::close() { + writableRef()->path.close(); +} + +void Path::addRect(const Rect& rect, bool reversed, unsigned startIndex) { + addRect(rect.left, rect.top, rect.right, rect.bottom, reversed, startIndex); +} + +void Path::addRect(float left, float top, float right, float bottom, bool reversed, + unsigned startIndex) { + std::vector points = {}; + points.push_back({left, top}); + points.push_back({right, top}); + points.push_back({right, bottom}); + points.push_back({left, bottom}); + PointIterator iter(points, reversed, startIndex); + auto path = &(writableRef()->path); + path->moveTo(iter.current()); + path->lineTo(iter.next()); + path->lineTo(iter.next()); + path->lineTo(iter.next()); + path->close(); +} + +static std::vector GetArcPoints(float centerX, float centerY, float radiusX, float radiusY, + float startAngle, float endAngle, int* numBeziers) { + std::vector points = {}; + float start = startAngle; + float end = std::min(endAngle, start + M_PI_2_F); + float currentX, currentY; + *numBeziers = 0; + for (int i = 0; i < 4; i++) { + auto angleStep = end - start; + auto distance = DistanceToControlPoint(angleStep); + currentX = centerX + cosf(start) * radiusX; + currentY = centerY + sinf(start) * radiusY; + points.push_back({currentX, currentY}); + auto u = cosf(start); + auto v = sinf(start); + auto x1 = currentX - v * distance * radiusX; + auto y1 = currentY + u * distance * radiusY; + points.push_back({x1, y1}); + u = cosf(end); + v = sinf(end); + currentX = centerX + u * radiusX; + currentY = centerY + v * radiusY; + auto x2 = currentX + v * distance * radiusX; + auto y2 = currentY - u * distance * radiusY; + points.push_back({x2, y2}); + (*numBeziers)++; + if (end == endAngle) { + break; + } + start = end; + end = std::min(endAngle, start + M_PI_2_F); + } + return points; +} + +void Path::addOval(const Rect& oval, bool reversed, unsigned startIndex) { + writableRef()->path.addOval(ToSkRect(oval), ToSkDirection(reversed), startIndex); +} + +void Path::addArc(const Rect& oval, float startAngle, float 
sweepAngle) { + auto radiusX = oval.width() * 0.5f; + auto radiusY = oval.height() * 0.5f; + auto endAngle = startAngle + sweepAngle; + auto reversed = sweepAngle < 0; + if (reversed) { + std::swap(startAngle, endAngle); + } + int numBeziers = 0; + auto points = GetArcPoints(oval.centerX(), oval.centerY(), radiusX, radiusY, startAngle, endAngle, + &numBeziers); + PointIterator iter(points, reversed, 0); + auto path = &(writableRef()->path); + path->moveTo(iter.current()); + for (int i = 0; i < numBeziers; i++) { + path->cubicTo(iter.next(), iter.next(), iter.next()); + } + path->close(); +} + +void Path::addRoundRect(const Rect& rect, float radiusX, float radiusY, bool reversed, + unsigned startIndex) { + writableRef()->path.addRRect(SkRRect::MakeRectXY(ToSkRect(rect), radiusX, radiusY), + ToSkDirection(reversed), startIndex); +} + +void Path::addPath(const Path& src, PathOp op) { + auto& path = writableRef()->path; + const auto& newPath = src.pathRef->path; + if (op == PathOp::Append) { + if (path.isEmpty()) { + path = newPath; + } else { + path.addPath(newPath); + } + return; + } + SkPathOp pathOp; + switch (op) { + case PathOp::Intersect: + pathOp = SkPathOp::kIntersect_SkPathOp; + break; + case PathOp::Difference: + pathOp = SkPathOp::kDifference_SkPathOp; + break; + case PathOp::XOR: + pathOp = SkPathOp::kXOR_SkPathOp; + break; + default: + pathOp = SkPathOp::kUnion_SkPathOp; + break; + } + Op(path, newPath, pathOp, &path); +} + +void Path::reset() { + pathRef = std::make_shared(); +} + +void Path::transform(const Matrix& matrix) { + if (matrix.isIdentity()) { + return; + } + float values[9] = {}; + matrix.get9(values); + SkMatrix skMatrix = {}; + skMatrix.set9(values); + writableRef()->path.transform(skMatrix); +} + +void Path::reverse() { + auto& path = writableRef()->path; + SkPath tempPath; + tempPath.reverseAddPath(path); + path = tempPath; +} + +void Path::decompose(const PathIterator& iterator, void* info) const { + if (iterator == nullptr) { + return; + } + const auto& skPath = pathRef->path; + SkPath::Iter iter(skPath, false); + SkPoint points[4]; + SkPoint quads[5]; + SkPath::Verb verb; + while ((verb = iter.next(points)) != SkPath::kDone_Verb) { + switch (verb) { + case SkPath::kMove_Verb: + iterator(PathVerb::Move, reinterpret_cast(points), info); + break; + case SkPath::kLine_Verb: + iterator(PathVerb::Line, reinterpret_cast(points), info); + break; + case SkPath::kQuad_Verb: + iterator(PathVerb::Quad, reinterpret_cast(points), info); + break; + case SkPath::kConic_Verb: + // approximate with 2^1=2 quads. 
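+        // ConvertConicToQuads with pow2 = 1 writes 5 points into quads: the first quad uses
+        // points 0-2 and the second starts at point 2, hence the two callbacks below for
+        // quads and quads + 2.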
+ SkPath::ConvertConicToQuads(points[0], points[1], points[2], iter.conicWeight(), quads, 1); + iterator(PathVerb::Quad, reinterpret_cast(quads), info); + iterator(PathVerb::Quad, reinterpret_cast(quads) + 2, info); + break; + case SkPath::kCubic_Verb: + iterator(PathVerb::Cubic, reinterpret_cast(points), info); + break; + case SkPath::kClose_Verb: + iterator(PathVerb::Close, reinterpret_cast(points), info); + break; + default: + break; + } + } +} + +PathRef* Path::writableRef() { + if (!pathRef.unique()) { + pathRef = std::make_shared(pathRef->path); + } + return pathRef.get(); +} + +int Path::countPoints() const { + return pathRef->path.countPoints(); +} + +int Path::countVerbs() const { + return pathRef->path.countVerbs(); +} +} // namespace tgfx diff --git a/src/core/PathEffect.cpp b/src/core/PathEffect.cpp new file mode 100644 index 00000000..52cfbcee --- /dev/null +++ b/src/core/PathEffect.cpp @@ -0,0 +1,111 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/PathEffect.h" +#include "core/PathRef.h" +#include "tgfx/core/Stroke.h" + +namespace tgfx { +using namespace pk; + +class PkPathEffect : public PathEffect { + public: + explicit PkPathEffect(sk_sp effect) : pathEffect(std::move(effect)) { + } + + bool applyTo(Path* path) const override { + if (path == nullptr) { + return false; + } + auto& skPath = PathRef::WriteAccess(*path); + SkStrokeRec rec(SkStrokeRec::kHairline_InitStyle); + return pathEffect->filterPath(&skPath, skPath, &rec, nullptr); + } + + private: + sk_sp pathEffect = nullptr; +}; + +class StrokePathEffect : public PathEffect { + public: + explicit StrokePathEffect(SkPaint paint) : paint(std::move(paint)) { + } + + bool applyTo(Path* path) const override { + if (path == nullptr) { + return false; + } + auto& skPath = PathRef::WriteAccess(*path); + return paint.getFillPath(skPath, &skPath); + } + + private: + SkPaint paint = {}; +}; + +static SkPaint::Cap ToSkLineCap(LineCap cap) { + switch (cap) { + case LineCap::Round: + return SkPaint::kRound_Cap; + case LineCap::Square: + return SkPaint::kSquare_Cap; + default: + return SkPaint::kButt_Cap; + } +} + +static SkPaint::Join ToSkLineJoin(LineJoin join) { + switch (join) { + case LineJoin::Round: + return SkPaint::kRound_Join; + case LineJoin::Bevel: + return SkPaint::kBevel_Join; + default: + return SkPaint::kMiter_Join; + } +} + +std::unique_ptr PathEffect::MakeStroke(const Stroke* stroke) { + if (stroke == nullptr || stroke->width <= 0) { + return nullptr; + } + SkPaint paint = {}; + paint.setStyle(SkPaint::kStroke_Style); + paint.setStrokeWidth(stroke->width); + paint.setStrokeCap(ToSkLineCap(stroke->cap)); + 
paint.setStrokeJoin(ToSkLineJoin(stroke->join)); + paint.setStrokeMiter(stroke->miterLimit); + return std::unique_ptr(new StrokePathEffect(std::move(paint))); +} + +std::unique_ptr PathEffect::MakeDash(const float* intervals, int count, float phase) { + auto effect = SkDashPathEffect::Make(intervals, count, phase); + if (effect == nullptr) { + return nullptr; + } + return std::unique_ptr(new PkPathEffect(std::move(effect))); +} + +std::unique_ptr PathEffect::MakeCorner(float radius) { + auto effect = SkCornerPathEffect::Make(radius); + if (effect == nullptr) { + return nullptr; + } + return std::unique_ptr(new PkPathEffect(std::move(effect))); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/core/PathMeasure.cpp b/src/core/PathMeasure.cpp new file mode 100644 index 00000000..cdb57daf --- /dev/null +++ b/src/core/PathMeasure.cpp @@ -0,0 +1,72 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/PathMeasure.h" +#include "core/PathRef.h" + +namespace tgfx { +using namespace pk; + +class PkPathMeasure : public PathMeasure { + public: + explicit PkPathMeasure(const SkPath& path) { + pathMeasure = new SkPathMeasure(path, false); + } + + ~PkPathMeasure() override { + delete pathMeasure; + } + + float getLength() override { + return pathMeasure->getLength(); + } + + bool getSegment(float startD, float stopD, Path* result) override { + if (result == nullptr) { + return false; + } + auto& path = PathRef::WriteAccess(*result); + return pathMeasure->getSegment(startD, stopD, &path, true); + } + + bool getPosTan(float distance, Point* position, Point* tangent) override { + if (position == nullptr || tangent == nullptr) { + return false; + } + + SkPoint point{}; + SkVector tan{}; + + auto ret = pathMeasure->getPosTan(distance, &point, &tan); + position->set(point.x(), point.y()); + tangent->set(tan.x(), tan.y()); + return ret; + } + + bool isClosed() override { + return pathMeasure->isClosed(); + } + + private: + SkPathMeasure* pathMeasure = nullptr; +}; + +std::unique_ptr PathMeasure::MakeFrom(const Path& path) { + return std::make_unique(PathRef::ReadAccess(path)); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/core/PathRef.cpp b/src/core/PathRef.cpp new file mode 100644 index 00000000..178a1a12 --- /dev/null +++ b/src/core/PathRef.cpp @@ -0,0 +1,44 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "PathRef.h" +#include "tgfx/core/Path.h" + +namespace tgfx { +using namespace pk; + +// When tessellating curved paths into linear segments, this defines the maximum distance in +// screen space which a segment may deviate from the mathematically correct value. Above this +// value, the segment will be subdivided. This value was chosen to approximate the super sampling +// accuracy of the raster path (16 samples, or one quarter pixel). +static constexpr float DEFAULT_TOLERANCE = 0.25f; + +const SkPath& PathRef::ReadAccess(const Path& path) { + return path.pathRef->path; +} + +SkPath& PathRef::WriteAccess(Path& path) { + return path.writableRef()->path; +} + +int PathRef::ToAATriangles(const Path& path, const Rect& clipBounds, std::vector* vertices) { + const auto& skPath = path.pathRef->path; + return skPath.toAATriangles(DEFAULT_TOLERANCE, *reinterpret_cast(&clipBounds), + vertices); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/core/PathRef.h b/src/core/PathRef.h new file mode 100644 index 00000000..8969a787 --- /dev/null +++ b/src/core/PathRef.h @@ -0,0 +1,54 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "pathkit.h" + +namespace tgfx { +// https://chromium-review.googlesource.com/c/chromium/src/+/1099564/ +static constexpr int AA_TESSELLATOR_MAX_VERB_COUNT = 100; +// A factor used to estimate the memory size of a tessellated path, based on the average value of +// Buffer.size() / Path.countPoints() from 4300+ tessellated path data. 
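+// For example, a path with 1,000 points is estimated at roughly 1,000 * 170 = 170,000 bytes of
+// tessellated vertex data; this is a heuristic average, not an upper bound.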
+static constexpr int AA_TESSELLATOR_BUFFER_SIZE_FACTOR = 170; + +class Path; + +struct Rect; + +class PathRef { + public: + static const pk::SkPath& ReadAccess(const Path& path); + + static pk::SkPath& WriteAccess(Path& path); + + static int ToAATriangles(const Path& path, const Rect& clipBounds, std::vector* vertices); + + PathRef() = default; + + explicit PathRef(const pk::SkPath& path) : path(path) { + } + + private: + pk::SkPath path = {}; + + friend class Path; + friend bool operator==(const Path& a, const Path& b); + friend bool operator!=(const Path& a, const Path& b); +}; +} // namespace tgfx diff --git a/src/core/PixelBuffer.cpp b/src/core/PixelBuffer.cpp new file mode 100644 index 00000000..eaa7c6c6 --- /dev/null +++ b/src/core/PixelBuffer.cpp @@ -0,0 +1,154 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "core/PixelBuffer.h" +#include "tgfx/gpu/Device.h" +#include "utils/PixelFormatUtil.h" + +namespace tgfx { +class RasterPixelBuffer : public PixelBuffer { + public: + RasterPixelBuffer(const ImageInfo& info, uint8_t* pixels) : PixelBuffer(info), _pixels(pixels) { + } + + ~RasterPixelBuffer() override { + delete[] _pixels; + } + + bool isHardwareBacked() const override { + return false; + } + + HardwareBufferRef getHardwareBuffer() const override { + return nullptr; + } + + protected: + void* onLockPixels() const override { + return _pixels; + } + + void onUnlockPixels() const override { + } + + std::shared_ptr onBindToHardwareTexture(Context*) const override { + return nullptr; + } + + private: + uint8_t* _pixels = nullptr; +}; + +class HardwarePixelBuffer : public PixelBuffer { + public: + HardwarePixelBuffer(const ImageInfo& info, HardwareBufferRef hardwareBuffer) + : PixelBuffer(info), hardwareBuffer(HardwareBufferRetain(hardwareBuffer)) { + } + + ~HardwarePixelBuffer() override { + HardwareBufferRelease(hardwareBuffer); + } + + bool isHardwareBacked() const override { + return HardwareBufferCheck(hardwareBuffer); + } + + HardwareBufferRef getHardwareBuffer() const override { + return isHardwareBacked() ? 
hardwareBuffer : nullptr; + } + + protected: + void* onLockPixels() const override { + return HardwareBufferLock(hardwareBuffer); + } + + void onUnlockPixels() const override { + HardwareBufferUnlock(hardwareBuffer); + } + + std::shared_ptr onBindToHardwareTexture(Context* context) const override { + return Texture::MakeFrom(context, hardwareBuffer); + } + + private: + HardwareBufferRef hardwareBuffer; +}; + +std::shared_ptr PixelBuffer::Make(int width, int height, bool alphaOnly, + bool tryHardware) { + if (width <= 0 || height <= 0) { + return nullptr; + } + if (tryHardware) { + auto hardwareBuffer = HardwareBufferAllocate(width, height, alphaOnly); + auto pixelBuffer = PixelBuffer::MakeFrom(hardwareBuffer); + tgfx::HardwareBufferRelease(hardwareBuffer); + if (pixelBuffer != nullptr) { + return pixelBuffer; + } + } + auto colorType = alphaOnly ? ColorType::ALPHA_8 : ColorType::RGBA_8888; + auto info = ImageInfo::Make(width, height, colorType); + if (info.isEmpty()) { + return nullptr; + } + auto pixels = new (std::nothrow) uint8_t[info.byteSize()]; + if (pixels == nullptr) { + return nullptr; + } + return std::make_shared(info, pixels); +} + +std::shared_ptr PixelBuffer::MakeFrom(HardwareBufferRef hardwareBuffer) { + auto info = HardwareBufferGetInfo(hardwareBuffer); + return info.isEmpty() ? nullptr : std::make_shared(info, hardwareBuffer); +} + +PixelBuffer::PixelBuffer(const tgfx::ImageInfo& info) : _info(info) { +} + +void* PixelBuffer::lockPixels() { + locker.lock(); + auto pixels = onLockPixels(); + if (pixels == nullptr) { + locker.unlock(); + } + return pixels; +} + +void PixelBuffer::unlockPixels() { + onUnlockPixels(); + locker.unlock(); +} + +std::shared_ptr PixelBuffer::onMakeTexture(Context* context, bool mipMapped) const { + std::lock_guard autoLock(locker); + if (!mipMapped && isHardwareBacked()) { + return onBindToHardwareTexture(context); + } + auto pixels = onLockPixels(); + if (pixels == nullptr) { + return nullptr; + } + auto format = ColorTypeToPixelFormat(_info.colorType()); + auto texture = Texture::MakeFormat(context, width(), height(), pixels, _info.rowBytes(), format, + ImageOrigin::TopLeft, mipMapped); + onUnlockPixels(); + return texture; +} +} // namespace tgfx diff --git a/src/core/PixelBuffer.h b/src/core/PixelBuffer.h new file mode 100644 index 00000000..fcf2648f --- /dev/null +++ b/src/core/PixelBuffer.h @@ -0,0 +1,103 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Texture.h" +#include "tgfx/core/ImageBuffer.h" +#include "tgfx/platform/HardwareBuffer.h" + +namespace tgfx { +/** + * A container for writable pixel memory. PixelBuffer is thread safe. 
+ */ +class PixelBuffer : public ImageBuffer { + public: + /** + * Creates a new PixelBuffer object width specified width and height. Returns nullptr if width or + * height is not greater than zero. If the alphaOnly is true, sets colorType to + * ColorType::ALPHA_8, otherwise sets to the native 32-bit color type of the current platform. If + * the tryHardware is true, a PixelBuffer backed by hardware is returned if it is available on the + * current platform. Otherwise, a raster PixelBuffer is returned. + */ + static std::shared_ptr Make(int width, int height, bool alphaOnly = false, + bool tryHardware = true); + + /** + * Creates a PixelBuffer from the specified hardware buffer. Returns nullptr if the hardwareBuffer + * is invalid or the current platform has no hardware buffer support. + */ + static std::shared_ptr MakeFrom(HardwareBufferRef hardwareBuffer); + + int width() const override { + return _info.width(); + } + + int height() const override { + return _info.height(); + } + + bool isAlphaOnly() const override { + return _info.isAlphaOnly(); + } + + /** + * Returns an ImageInfo describing the width, height, color type, alpha type, and row bytes of the + * PixelBuffer. + */ + const ImageInfo& info() const { + return _info; + } + + /** + * Locks and returns the address of the pixels to ensure that the memory is accessible. + */ + void* lockPixels(); + + /** + * Call this to balance a successful call to lockPixels(). + */ + void unlockPixels(); + + /** + * Returns true if this pixel buffer is hardware backed. A hardware-backed PixelBuffer allows + * sharing buffers across CPU and GPU, which can be used to speed up the texture uploading. + */ + virtual bool isHardwareBacked() const = 0; + + /** + * Retrieves the backing hardware buffer. This method does not acquire any additional reference to + * the returned hardware buffer. Returns nullptr if the PixelBuffer is not backed by a hardware + * buffer. + */ + virtual HardwareBufferRef getHardwareBuffer() const = 0; + + protected: + explicit PixelBuffer(const ImageInfo& info); + + std::shared_ptr onMakeTexture(Context* context, bool mipMapped) const override; + + virtual void* onLockPixels() const = 0; + virtual void onUnlockPixels() const = 0; + virtual std::shared_ptr onBindToHardwareTexture(Context* context) const = 0; + + private: + mutable std::mutex locker = {}; + ImageInfo _info = {}; +}; +} // namespace tgfx \ No newline at end of file diff --git a/src/core/PixelRef.cpp b/src/core/PixelRef.cpp new file mode 100644 index 00000000..6b8c9f6f --- /dev/null +++ b/src/core/PixelRef.cpp @@ -0,0 +1,85 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
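A short usage sketch for the PixelBuffer API above: Make() prefers a hardware-backed buffer when the platform provides one and falls back to a raster allocation, and lockPixels()/unlockPixels() bracket any CPU access. The buffer size and the ClearToTransparent name are arbitrary:

#include <cstring>
#include "core/PixelBuffer.h"

static void ClearToTransparent() {
  auto buffer = tgfx::PixelBuffer::Make(256, 256, false, true);
  if (buffer == nullptr) {
    return;
  }
  auto pixels = buffer->lockPixels();  // returns nullptr if the buffer cannot be mapped
  if (pixels != nullptr) {
    memset(pixels, 0, buffer->info().byteSize());
    buffer->unlockPixels();  // only balance a successful lock
  }
}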
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "PixelRef.h" +#include "gpu/Gpu.h" + +namespace tgfx { +std::shared_ptr PixelRef::Make(int width, int height, bool alphaOnly, bool tryHardware) { + auto pixelBuffer = PixelBuffer::Make(width, height, alphaOnly, tryHardware); + return Wrap(std::move(pixelBuffer)); +} + +std::shared_ptr PixelRef::Wrap(std::shared_ptr pixelBuffer) { + if (pixelBuffer == nullptr) { + return nullptr; + } + return std::shared_ptr(new PixelRef((std::move(pixelBuffer)))); +} + +PixelRef::PixelRef(std::shared_ptr pixelBuffer) : pixelBuffer(std::move(pixelBuffer)) { +} + +void* PixelRef::lockWritablePixels() { + auto pixels = pixelBuffer->lockPixels(); + if (pixels == nullptr) { + return nullptr; + } + if (!pixelBuffer.unique()) { + auto& info = pixelBuffer->info(); + auto newBuffer = PixelBuffer::Make(info.width(), info.height(), info.isAlphaOnly(), + pixelBuffer->isHardwareBacked()); + if (newBuffer == nullptr) { + pixelBuffer->unlockPixels(); + return nullptr; + } + auto dstPixels = newBuffer->lockPixels(); + memcpy(dstPixels, pixels, info.byteSize()); + pixelBuffer->unlockPixels(); + pixels = dstPixels; + pixelBuffer = newBuffer; + } + return pixels; +} + +void PixelRef::clear() { + auto pixels = lockWritablePixels(); + if (pixels != nullptr) { + markContentDirty(Rect::MakeWH(width(), height())); + memset(pixels, 0, info().byteSize()); + } + unlockPixels(); +} + +std::shared_ptr PixelRef::onMakeTexture(Context* context, bool mipMapped) { + return Texture::MakeFrom(context, pixelBuffer, mipMapped); +} + +bool PixelRef::onUpdateTexture(std::shared_ptr texture, const Rect& bounds) { + auto gpu = texture->getContext()->gpu(); + auto pixels = lockPixels(); + if (pixels == nullptr) { + return false; + } + pixels = + info().computeOffset(pixels, static_cast(bounds.left), static_cast(bounds.top)); + gpu->writePixels(texture->getSampler(), bounds, pixels, info().rowBytes()); + unlockPixels(); + return true; +} +} // namespace tgfx diff --git a/src/core/PixelRef.h b/src/core/PixelRef.h new file mode 100644 index 00000000..72fc2278 --- /dev/null +++ b/src/core/PixelRef.h @@ -0,0 +1,124 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "core/ImageStream.h" +#include "core/PixelBuffer.h" + +namespace tgfx { +/** + * This class is the smart container for pixel memory, and is used with Bitmap. + */ +class PixelRef : public ImageStream { + public: + /** + * Creates a new PixelRef object width specified width and height. Returns nullptr if width or + * height is not greater than zero. 
+ * @param width pixel column count, must be greater than zero. + * @param height pixel row count, must be greater than zero. + * @param alphaOnly If true, sets colorType to ColorType::ALPHA_8, otherwise sets to the native + * 32-bit color type of the current platform. + * @param tryHardware If true, a PixelRef backed by hardware is returned if it is available on + * the current platform. Otherwise, a raster PixelRef is returned. + */ + static std::shared_ptr Make(int width, int height, bool alphaOnly = false, + bool tryHardware = true); + + /** + * Creates a new PixelRef with the specified PixelBuffer. Returns nullptr if the pixelBuffer is + * nullptr. + */ + static std::shared_ptr Wrap(std::shared_ptr pixelBuffer); + + /** + * Returns an ImageInfo describing the width, height, color type, alpha type, and row bytes of the + * PixelRef. + */ + const ImageInfo& info() const { + return pixelBuffer->info(); + } + + int width() const override { + return pixelBuffer->width(); + } + + int height() const override { + return pixelBuffer->height(); + } + + bool isAlphaOnly() const override { + return pixelBuffer->isAlphaOnly(); + } + + bool isHardwareBacked() const override { + return pixelBuffer->isHardwareBacked(); + } + + /** + * Retrieves the backing hardware buffer. This method does not acquire any additional reference to + * the returned hardware buffer. Returns nullptr if the PixelRef is not backed by a hardware + * buffer. + */ + HardwareBufferRef getHardwareBuffer() const { + return pixelBuffer->getHardwareBuffer(); + } + + /** + * Locks and returns the address of the read-only pixels to ensure that the memory is accessible. + */ + const void* lockPixels() const { + return pixelBuffer->lockPixels(); + } + + /** + * Locks and returns the address of the writable pixels to ensure that the memory is accessible. + */ + void* lockWritablePixels(); + + /** + * Call this to balance a successful call to lockPixels(). + */ + void unlockPixels() const { + pixelBuffer->unlockPixels(); + } + + /** + * Returns an ImageBuffer object capturing the pixels in the PixelRef. Subsequent writing of the + * PixelRef will not be captured. + */ + std::shared_ptr makeBuffer() const { + return pixelBuffer; + } + + /** + * Replaces all pixel values with transparent colors. + */ + void clear(); + + protected: + std::shared_ptr onMakeTexture(Context* context, bool mipMapped) override; + + bool onUpdateTexture(std::shared_ptr texture, const Rect& bounds) override; + + private: + std::shared_ptr pixelBuffer = nullptr; + + explicit PixelRef(std::shared_ptr pixelBuffer); +}; +} // namespace tgfx diff --git a/src/core/PixelRefMask.cpp b/src/core/PixelRefMask.cpp new file mode 100644 index 00000000..54b0dd92 --- /dev/null +++ b/src/core/PixelRefMask.cpp @@ -0,0 +1,61 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "PixelRefMask.h" +#include "gpu/Gpu.h" +#include "tgfx/core/Pixmap.h" + +namespace tgfx { +PixelRefMask::PixelRefMask(std::shared_ptr pixelRef) : pixelRef(std::move(pixelRef)) { +} + +void PixelRefMask::markContentDirty(const Rect& bounds, bool flipY) { + if (flipY) { + auto rect = bounds; + auto height = rect.height(); + rect.top = static_cast(pixelRef->height()) - rect.bottom; + rect.bottom = rect.top + height; + pixelRef->markContentDirty(rect); + } else { + pixelRef->markContentDirty(bounds); + } +} + +static float LinearToSRGB(float linear) { + // The magic numbers are derived from the sRGB specification. + // See http://www.color.org/chardata/rgb/srgb.xalter. + if (linear <= 0.0031308f) { + return linear * 12.92f; + } + return 1.055f * std::pow(linear, 1.f / 2.4f) - 0.055f; +} + +const std::array& PixelRefMask::GammaTable() { + static const std::array table = [] { + std::array table{}; + table[0] = 0; + table[255] = 255; + for (int i = 1; i < 255; ++i) { + auto v = std::roundf(LinearToSRGB(static_cast(i) / 255.f) * 255.f); + table[i] = static_cast(v); + } + return table; + }(); + return table; +} +} // namespace tgfx diff --git a/src/core/PixelRefMask.h b/src/core/PixelRefMask.h new file mode 100644 index 00000000..8184917b --- /dev/null +++ b/src/core/PixelRefMask.h @@ -0,0 +1,61 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
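GammaTable() above builds a 256-entry lookup of the sRGB transfer curve so mask coverage can be gamma-encoded with a single table lookup per byte. A worked sample of the mapping, duplicating the LinearToSRGB() helper purely for illustration:

#include <cmath>
#include <cstdio>

// Mirrors the LinearToSRGB() helper above, only to show the numbers it produces.
static float LinearToSRGBSample(float linear) {
  if (linear <= 0.0031308f) {
    return linear * 12.92f;
  }
  return 1.055f * std::pow(linear, 1.f / 2.4f) - 0.055f;
}

static void PrintGammaSample() {
  // A mid-gray coverage value of 128/255 encodes to roughly 188 after the sRGB curve.
  auto encoded = std::roundf(LinearToSRGBSample(128.f / 255.f) * 255.f);
  printf("linear 128 -> sRGB %d\n", static_cast<int>(encoded));
}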
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#pragma once
+
+#include <array>
+#include "core/PixelRef.h"
+#include "tgfx/core/Mask.h"
+
+namespace tgfx {
+class PixelRefMask : public Mask {
+ public:
+  explicit PixelRefMask(std::shared_ptr<PixelRef> pixelRef);
+
+  int width() const override {
+    return pixelRef->width();
+  }
+
+  int height() const override {
+    return pixelRef->height();
+  }
+
+  bool isHardwareBacked() const override {
+    return pixelRef->isHardwareBacked();
+  }
+
+  void clear() override {
+    return pixelRef->clear();
+  }
+
+  std::shared_ptr<ImageBuffer> makeBuffer() const override {
+    return pixelRef->makeBuffer();
+  }
+
+ protected:
+  std::shared_ptr<PixelRef> pixelRef = nullptr;
+
+  void markContentDirty(const Rect& bounds, bool flipY);
+
+  static const std::array<uint8_t, 256>& GammaTable();
+
+  std::shared_ptr<ImageStream> getImageStream() const override {
+    return pixelRef;
+  }
+};
+}  // namespace tgfx
diff --git a/src/core/Pixmap.cpp b/src/core/Pixmap.cpp
new file mode 100644
index 00000000..0f6f2109
--- /dev/null
+++ b/src/core/Pixmap.cpp
@@ -0,0 +1,238 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Pixmap.h" +#include +#include "core/PixelRef.h" +#include "skcms.h" + +namespace tgfx { + +static inline void* AddOffset(void* pixels, size_t offset) { + return reinterpret_cast(pixels) + offset; +} + +static inline const void* AddOffset(const void* pixels, size_t offset) { + return reinterpret_cast(pixels) + offset; +} + +static void CopyRectMemory(const void* src, size_t srcRB, void* dst, size_t dstRB, + size_t trimRowBytes, int rowCount) { + if (trimRowBytes == dstRB && trimRowBytes == srcRB) { + memcpy(dst, src, trimRowBytes * rowCount); + return; + } + for (int i = 0; i < rowCount; i++) { + memcpy(dst, src, trimRowBytes); + dst = AddOffset(dst, dstRB); + src = AddOffset(src, srcRB); + } +} + +static const std::unordered_map ColorMapper{ + {ColorType::RGBA_8888, gfx::skcms_PixelFormat::skcms_PixelFormat_RGBA_8888}, + {ColorType::BGRA_8888, gfx::skcms_PixelFormat::skcms_PixelFormat_BGRA_8888}, + {ColorType::ALPHA_8, gfx::skcms_PixelFormat::skcms_PixelFormat_A_8}, + {ColorType::RGB_565, gfx::skcms_PixelFormat::skcms_PixelFormat_BGR_565}, + {ColorType::Gray_8, gfx::skcms_PixelFormat::skcms_PixelFormat_G_8}, + {ColorType::RGBA_F16, gfx::skcms_PixelFormat::skcms_PixelFormat_RGBA_hhhh}, + {ColorType::RGBA_1010102, gfx::skcms_PixelFormat::skcms_PixelFormat_RGBA_1010102}, +}; + +static const std::unordered_map AlphaMapper{ + {AlphaType::Unpremultiplied, gfx::skcms_AlphaFormat::skcms_AlphaFormat_Unpremul}, + {AlphaType::Premultiplied, gfx::skcms_AlphaFormat::skcms_AlphaFormat_PremulAsEncoded}, + {AlphaType::Opaque, gfx::skcms_AlphaFormat::skcms_AlphaFormat_Opaque}, +}; + +static void ConvertPixels(const ImageInfo& srcInfo, const void* srcPixels, const ImageInfo& dstInfo, + void* dstPixels) { + if (srcInfo.colorType() == dstInfo.colorType() && srcInfo.alphaType() == dstInfo.alphaType()) { + CopyRectMemory(srcPixels, srcInfo.rowBytes(), dstPixels, dstInfo.rowBytes(), + dstInfo.minRowBytes(), dstInfo.height()); + return; + } + auto srcFormat = ColorMapper.at(srcInfo.colorType()); + auto srcAlpha = AlphaMapper.at(srcInfo.alphaType()); + auto dstFormat = ColorMapper.at(dstInfo.colorType()); + auto dstAlpha = AlphaMapper.at(dstInfo.alphaType()); + auto width = dstInfo.width(); + auto height = dstInfo.height(); + for (int i = 0; i < height; i++) { + gfx::skcms_Transform(srcPixels, srcFormat, srcAlpha, nullptr, dstPixels, dstFormat, dstAlpha, + nullptr, width); + dstPixels = AddOffset(dstPixels, dstInfo.rowBytes()); + srcPixels = AddOffset(srcPixels, srcInfo.rowBytes()); + } +} + +Pixmap::Pixmap(const ImageInfo& info, const void* pixels) : _info(info), _pixels(pixels) { + if (_info.isEmpty() || _pixels == nullptr) { + _info = {}; + _pixels = nullptr; + } +} + +Pixmap::Pixmap(const ImageInfo& info, void* pixels) + : _info(info), _pixels(pixels), _writablePixels(pixels) { + if (_info.isEmpty() || _pixels == nullptr) { + _info = {}; + _pixels = nullptr; + _writablePixels = nullptr; + } +} + +Pixmap::Pixmap(const Bitmap& bitmap) { + reset(bitmap); +} + +Pixmap::Pixmap(Bitmap& bitmap) { + reset(bitmap); +} + +Pixmap::~Pixmap() { + reset(); +} + +void Pixmap::reset() { + if (pixelRef != nullptr) { + pixelRef->unlockPixels(); + pixelRef = nullptr; + } + _pixels = nullptr; + _writablePixels = nullptr; + _info = {}; +} + +void Pixmap::reset(const ImageInfo& info, const void* pixels) { + reset(); + if (!info.isEmpty() && pixels != nullptr) { + _info = info; + _pixels = pixels; + } +} + 
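ConvertPixels() above copies rows directly when the source and destination formats already match and otherwise routes every row through skcms_Transform. A minimal sketch of exercising that path through the public Pixmap API, converting arbitrary input into unpremultiplied BGRA; the ToBGRA name and the output buffer handling are illustrative:

#include <cstdint>
#include <vector>
#include "tgfx/core/Pixmap.h"

static bool ToBGRA(const tgfx::ImageInfo& srcInfo, const void* srcPixels,
                   std::vector<uint8_t>* output) {
  if (srcInfo.isEmpty() || srcPixels == nullptr || output == nullptr) {
    return false;
  }
  tgfx::Pixmap pixmap(srcInfo, srcPixels);
  auto dstInfo = tgfx::ImageInfo::Make(srcInfo.width(), srcInfo.height(),
                                       tgfx::ColorType::BGRA_8888,
                                       tgfx::AlphaType::Unpremultiplied);
  output->resize(dstInfo.byteSize());
  // readPixels() clips against the source bounds and converts color/alpha types as needed.
  return pixmap.readPixels(dstInfo, output->data(), 0, 0);
}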
+void Pixmap::reset(const ImageInfo& info, void* pixels) { + reset(); + if (!info.isEmpty() && pixels != nullptr) { + _info = info; + _pixels = pixels; + _writablePixels = pixels; + } +} + +void Pixmap::reset(const Bitmap& bitmap) { + reset(); + pixelRef = bitmap.pixelRef; + _pixels = pixelRef ? pixelRef->lockPixels() : nullptr; + if (_pixels == nullptr) { + pixelRef = nullptr; + return; + } + _info = pixelRef->info(); +} + +void Pixmap::reset(Bitmap& bitmap) { + reset(); + pixelRef = bitmap.pixelRef; + _writablePixels = pixelRef ? pixelRef->lockWritablePixels() : nullptr; + if (_writablePixels == nullptr) { + pixelRef = nullptr; + return; + } + _pixels = _writablePixels; + _info = pixelRef->info(); +} + +Color Pixmap::getColor(int x, int y) const { + auto dstInfo = ImageInfo::Make(1, 1, ColorType::RGBA_8888, AlphaType::Unpremultiplied, 4); + uint8_t color[4]; + if (!readPixels(dstInfo, color, x, y)) { + return Color::Transparent(); + } + return Color::FromRGBA(color[0], color[1], color[2], color[3]); +} + +Pixmap Pixmap::makeSubset(const Rect& subset) const { + auto rect = subset; + rect.round(); + auto bounds = Rect::MakeWH(width(), height()); + if (bounds == rect) { + return *this; + } + if (!bounds.contains(rect)) { + return {}; + } + auto srcX = static_cast(rect.x()); + auto srcY = static_cast(rect.y()); + auto width = static_cast(rect.width()); + auto height = static_cast(rect.height()); + auto srcPixels = _info.computeOffset(_pixels, srcX, srcY); + auto srcInfo = _info.makeWH(width, height); + return {srcInfo, srcPixels}; +} + +bool Pixmap::readPixels(const ImageInfo& dstInfo, void* dstPixels, int srcX, int srcY) const { + if (_pixels == nullptr || dstPixels == nullptr) { + return false; + } + auto imageInfo = dstInfo.makeIntersect(-srcX, -srcY, _info.width(), _info.height()); + if (imageInfo.isEmpty()) { + return false; + } + auto srcPixels = _info.computeOffset(_pixels, srcX, srcY); + auto srcInfo = _info.makeWH(imageInfo.width(), imageInfo.height()); + dstPixels = imageInfo.computeOffset(dstPixels, -srcX, -srcY); + ConvertPixels(srcInfo, srcPixels, imageInfo, dstPixels); + return true; +} + +bool Pixmap::writePixels(const ImageInfo& srcInfo, const void* srcPixels, int dstX, int dstY) { + if (_writablePixels == nullptr || srcPixels == nullptr) { + return false; + } + auto imageInfo = srcInfo.makeIntersect(-dstX, -dstY, _info.width(), _info.height()); + if (imageInfo.isEmpty()) { + return false; + } + srcPixels = imageInfo.computeOffset(srcPixels, -dstX, -dstY); + auto dstPixels = _info.computeOffset(_writablePixels, dstX, dstY); + auto dstInfo = _info.makeWH(imageInfo.width(), imageInfo.height()); + ConvertPixels(imageInfo, srcPixels, dstInfo, dstPixels); + return true; +} + +bool Pixmap::clear() { + if (_writablePixels == nullptr) { + return false; + } + if (_info.rowBytes() == _info.minRowBytes()) { + memset(_writablePixels, 0, byteSize()); + } else { + auto rowCount = _info.height(); + auto trimRowBytes = _info.width() * _info.bytesPerPixel(); + auto* pixels = static_cast(_writablePixels); + for (int i = 0; i < rowCount; i++) { + memset(pixels, 0, trimRowBytes); + pixels += info().rowBytes(); + } + } + return true; +} + +} // namespace tgfx diff --git a/src/core/Rect.cpp b/src/core/Rect.cpp new file mode 100644 index 00000000..4b9d6990 --- /dev/null +++ b/src/core/Rect.cpp @@ -0,0 +1,109 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. 
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "tgfx/core/Rect.h"
+
+namespace tgfx {
+void Rect::scale(float scaleX, float scaleY) {
+  left *= scaleX;
+  right *= scaleX;
+  top *= scaleY;
+  bottom *= scaleY;
+}
+
+bool Rect::setBounds(const Point pts[], int count) {
+  if (count <= 0) {
+    this->setEmpty();
+    return true;
+  }
+  float minX, maxX;
+  float minY, maxY;
+  minX = maxX = pts[0].x;
+  minY = maxY = pts[0].y;
+
+  for (int i = 1; i < count; i++) {
+    auto x = pts[i].x;
+    auto y = pts[i].y;
+    auto isFinite = ((x + y) * 0 == 0);
+    if (!isFinite) {
+      setEmpty();
+      return false;
+    }
+    if (x < minX) {
+      minX = x;
+    }
+    if (x > maxX) {
+      maxX = x;
+    }
+    if (y < minY) {
+      minY = y;
+    }
+    if (y > maxY) {
+      maxY = y;
+    }
+  }
+  setLTRB(minX, minY, maxX, maxY);
+  return true;
+}
+
+#define CHECK_INTERSECT(al, at, ar, ab, bl, bt, br, bb) \
+  float L = al > bl ? al : bl;                          \
+  float R = ar < br ? ar : br;                          \
+  float T = at > bt ? at : bt;                          \
+  float B = ab < bb ? ab : bb;                          \
+  do {                                                  \
+    if (!(L < R && T < B)) {                            \
+      return false;                                     \
+    }                                                   \
+  } while (0)
+// do the !(opposite) check so we return false if either arg is NaN
+
+bool Rect::intersect(float l, float t, float r, float b) {
+  CHECK_INTERSECT(l, t, r, b, left, top, right, bottom);
+  this->setLTRB(L, T, R, B);
+  return true;
+}
+
+bool Rect::intersect(const Rect& a, const Rect& b) {
+  CHECK_INTERSECT(a.left, a.top, a.right, a.bottom, b.left, b.top, b.right, b.bottom);
+  this->setLTRB(L, T, R, B);
+  return true;
+}
+
+void Rect::join(float l, float t, float r, float b) {
+  // do nothing if the params are empty
+  if (l >= r || t >= b) {
+    return;
+  }
+  // if we are empty, just assign
+  if (left >= right || top >= bottom) {
+    this->setLTRB(l, t, r, b);
+  } else {
+    left = left < l ? left : l;
+    top = top < t ? top : t;
+    right = right > r ? right : r;
+    bottom = bottom > b ? bottom : b;
+  }
+}
+
+void RRect::scale(float scaleX, float scaleY) {
+  rect.scale(scaleX, scaleY);
+  radii.x *= scaleX;
+  radii.y *= scaleY;
+}
+}  // namespace tgfx
diff --git a/src/core/Shape.cpp b/src/core/Shape.cpp
new file mode 100644
index 00000000..d938f61a
--- /dev/null
+++ b/src/core/Shape.cpp
@@ -0,0 +1,101 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License.
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/Shape.h" +#include "core/PathRef.h" +#include "shapes/PathProxy.h" +#include "shapes/RRectShape.h" +#include "shapes/RectShape.h" +#include "shapes/TextureShape.h" +#include "shapes/TriangulatingShape.h" +#include "tgfx/core/PathEffect.h" + +namespace tgfx { +std::shared_ptr Shape::MakeFromFill(const Path& path, float resolutionScale) { + if (path.isEmpty() || resolutionScale <= 0) { + return nullptr; + } + std::shared_ptr shape; + Rect rect = {}; + RRect rRect = {}; + if (path.asRect(&rect)) { + shape = std::make_shared(rect, resolutionScale); + } else if (path.asRRect(&rRect)) { + shape = std::make_shared(rRect, resolutionScale); + } else { + auto proxy = PathProxy::MakeFromFill(path); + auto bounds = proxy->getBounds(resolutionScale); + auto width = static_cast(ceilf(bounds.width())); + auto height = static_cast(ceilf(bounds.height())); + if (path.countVerbs() <= AA_TESSELLATOR_MAX_VERB_COUNT && + width * height >= path.countPoints() * AA_TESSELLATOR_BUFFER_SIZE_FACTOR) { + shape = std::make_shared(std::move(proxy), resolutionScale); + } else { + shape = std::make_shared(std::move(proxy), resolutionScale); + } + } + shape->weakThis = shape; + return shape; +} + +std::shared_ptr Shape::MakeFromFill(std::shared_ptr textBlob, + float resolutionScale) { + if (textBlob == nullptr || resolutionScale <= 0) { + return nullptr; + } + auto proxy = PathProxy::MakeFromFill(textBlob); + if (proxy == nullptr) { + return nullptr; + } + auto shape = std::make_shared(std::move(proxy), resolutionScale); + shape->weakThis = shape; + return shape; +} + +std::shared_ptr Shape::MakeFromStroke(const Path& path, const Stroke& stroke, + float resolutionScale) { + if (path.isEmpty() || resolutionScale <= 0) { + return nullptr; + } + auto strokePath = path; + auto effect = PathEffect::MakeStroke(&stroke); + if (effect == nullptr) { + return nullptr; + } + effect->applyTo(&strokePath); + return MakeFromFill(strokePath, resolutionScale); +} + +std::shared_ptr Shape::MakeFromStroke(std::shared_ptr textBlob, + const Stroke& stroke, float resolutionScale) { + if (textBlob == nullptr || resolutionScale <= 0) { + return nullptr; + } + auto proxy = PathProxy::MakeFromStroke(textBlob, stroke); + if (proxy == nullptr) { + return nullptr; + } + auto shape = std::make_shared(std::move(proxy), resolutionScale); + shape->weakThis = shape; + return shape; +} + +Shape::Shape(float resolutionScale) : _resolutionScale(resolutionScale) { + uniqueKey = UniqueKey::Next(); +} +} // namespace tgfx diff --git a/src/core/SimpleTextBlob.cpp b/src/core/SimpleTextBlob.cpp new file mode 100644 index 00000000..8dc5318f --- /dev/null +++ b/src/core/SimpleTextBlob.cpp @@ -0,0 +1,78 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
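Shape::MakeFromStroke() above expands the stroke into a fill path with PathEffect::MakeStroke() and then reuses MakeFromFill(), which picks a triangulating or texture-backed implementation from the verb-count and area heuristic. A small sketch, with the Stroke header path and the BuildStrokedShape name assumed for illustration:

#include "tgfx/core/Shape.h"
#include "tgfx/core/Stroke.h"

static std::shared_ptr<tgfx::Shape> BuildStrokedShape() {
  tgfx::Path path = {};
  path.addRect(tgfx::Rect::MakeWH(200, 120));
  tgfx::Stroke stroke = {};
  stroke.width = 4.0f;
  // Returns nullptr if the path is empty or the resolution scale is not positive.
  return tgfx::Shape::MakeFromStroke(path, stroke, 1.0f);
}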
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "core/SimpleTextBlob.h" +#include "tgfx/core/PathEffect.h" + +namespace tgfx { +std::shared_ptr TextBlob::MakeFrom(const GlyphID glyphIDs[], const Point positions[], + size_t glyphCount, const Font& font) { + if (glyphCount == 0) { + return nullptr; + } + auto textBlob = std::make_shared(); + textBlob->glyphIDs = {glyphIDs, glyphIDs + glyphCount}; + textBlob->positions = {positions, positions + glyphCount}; + textBlob->font = font; + return textBlob; +} + +bool SimpleTextBlob::hasColor() const { + return font.getTypeface()->hasColor(); +} + +Rect SimpleTextBlob::getBounds(const Stroke* stroke) const { + auto totalBounds = Rect::MakeEmpty(); + int index = 0; + for (auto& glyphID : glyphIDs) { + auto bounds = font.getBounds(glyphID); + bounds.offset(positions[index]); + if (stroke) { + bounds.outset(stroke->width, stroke->width); + } + totalBounds.join(bounds); + index++; + } + return totalBounds; +} + +bool SimpleTextBlob::getPath(Path* path, const Stroke* stroke) const { + if (hasColor()) { + return false; + } + auto pathEffect = PathEffect::MakeStroke(stroke); + Path totalPath = {}; + auto glyphCount = glyphIDs.size(); + for (size_t i = 0; i < glyphCount; ++i) { + const auto& glyphID = glyphIDs[i]; + Path glyphPath = {}; + if (font.getPath(glyphID, &glyphPath)) { + const auto& position = positions[i]; + if (pathEffect) { + pathEffect->applyTo(&glyphPath); + } + glyphPath.transform(Matrix::MakeTrans(position.x, position.y)); + totalPath.addPath(glyphPath); + } else { + return false; + } + } + *path = totalPath; + return true; +} +} // namespace tgfx diff --git a/src/core/SimpleTextBlob.h b/src/core/SimpleTextBlob.h new file mode 100644 index 00000000..c1652507 --- /dev/null +++ b/src/core/SimpleTextBlob.h @@ -0,0 +1,51 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
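A sketch of driving the TextBlob::MakeFrom() factory above and measuring the result; the glyph IDs, pen positions, and the MeasureGlyphs name are placeholders for values normally produced by shaping text against a real Typeface:

#include "tgfx/core/TextBlob.h"

static tgfx::Rect MeasureGlyphs(const tgfx::Font& font) {
  tgfx::GlyphID glyphs[] = {10, 11, 12};
  tgfx::Point positions[] = {tgfx::Point::Make(0, 0), tgfx::Point::Make(20, 0),
                             tgfx::Point::Make(40, 0)};
  auto blob = tgfx::TextBlob::MakeFrom(glyphs, positions, 3, font);
  return blob ? blob->getBounds() : tgfx::Rect::MakeEmpty();
}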
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/TextBlob.h" + +namespace tgfx { +class SimpleTextBlob : public TextBlob { + public: + bool hasColor() const override; + + Rect getBounds(const Stroke* stroke = nullptr) const override; + + bool getPath(Path* path, const Stroke* stroke = nullptr) const override; + + const Font& getFont() const { + return font; + } + + const std::vector& getGlyphIDs() const { + return glyphIDs; + } + + const std::vector& getPositions() const { + return positions; + } + + private: + Font font = {}; + std::vector glyphIDs = {}; + std::vector positions = {}; + + friend class TextBlob; +}; +} // namespace tgfx diff --git a/src/core/YUVColorSpace.cpp b/src/core/YUVColorSpace.cpp new file mode 100644 index 00000000..6eacc1d5 --- /dev/null +++ b/src/core/YUVColorSpace.cpp @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/YUVColorSpace.h" + +namespace tgfx { +bool IsLimitedYUVColorRange(YUVColorSpace colorSpace) { + switch (colorSpace) { + case YUVColorSpace::BT601_LIMITED: + case YUVColorSpace::BT709_LIMITED: + case YUVColorSpace::BT2020_LIMITED: + return true; + default: + break; + } + return false; +} +} // namespace tgfx diff --git a/src/core/YUVData.cpp b/src/core/YUVData.cpp new file mode 100644 index 00000000..1591057e --- /dev/null +++ b/src/core/YUVData.cpp @@ -0,0 +1,81 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/YUVData.h" +#include + +namespace tgfx { +class RasterYUVData : public YUVData { + public: + RasterYUVData(int width, int height, const void** buffer, const size_t* planeRowBytes, + size_t planeCount, ReleaseProc releaseProc, void* context) + : _width(width), _height(height), releaseProc(releaseProc), releaseContext(context) { + data.reserve(planeCount); + rowBytes.reserve(planeCount); + for (int i = 0; i < static_cast(planeCount); i++) { + data.push_back(buffer[i]); + rowBytes.push_back(planeRowBytes[i]); + } + } + + ~RasterYUVData() override { + if (releaseProc != nullptr) { + releaseProc(releaseContext, &data[0], data.size()); + } + } + + int width() const override { + return _width; + } + + int height() const override { + return _height; + } + + size_t planeCount() const override { + return data.size(); + } + + const void* getBaseAddressAt(int planeIndex) const override { + return data[planeIndex]; + } + + size_t getRowBytesAt(int planeIndex) const override { + return rowBytes[planeIndex]; + } + + private: + int _width = 0; + int _height = 0; + ReleaseProc releaseProc = nullptr; + void* releaseContext = nullptr; + std::vector data = {}; + std::vector rowBytes = {}; +}; + +std::shared_ptr YUVData::MakeFrom(int width, int height, const void** data, + const size_t* rowBytes, size_t planeCount, + ReleaseProc releaseProc, void* context) { + if (width <= 0 || height <= 0 || data == nullptr || rowBytes == nullptr || planeCount == 0) { + return nullptr; + } + return std::make_shared(width, height, data, rowBytes, planeCount, releaseProc, + context); +} + +} // namespace tgfx diff --git a/src/effects/BlurImageFilter.cpp b/src/effects/BlurImageFilter.cpp new file mode 100644 index 00000000..c3a2fbd6 --- /dev/null +++ b/src/effects/BlurImageFilter.cpp @@ -0,0 +1,171 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
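A sketch of wrapping two externally owned NV12 planes with the YUVData::MakeFrom() factory above; the WrapNV12 name and the ownership convention are illustrative:

#include <cstddef>
#include <cstdint>
#include "tgfx/core/YUVData.h"

static std::shared_ptr<tgfx::YUVData> WrapNV12(int width, int height, const uint8_t* yPlane,
                                               const uint8_t* uvPlane, size_t yRowBytes,
                                               size_t uvRowBytes) {
  const void* planes[] = {yPlane, uvPlane};
  size_t rowBytes[] = {yRowBytes, uvRowBytes};
  // A null ReleaseProc means the caller keeps ownership and the planes must outlive the YUVData;
  // pass a callback and context instead if the planes should be freed on destruction.
  return tgfx::YUVData::MakeFrom(width, height, planes, rowBytes, 2, nullptr, nullptr);
}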
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "BlurImageFilter.h" +#include "gpu/SurfaceDrawContext.h" +#include "gpu/TextureSampler.h" +#include "gpu/processors/DualBlurFragmentProcessor.h" + +namespace tgfx { +static const float BLUR_LEVEL_1_LIMIT = 10.0f; +static const float BLUR_LEVEL_2_LIMIT = 15.0f; +static const float BLUR_LEVEL_3_LIMIT = 55.0f; +static const float BLUR_LEVEL_4_LIMIT = 120.0f; +static const float BLUR_LEVEL_5_LIMIT = 300.0f; + +static const float BLUR_LEVEL_MAX_LIMIT = BLUR_LEVEL_5_LIMIT; + +static const int BLUR_LEVEL_1_DEPTH = 1; +static const int BLUR_LEVEL_2_DEPTH = 2; +static const int BLUR_LEVEL_3_DEPTH = 2; +static const int BLUR_LEVEL_4_DEPTH = 3; +static const int BLUR_LEVEL_5_DEPTH = 3; + +static const float BLUR_LEVEL_1_SCALE = 1.0f; +static const float BLUR_LEVEL_2_SCALE = 0.8f; +static const float BLUR_LEVEL_3_SCALE = 0.5f; +static const float BLUR_LEVEL_4_SCALE = 0.5f; +static const float BLUR_LEVEL_5_SCALE = 0.5f; + +static const float BLUR_STABLE = 10.0f; + +static std::tuple Get(float blurriness) { + blurriness = blurriness < BLUR_LEVEL_MAX_LIMIT ? blurriness : BLUR_LEVEL_MAX_LIMIT; + if (blurriness < BLUR_LEVEL_1_LIMIT) { + return {BLUR_LEVEL_1_DEPTH, BLUR_LEVEL_1_SCALE, blurriness / BLUR_LEVEL_1_LIMIT * 2.0}; + } else if (blurriness < BLUR_LEVEL_2_LIMIT) { + return {BLUR_LEVEL_2_DEPTH, BLUR_LEVEL_2_SCALE, + (blurriness - BLUR_STABLE) / (BLUR_LEVEL_2_LIMIT - BLUR_STABLE) * 3.0}; + } else if (blurriness < BLUR_LEVEL_3_LIMIT) { + return {BLUR_LEVEL_3_DEPTH, BLUR_LEVEL_3_SCALE, + (blurriness - BLUR_STABLE) / (BLUR_LEVEL_3_LIMIT - BLUR_STABLE) * 5.0}; + } else if (blurriness < BLUR_LEVEL_4_LIMIT) { + return {BLUR_LEVEL_4_DEPTH, BLUR_LEVEL_4_SCALE, + (blurriness - BLUR_STABLE) / (BLUR_LEVEL_4_LIMIT - BLUR_STABLE) * 6.0}; + } else { + return { + BLUR_LEVEL_5_DEPTH, BLUR_LEVEL_5_SCALE, + 6.0 + (blurriness - BLUR_STABLE * 12.0) / (BLUR_LEVEL_5_LIMIT - BLUR_STABLE * 12.0) * 5.0}; + } +} + +std::shared_ptr ImageFilter::Blur(float blurrinessX, float blurrinessY, + TileMode tileMode, const Rect& cropRect) { + if (blurrinessX < 0 || blurrinessY < 0 || (blurrinessX == 0 && blurrinessY == 0)) { + return nullptr; + } + auto x = Get(blurrinessX); + auto y = Get(blurrinessY); + return std::make_shared( + Point::Make(std::get<2>(x), std::get<2>(y)), std::max(std::get<1>(x), std::get<1>(y)), + std::max(std::get<0>(x), std::get<0>(y)), tileMode, cropRect); +} + +void BlurImageFilter::draw(std::shared_ptr image, Surface* toSurface, bool isDown) { + auto drawContext = std::make_unique(toSurface); + auto dstRect = Rect::MakeWH(toSurface->width(), toSurface->height()); + auto localMatrix = Matrix::MakeScale(static_cast(image->width()) / dstRect.width(), + static_cast(image->height()) / dstRect.height()); + auto texelSize = Point::Make(0.5f / static_cast(image->width()), + 0.5f / static_cast(image->height())); + auto processor = + image->asFragmentProcessor(toSurface->getContext(), toSurface->options()->flags(), tileMode, + tileMode, SamplingOptions()); + drawContext->fillRectWithFP( + dstRect, localMatrix, + DualBlurFragmentProcessor::Make(isDown ? 
DualBlurPassMode::Down : DualBlurPassMode::Up, + std::move(processor), blurOffset, texelSize)); +} + +static std::shared_ptr ExtendImage(Context* context, std::shared_ptr image, + const Rect& dstBounds, TileMode tileMode) { + auto dstWidth = dstBounds.width(); + auto dstHeight = dstBounds.height(); + auto surface = Surface::Make(context, static_cast(dstWidth), static_cast(dstHeight)); + if (surface == nullptr) { + return nullptr; + } + auto canvas = surface->getCanvas(); + auto localMatrix = Matrix::MakeTrans(-dstBounds.left, -dstBounds.top); + canvas->concat(localMatrix); + auto shader = Shader::MakeImageShader(image, tileMode, tileMode); + Paint paint = {}; + paint.setShader(shader); + canvas->drawRect(dstBounds, paint); + return surface->makeImageSnapshot(); +} + +std::pair, Point> BlurImageFilter::filterImage( + const ImageFilterContext& context) { + auto image = context.source; + if (image == nullptr) { + return {}; + } + auto inputBounds = Rect::MakeWH(image->width(), image->height()); + Rect dstBounds = Rect::MakeEmpty(); + if (!applyCropRect(inputBounds, &dstBounds, &context.clipBounds)) { + return {}; + } + if (!inputBounds.intersect(dstBounds)) { + return {}; + } + dstBounds.roundOut(); + std::shared_ptr last; + if (dstBounds != Rect::MakeWH(image->width(), image->height())) { + last = ExtendImage(context.context, image, dstBounds, tileMode); + if (last == nullptr) { + return {}; + } + image = last; + } + auto dstOffset = Point::Make(dstBounds.x(), dstBounds.y()); + int tw = static_cast(dstBounds.width() * downScaling); + int th = static_cast(dstBounds.height() * downScaling); + std::vector> upSurfaces; + upSurfaces.emplace_back(static_cast(dstBounds.width()), + static_cast(dstBounds.height())); + for (int i = 0; i < iteration; ++i) { + auto surface = Surface::Make(context.context, tw, th); + if (surface == nullptr) { + return {}; + } + upSurfaces.emplace_back(tw, th); + draw(image, surface.get(), true); + last = surface->makeImageSnapshot(); + image = last; + tw = std::max(static_cast(static_cast(tw) * downScaling), 1); + th = std::max(static_cast(static_cast(th) * downScaling), 1); + } + for (int i = static_cast(upSurfaces.size()) - 2; i >= 0; --i) { + auto width = upSurfaces[i].first; + auto height = upSurfaces[i].second; + auto surface = Surface::Make(context.context, width, height); + if (surface == nullptr) { + return {}; + } + draw(last, surface.get(), false); + last = surface->makeImageSnapshot(); + } + return {last, dstOffset}; +} + +Rect BlurImageFilter::onFilterNodeBounds(const Rect& srcRect) const { + auto mul = static_cast(std::pow(2, iteration)) / downScaling; + return srcRect.makeOutset(blurOffset.x * mul, blurOffset.y * mul); +} +} // namespace tgfx diff --git a/src/effects/BlurImageFilter.h b/src/effects/BlurImageFilter.h new file mode 100644 index 00000000..b57bf6e0 --- /dev/null +++ b/src/effects/BlurImageFilter.h @@ -0,0 +1,45 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/ImageFilter.h" +#include "tgfx/gpu/Surface.h" + +namespace tgfx { +class BlurImageFilter : public ImageFilter { + public: + BlurImageFilter(Point blurOffset, float downScaling, int iteration, TileMode tileMode, + const Rect& cropRect) + : ImageFilter(cropRect), blurOffset(blurOffset), downScaling(downScaling), + iteration(iteration), tileMode(tileMode) { + } + + std::pair, Point> filterImage(const ImageFilterContext& context) override; + + private: + Rect onFilterNodeBounds(const Rect& srcRect) const override; + + void draw(std::shared_ptr image, Surface* toSurface, bool isDown); + + Point blurOffset; + float downScaling; + int iteration; + TileMode tileMode; +}; +} // namespace tgfx diff --git a/src/effects/ColorMatrixFilter.cpp b/src/effects/ColorMatrixFilter.cpp new file mode 100644 index 00000000..b895efb3 --- /dev/null +++ b/src/effects/ColorMatrixFilter.cpp @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ColorMatrixFilter.h" +#include "gpu/processors/ColorMatrixFragmentProcessor.h" +#include "utils/MathExtra.h" + +namespace tgfx { +std::shared_ptr ColorFilter::Matrix(const std::array& rowMajor) { + return std::make_shared(rowMajor); +} + +static bool IsAlphaUnchanged(const float matrix[20]) { + const float* srcA = matrix + 15; + return FloatNearlyZero(srcA[0]) && FloatNearlyZero(srcA[1]) && FloatNearlyZero(srcA[2]) && + FloatNearlyEqual(srcA[3], 1) && FloatNearlyZero(srcA[4]); +} + +ColorMatrixFilter::ColorMatrixFilter(const std::array& matrix) + : matrix(matrix), alphaIsUnchanged(IsAlphaUnchanged(matrix.data())) { +} + +std::unique_ptr ColorMatrixFilter::asFragmentProcessor() const { + return ColorMatrixFragmentProcessor::Make(matrix); +} +} // namespace tgfx diff --git a/src/effects/ColorMatrixFilter.h b/src/effects/ColorMatrixFilter.h new file mode 100644 index 00000000..70f5d15b --- /dev/null +++ b/src/effects/ColorMatrixFilter.h @@ -0,0 +1,38 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. 
+// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/ColorFilter.h" + +namespace tgfx { +class ColorMatrixFilter : public ColorFilter { + public: + explicit ColorMatrixFilter(const std::array& matrix); + + std::unique_ptr asFragmentProcessor() const override; + + bool isAlphaUnchanged() const override { + return alphaIsUnchanged; + } + + private: + std::array matrix; + bool alphaIsUnchanged; +}; +} // namespace tgfx diff --git a/src/effects/DropShadowImageFilter.cpp b/src/effects/DropShadowImageFilter.cpp new file mode 100644 index 00000000..f310e264 --- /dev/null +++ b/src/effects/DropShadowImageFilter.cpp @@ -0,0 +1,89 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
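A sketch of building the ColorMatrixFilter above through the public factory; the 4x5 row-major matrix uses Rec. 709 luma weights to desaturate RGB while leaving alpha untouched, so isAlphaUnchanged() stays true. The MakeGrayscale name is illustrative:

#include <array>
#include "tgfx/core/ColorFilter.h"

static std::shared_ptr<tgfx::ColorFilter> MakeGrayscale() {
  std::array<float, 20> matrix = {0.2126f, 0.7152f, 0.0722f, 0, 0,   // red output row
                                  0.2126f, 0.7152f, 0.0722f, 0, 0,   // green output row
                                  0.2126f, 0.7152f, 0.0722f, 0, 0,   // blue output row
                                  0,       0,       0,       1, 0};  // alpha passes through
  return tgfx::ColorFilter::Matrix(matrix);
}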
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "DropShadowImageFilter.h" +#include "gpu/Texture.h" +#include "tgfx/core/ColorFilter.h" +#include "tgfx/gpu/Surface.h" + +namespace tgfx { +std::shared_ptr ImageFilter::DropShadow(float dx, float dy, float blurrinessX, + float blurrinessY, const Color& color, + const Rect& cropRect) { + return std::make_shared(dx, dy, blurrinessX, blurrinessY, color, false, + cropRect); +} + +std::shared_ptr ImageFilter::DropShadowOnly(float dx, float dy, float blurrinessX, + float blurrinessY, const Color& color, + const Rect& cropRect) { + return std::make_shared(dx, dy, blurrinessX, blurrinessY, color, true, + cropRect); +} + +std::pair, Point> DropShadowImageFilter::filterImage( + const ImageFilterContext& context) { + auto image = context.source; + if (image == nullptr) { + return {}; + } + auto inputBounds = Rect::MakeWH(image->width(), image->height()); + Rect dstBounds = Rect::MakeEmpty(); + if (!applyCropRect(inputBounds, &dstBounds, &context.clipBounds)) { + return {}; + } + if (!inputBounds.intersect(dstBounds)) { + return {}; + } + dstBounds.roundOut(); + auto dstOffset = Point::Make(dstBounds.x(), dstBounds.y()); + auto surface = Surface::Make(context.context, static_cast(dstBounds.width()), + static_cast(dstBounds.height())); + if (surface == nullptr) { + return {}; + } + auto canvas = surface->getCanvas(); + Paint paint; + paint.setImageFilter(ImageFilter::Blur(blurrinessX, blurrinessY)); + paint.setColorFilter(ColorFilter::Blend(color, BlendMode::SrcIn)); + canvas->concat(Matrix::MakeTrans(-dstOffset.x, -dstOffset.y)); + canvas->save(); + canvas->concat(Matrix::MakeTrans(dx, dy)); + canvas->drawImage(image, &paint); + canvas->restore(); + if (!shadowOnly) { + canvas->drawImage(image); + } + return {surface->makeImageSnapshot(), dstOffset}; +} + +Rect DropShadowImageFilter::onFilterNodeBounds(const Rect& srcRect) const { + auto bounds = srcRect; + auto shadowBounds = bounds; + shadowBounds.offset(dx, dy); + if (auto filter = ImageFilter::Blur(blurrinessX, blurrinessY)) { + shadowBounds = filter->filterBounds(shadowBounds); + } + if (!shadowOnly) { + bounds.join(shadowBounds); + } else { + bounds = shadowBounds; + } + return bounds; +} +} // namespace tgfx diff --git a/src/effects/DropShadowImageFilter.h b/src/effects/DropShadowImageFilter.h new file mode 100644 index 00000000..10b2f952 --- /dev/null +++ b/src/effects/DropShadowImageFilter.h @@ -0,0 +1,44 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
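The drop-shadow filter above is normally consumed through a Paint, the same way its implementation layers ImageFilter::Blur() with a Blend color filter internally. A minimal sketch using the two-argument Blur() call already seen in this file; DropShadow() and DropShadowOnly() can be attached the same way, the DrawBlurred name is illustrative, and the includes mirror those used here:

#include "tgfx/core/ImageFilter.h"
#include "tgfx/gpu/Surface.h"

static void DrawBlurred(tgfx::Surface* surface, std::shared_ptr<tgfx::Image> image) {
  auto canvas = surface->getCanvas();
  tgfx::Paint paint = {};
  paint.setImageFilter(tgfx::ImageFilter::Blur(8, 8));
  canvas->drawImage(std::move(image), &paint);
}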
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#pragma once
+
+#include "tgfx/core/ImageFilter.h"
+
+namespace tgfx {
+class DropShadowImageFilter : public ImageFilter {
+ public:
+  DropShadowImageFilter(float dx, float dy, float blurrinessX, float blurrinessY,
+                        const Color& color, bool shadowOnly, const Rect& cropRect)
+      : ImageFilter(cropRect), dx(dx), dy(dy), blurrinessX(blurrinessX), blurrinessY(blurrinessY),
+        color(color), shadowOnly(shadowOnly) {
+  }
+
+  std::pair<std::shared_ptr<Image>, Point> filterImage(const ImageFilterContext& context) override;
+
+ private:
+  Rect onFilterNodeBounds(const Rect& srcRect) const override;
+
+  float dx;
+  float dy;
+  float blurrinessX;
+  float blurrinessY;
+  Color color;
+  bool shadowOnly;
+};
+}  // namespace tgfx
diff --git a/src/effects/LumaColorFilter.cpp b/src/effects/LumaColorFilter.cpp
new file mode 100644
index 00000000..8e981667
--- /dev/null
+++ b/src/effects/LumaColorFilter.cpp
@@ -0,0 +1,30 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+//     https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "LumaColorFilter.h"
+#include "gpu/processors/LumaColorFilterEffect.h"
+
+namespace tgfx {
+std::shared_ptr<ColorFilter> ColorFilter::MakeLumaColorFilter() {
+  return std::make_shared<LumaColorFilter>();
+}
+
+std::unique_ptr<FragmentProcessor> LumaColorFilter::asFragmentProcessor() const {
+  return LumaColorFilterEffect::Make();
+}
+}  // namespace tgfx
diff --git a/src/effects/LumaColorFilter.h b/src/effects/LumaColorFilter.h
new file mode 100644
index 00000000..43fd7044
--- /dev/null
+++ b/src/effects/LumaColorFilter.h
@@ -0,0 +1,28 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+//     https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
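LumaColorFilter turns luminance into alpha via LumaColorFilterEffect, whose shader is not part of this diff; the conventional choice for such a filter is the BT.709 weighting, which is assumed in the sketch below rather than taken from the tgfx sources:

// Assumed BT.709 luma weighting; the actual coefficients live in LumaColorFilterEffect.
inline float Luma709(float r, float g, float b) {
  return 0.2126f * r + 0.7152f * g + 0.0722f * b;
}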
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#pragma once
+
+#include "tgfx/core/ColorFilter.h"
+
+namespace tgfx {
+class LumaColorFilter : public ColorFilter {
+ public:
+  std::unique_ptr<FragmentProcessor> asFragmentProcessor() const override;
+};
+}  // namespace tgfx
diff --git a/src/effects/ModeColorFilter.cpp b/src/effects/ModeColorFilter.cpp
new file mode 100644
index 00000000..7d7b73a0
--- /dev/null
+++ b/src/effects/ModeColorFilter.cpp
@@ -0,0 +1,59 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+//     https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "ModeColorFilter.h"
+#include "gpu/processors/ConstColorProcessor.h"
+#include "gpu/processors/XfermodeFragmentProcessor.h"
+
+namespace tgfx {
+static bool IsNoOps(float alpha, BlendMode mode) {
+  return BlendMode::Dst == mode ||
+         (0.f == alpha &&
+          (BlendMode::DstOver == mode || BlendMode::DstOut == mode || BlendMode::SrcATop == mode ||
+           BlendMode::Xor == mode || BlendMode::Darken == mode)) ||
+         (1.f == alpha && BlendMode::DstIn == mode);
+}
+
+std::shared_ptr<ColorFilter> ColorFilter::Blend(Color color, BlendMode mode) {
+  float alpha = color.alpha;
+  if (BlendMode::Clear == mode) {
+    color = Color::Transparent();
+    mode = BlendMode::Src;
+  } else if (BlendMode::SrcOver == mode) {
+    if (0.f == alpha) {
+      mode = BlendMode::Dst;
+    } else if (1.f == alpha) {
+      mode = BlendMode::Src;
+    }
+  }
+  // Weed out combinations that are no-ops and just return null.
+  if (IsNoOps(alpha, mode)) {
+    return nullptr;
+  }
+  return std::make_shared<ModeColorFilter>(color, mode);
+}
+
+std::unique_ptr<FragmentProcessor> ModeColorFilter::asFragmentProcessor() const {
+  return XfermodeFragmentProcessor::MakeFromSrcProcessor(
+      ConstColorProcessor::Make(color.premultiply(), InputMode::Ignore), mode);
+}
+
+bool ModeColorFilter::isAlphaUnchanged() const {
+  return BlendMode::Dst == mode || BlendMode::SrcATop == mode;
+}
+}  // namespace tgfx
diff --git a/src/effects/ModeColorFilter.h b/src/effects/ModeColorFilter.h
new file mode 100644
index 00000000..eb241ab9
--- /dev/null
+++ b/src/effects/ModeColorFilter.h
@@ -0,0 +1,37 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License.
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/ColorFilter.h" + +namespace tgfx { +class ModeColorFilter : public ColorFilter { + public: + ModeColorFilter(Color color, BlendMode mode) : color(color), mode(mode) { + } + + std::unique_ptr asFragmentProcessor() const override; + + bool isAlphaUnchanged() const override; + + private: + Color color; + BlendMode mode; +}; +} // namespace tgfx diff --git a/src/effects/ShaderMaskFilter.cpp b/src/effects/ShaderMaskFilter.cpp new file mode 100644 index 00000000..3d9da9ef --- /dev/null +++ b/src/effects/ShaderMaskFilter.cpp @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ShaderMaskFilter.h" +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +std::shared_ptr MaskFilter::Make(std::shared_ptr shader, bool inverted) { + if (shader == nullptr) { + return nullptr; + } + return std::make_shared(std::move(shader), inverted); +} + +std::unique_ptr ShaderMaskFilter::asFragmentProcessor(const FPArgs& args) const { + return FragmentProcessor::MulInputByChildAlpha(shader->asFragmentProcessor(args), inverted); +} +} // namespace tgfx diff --git a/src/effects/ShaderMaskFilter.h b/src/effects/ShaderMaskFilter.h new file mode 100644 index 00000000..edb6a1fe --- /dev/null +++ b/src/effects/ShaderMaskFilter.h @@ -0,0 +1,37 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/MaskFilter.h" +#include "tgfx/core/Shader.h" + +namespace tgfx { +class ShaderMaskFilter : public MaskFilter { + public: + ShaderMaskFilter(std::shared_ptr shader, bool inverted) + : shader(std::move(shader)), inverted(inverted) { + } + + std::unique_ptr asFragmentProcessor(const FPArgs& args) const override; + + private: + std::shared_ptr shader; + bool inverted; +}; +} // namespace tgfx diff --git a/src/gpu/AAType.h b/src/gpu/AAType.h new file mode 100644 index 00000000..e19c38cc --- /dev/null +++ b/src/gpu/AAType.h @@ -0,0 +1,30 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +enum class AAType { + // No antialiasing. + None, + // Use fragment shader code to compute a fractional pixel coverage. + Coverage, + // Use normal MSAA. + MSAA +}; +} diff --git a/src/gpu/Backend.cpp b/src/gpu/Backend.cpp new file mode 100644 index 00000000..bfbff208 --- /dev/null +++ b/src/gpu/Backend.cpp @@ -0,0 +1,139 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/gpu/Backend.h" + +namespace tgfx { +BackendTexture::BackendTexture(const GLTextureInfo& glInfo, int width, int height) + : _backend(Backend::OPENGL), _width(width), _height(height), glInfo(glInfo) { +} + +BackendTexture::BackendTexture(const MtlTextureInfo& mtlInfo, int width, int height) + : _backend(Backend::METAL), _width(width), _height(height), mtlInfo(mtlInfo) { +} + +BackendTexture::BackendTexture(const BackendTexture& that) { + *this = that; +} + +BackendTexture& BackendTexture::operator=(const BackendTexture& that) { + if (!that.isValid()) { + _width = _height = 0; + return *this; + } + _width = that._width; + _height = that._height; + _backend = that._backend; + switch (that._backend) { + case Backend::OPENGL: + glInfo = that.glInfo; + break; + case Backend::METAL: + mtlInfo = that.mtlInfo; + break; + default: + break; + } + return *this; +} + +bool BackendTexture::getGLTextureInfo(GLTextureInfo* glTextureInfo) const { + if (!isValid() || _backend != Backend::OPENGL) { + return false; + } + *glTextureInfo = glInfo; + return true; +} + +bool BackendTexture::getMtlTextureInfo(MtlTextureInfo* mtlTextureInfo) const { + if (!isValid() || _backend != Backend::METAL) { + return false; + } + *mtlTextureInfo = mtlInfo; + return true; +} + +BackendRenderTarget::BackendRenderTarget(const GLFrameBufferInfo& glInfo, int width, int height) + : _backend(Backend::OPENGL), _width(width), _height(height), glInfo(glInfo) { +} + +BackendRenderTarget::BackendRenderTarget(const MtlTextureInfo& mtlInfo, int width, int height) + : _backend(Backend::METAL), _width(width), _height(height), mtlInfo(mtlInfo) { +} + +BackendRenderTarget::BackendRenderTarget(const BackendRenderTarget& that) { + *this = that; +} + +BackendRenderTarget& BackendRenderTarget::operator=(const BackendRenderTarget& that) { + if (!that.isValid()) { + _width = _height = 0; + return *this; + } + _width = that._width; + _height = that._height; + _backend = that._backend; + switch (that._backend) { + case Backend::OPENGL: + glInfo = that.glInfo; + break; + case Backend::METAL: + mtlInfo = that.mtlInfo; + break; + default: + break; + } + return *this; +} + +bool BackendRenderTarget::getGLFramebufferInfo(GLFrameBufferInfo* glFrameBufferInfo) const { + if (!isValid() || _backend != Backend::OPENGL) { + return false; + } + *glFrameBufferInfo = glInfo; + return true; +} + +bool BackendRenderTarget::getMtlTextureInfo(MtlTextureInfo* mtlTextureInfo) const { + if (!isValid() || _backend != Backend::METAL) { + return false; + } + *mtlTextureInfo = mtlInfo; + return true; +} + +BackendSemaphore::BackendSemaphore() + : _backend(Backend::MOCK), _glSync(nullptr), _isInitialized(false) { +} + +void BackendSemaphore::initGL(void* sync) { + if (sync == nullptr) { + return; + } + _backend = Backend::OPENGL; + _glSync = sync; + _isInitialized = true; +} + +void* BackendSemaphore::glSync() const { + if (!_isInitialized || _backend != Backend::OPENGL) { + return nullptr; + } + return _glSync; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/gpu/BitmaskOperators.h b/src/gpu/BitmaskOperators.h new file mode 100644 index 00000000..2e786a26 --- /dev/null +++ b/src/gpu/BitmaskOperators.h @@ -0,0 +1,90 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. 
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+//     https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#pragma once
+
+#include <type_traits>
+
+namespace tgfx {
+#define TGFX_MARK_AS_BITMASK_ENUM(LargestValue) TGFX_BITMASK_LARGEST_ENUMERATOR = LargestValue
+
+/// Traits class to determine whether an enum has a
+/// TGFX_BITMASK_LARGEST_ENUMERATOR enumerator.
+template <typename E, typename Enable = void>
+struct is_bitmask_enum : std::false_type {};
+
+template <typename E>
+struct is_bitmask_enum<E, std::enable_if_t<sizeof(E::TGFX_BITMASK_LARGEST_ENUMERATOR) >= 0>>
+    : std::true_type {};
+
+template <typename E>
+std::underlying_type_t<E> BitMask() {
+  // On overflow, NextPowerOf2 returns zero with the type uint64_t, so
+  // subtracting 1 gives us the mask with all bits set, like we want.
+  return NextPowerOf2(static_cast<std::underlying_type_t<E>>(E::TGFX_BITMASK_LARGEST_ENUMERATOR)) -
+         1;
+}
+
+/// Return Val cast to E's underlying type.
+template <typename E>
+std::underlying_type_t<E> Underlying(E Val) {
+  return static_cast<std::underlying_type_t<E>>(Val);
+}
+
+template <typename E, typename = std::enable_if_t<is_bitmask_enum<E>::value>>
+E operator~(E Val) {
+  return static_cast<E>(~Underlying(Val) & BitMask<E>());
+}
+
+template <typename E, typename = std::enable_if_t<is_bitmask_enum<E>::value>>
+E operator|(E LHS, E RHS) {
+  return static_cast<E>(Underlying(LHS) | Underlying(RHS));
+}
+
+template <typename E, typename = std::enable_if_t<is_bitmask_enum<E>::value>>
+E operator&(E LHS, E RHS) {
+  return static_cast<E>(Underlying(LHS) & Underlying(RHS));
+}
+
+template <typename E, typename = std::enable_if_t<is_bitmask_enum<E>::value>>
+E operator^(E LHS, E RHS) {
+  return static_cast<E>(Underlying(LHS) ^ Underlying(RHS));
+}
+
+// |=, &=, and ^= return a reference to LHS, to match the behavior of the
+// operators on builtin types.
+
+template <typename E, typename = std::enable_if_t<is_bitmask_enum<E>::value>>
+E& operator|=(E& LHS, E RHS) {
+  LHS = LHS | RHS;
+  return LHS;
+}
+
+template <typename E, typename = std::enable_if_t<is_bitmask_enum<E>::value>>
+E& operator&=(E& LHS, E RHS) {
+  LHS = LHS & RHS;
+  return LHS;
+}
+
+template <typename E, typename = std::enable_if_t<is_bitmask_enum<E>::value>>
+E& operator^=(E& LHS, E RHS) {
+  LHS = LHS ^ RHS;
+  return LHS;
+}
+}  // namespace tgfx
diff --git a/src/gpu/Blend.cpp b/src/gpu/Blend.cpp
new file mode 100644
index 00000000..2bb5a2a9
--- /dev/null
+++ b/src/gpu/Blend.cpp
@@ -0,0 +1,56 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+//     https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
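To show how the machinery above is meant to be used, here is a hedged sketch: an enum class opts in by applying TGFX_MARK_AS_BITMASK_ENUM to its largest enumerator, which makes is_bitmask_enum true and enables the SFINAE-constrained operators (the enum below is invented for the example):

#include <cstdint>

// Hypothetical enum, for illustration only; real tgfx bitmask enums live elsewhere.
enum class DirtyFlags : uint32_t {
  None = 0,
  Clip = 1 << 0,
  Matrix = 1 << 1,
  Paint = 1 << 2,
  TGFX_MARK_AS_BITMASK_ENUM(Paint)  // adds TGFX_BITMASK_LARGEST_ENUMERATOR = Paint
};

inline bool HasMatrixFlag(DirtyFlags flags) {
  // is_bitmask_enum<DirtyFlags> is now std::true_type, so operator| and operator& resolve.
  return (flags & DirtyFlags::Matrix) == DirtyFlags::Matrix;
}

// Usage: HasMatrixFlag(DirtyFlags::Clip | DirtyFlags::Matrix) returns true.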
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "Blend.h"
+#include <utility>
+
+namespace tgfx {
+bool BlendModeAsCoeff(BlendMode mode, BlendInfo* blendInfo) {
+  // clang-format off
+  static constexpr std::pair<BlendModeCoeff, BlendModeCoeff> kCoeffs[] = {
+      // For Porter-Duff blend functions, color = src * src coeff + dst * dst coeff
+      // src coeff               dst coeff                blend func
+      // -------------------     --------------------     ----------
+      { BlendModeCoeff::Zero,    BlendModeCoeff::Zero },  // clear
+      { BlendModeCoeff::One,     BlendModeCoeff::Zero },  // src
+      { BlendModeCoeff::Zero,    BlendModeCoeff::One  },  // dst
+      { BlendModeCoeff::One,     BlendModeCoeff::ISA  },  // src-over
+      { BlendModeCoeff::IDA,     BlendModeCoeff::One  },  // dst-over
+      { BlendModeCoeff::DA,      BlendModeCoeff::Zero },  // src-in
+      { BlendModeCoeff::Zero,    BlendModeCoeff::SA   },  // dst-in
+      { BlendModeCoeff::IDA,     BlendModeCoeff::Zero },  // src-out
+      { BlendModeCoeff::Zero,    BlendModeCoeff::ISA  },  // dst-out
+      { BlendModeCoeff::DA,      BlendModeCoeff::ISA  },  // src-atop
+      { BlendModeCoeff::IDA,     BlendModeCoeff::SA   },  // dst-atop
+      { BlendModeCoeff::IDA,     BlendModeCoeff::ISA  },  // xor
+      { BlendModeCoeff::One,     BlendModeCoeff::One  },  // plus
+      { BlendModeCoeff::Zero,    BlendModeCoeff::SC   },  // modulate
+      { BlendModeCoeff::One,     BlendModeCoeff::ISC  },  // screen
+  };
+  // clang-format on
+
+  if (mode > BlendMode::Screen) {
+    return false;
+  }
+  if (blendInfo != nullptr) {
+    blendInfo->srcBlend = kCoeffs[static_cast<int>(mode)].first;
+    blendInfo->dstBlend = kCoeffs[static_cast<int>(mode)].second;
+  }
+  return true;
+}
+}  // namespace tgfx
diff --git a/src/gpu/Blend.h b/src/gpu/Blend.h
new file mode 100644
index 00000000..7ed55f6d
--- /dev/null
+++ b/src/gpu/Blend.h
@@ -0,0 +1,77 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+//     https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#pragma once
+
+#include "tgfx/core/BlendMode.h"
+
+namespace tgfx {
+enum class BlendModeCoeff {
+  /**
+   * 0
+   */
+  Zero,
+  /**
+   * 1
+   */
+  One,
+  /**
+   * src color
+   */
+  SC,
+  /**
+   * inverse src color (i.e. 1 - sc)
+   */
+  ISC,
+  /**
+   * dst color
+   */
+  DC,
+  /**
+   * inverse dst color (i.e. 1 - dc)
+   */
+  IDC,
+  /**
+   * src alpha
+   */
+  SA,
+  /**
+   * inverse src alpha (i.e. 1 - sa)
+   */
+  ISA,
+  /**
+   * dst alpha
+   */
+  DA,
+  /**
+   * inverse dst alpha (i.e. 1 - da)
+   */
+  IDA,
+};
+
+struct BlendInfo {
+  BlendModeCoeff srcBlend;
+  BlendModeCoeff dstBlend;
+};
+
+/**
+ * Returns true if 'mode' is a coefficient-based blend mode. If true is returned, the mode's src
+ * and dst coefficients are written to 'blendInfo'.
+ */ +bool BlendModeAsCoeff(BlendMode mode, BlendInfo* blendInfo = nullptr); +} // namespace tgfx diff --git a/src/gpu/Context.cpp b/src/gpu/Context.cpp new file mode 100644 index 00000000..5d406427 --- /dev/null +++ b/src/gpu/Context.cpp @@ -0,0 +1,113 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/gpu/Context.h" +#include "gpu/DrawingManager.h" +#include "gpu/ProgramCache.h" +#include "gpu/ProxyProvider.h" +#include "gpu/ResourceProvider.h" +#include "tgfx/gpu/ResourceCache.h" +#include "tgfx/utils/Clock.h" +#include "utils/Log.h" + +namespace tgfx { +Context::Context(Device* device) : _device(device) { + _programCache = new ProgramCache(this); + _resourceCache = new ResourceCache(this); + _drawingManager = new DrawingManager(this); + _resourceProvider = new ResourceProvider(this); + _proxyProvider = new ProxyProvider(this); +} + +Context::~Context() { + // The Device owner must call releaseAll() before deleting this Context, otherwise, GPU resources + // may leak. 
+ DEBUG_ASSERT(_resourceCache->empty()); + DEBUG_ASSERT(_programCache->empty()); + delete _programCache; + delete _resourceCache; + delete _drawingManager; + delete _gpu; + delete _resourceProvider; + delete _proxyProvider; +} + +bool Context::flush(BackendSemaphore* signalSemaphore) { + auto semaphore = Semaphore::Wrap(signalSemaphore); + auto success = _drawingManager->flush(semaphore.get()); + if (signalSemaphore != nullptr) { + *signalSemaphore = semaphore->getBackendSemaphore(); + } + return success; +} + +bool Context::submit(bool syncCpu) { + return _gpu->submitToGpu(syncCpu); +} + +void Context::flushAndSubmit(bool syncCpu) { + flush(); + submit(syncCpu); +} + +bool Context::wait(const BackendSemaphore& waitSemaphore) { + auto semaphore = Semaphore::Wrap(&waitSemaphore); + if (semaphore == nullptr) { + return false; + } + return caps()->semaphoreSupport && _gpu->waitSemaphore(semaphore.get()); +} + +void Context::onLocked() { + _resourceCache->attachToCurrentThread(); +} + +void Context::onUnlocked() { + _resourceCache->detachFromCurrentThread(); +} + +size_t Context::memoryUsage() const { + return _resourceCache->getResourceBytes(); +} + +size_t Context::purgeableBytes() const { + return _resourceCache->getPurgeableBytes(); +} + +size_t Context::getCacheLimit() const { + return _resourceCache->getCacheLimit(); +} + +void Context::setCacheLimit(size_t bytesLimit) { + _resourceCache->setCacheLimit(bytesLimit); +} + +void Context::purgeResourcesNotUsedSince(int64_t purgeTime, bool scratchResourcesOnly) { + _resourceCache->purgeNotUsedSince(purgeTime, scratchResourcesOnly); +} + +bool Context::purgeResourcesUntilMemoryTo(size_t bytesLimit, bool scratchResourcesOnly) { + return _resourceCache->purgeUntilMemoryTo(bytesLimit, scratchResourcesOnly); +} + +void Context::releaseAll(bool releaseGPU) { + _resourceProvider->releaseAll(); + _programCache->releaseAll(releaseGPU); + _resourceCache->releaseAll(releaseGPU); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/gpu/CoordTransform.cpp b/src/gpu/CoordTransform.cpp new file mode 100644 index 00000000..036c7a5a --- /dev/null +++ b/src/gpu/CoordTransform.cpp @@ -0,0 +1,57 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
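A hedged sketch of how the flush/submit split above is typically driven, assuming a Device and Surface created elsewhere (Device::lockContext and Device::unlock appear later in this diff); flush() can also export a BackendSemaphore through the overload shown above when cross-context synchronization is needed:

// Sketch only: 'device' and 'surface' are assumed to exist elsewhere.
auto context = device->lockContext();
if (context != nullptr) {
  // ... record drawing commands through surface->getCanvas() ...
  context->flushAndSubmit();  // flush() schedules the recorded GPU work, submit() hands it off.
  device->unlock();
}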
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "CoordTransform.h" +#include "gpu/TextureSampler.h" + +namespace tgfx { +Matrix CoordTransform::getTotalMatrix() const { + if (textureProxy == nullptr || textureProxy->getTexture() == nullptr) { + return matrix; + } + auto texture = textureProxy->getTexture(); + auto combined = matrix; + // normalize + auto scale = texture->getTextureCoord(1, 1); + combined.postScale(scale.x, scale.y); + // If the texture has a crop rectangle, we need to shrink it to prevent bilinear sampling beyond + // the edge of the crop rectangle. + auto textureType = texture->getSampler()->type(); + auto edgePoint = texture->getTextureCoord(texture->width(), texture->height()); + static constexpr Point FullEdge = Point::Make(1.0f, 1.0f); + if (textureType != TextureType::Rectangle && edgePoint != FullEdge && alphaStart.isZero()) { + // https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/nativedisplay/surfacetexture/SurfaceTexture.cpp;l=275;drc=master;bpv=0;bpt=1 + // https://stackoverflow.com/questions/6023400/opengl-es-texture-coordinates-slightly-off + // Normally this would just need to take 1/2 a texel off each end, but because the chroma + // channels of YUV420 images are subsampled we may need to shrink the crop region by a whole + // texel on each side. + auto shrinkAmount = textureType == TextureType::External ? 1.0f : 0.5f; + scale = texture->getTextureCoord(static_cast(texture->width()) - (2.0f * shrinkAmount), + static_cast(texture->height()) - (2.0f * shrinkAmount)); + combined.postScale(scale.x, scale.y); + auto translate = texture->getTextureCoord(shrinkAmount, shrinkAmount); + combined.postTranslate(translate.x, translate.y); + } + if (texture->origin() == ImageOrigin::BottomLeft) { + combined.postScale(1, -1); + auto translate = texture->getTextureCoord(0, static_cast(texture->height())); + combined.postTranslate(translate.x, translate.y); + } + return combined; +} +} // namespace tgfx diff --git a/src/gpu/CoordTransform.h b/src/gpu/CoordTransform.h new file mode 100644 index 00000000..2788bfd5 --- /dev/null +++ b/src/gpu/CoordTransform.h @@ -0,0 +1,40 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
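As a concrete reading of the shrink logic above: with the default half-texel shrink, a normalized coordinate u over a crop of cropWidth texels inside a textureWidth texture samples (u * (cropWidth - 1) + 0.5) / textureWidth, which keeps bilinear taps inside the crop. A small sketch of that arithmetic, assuming a plain 2D texture whose getTextureCoord(x, y) is simply (x / width, y / height):

// Sketch of the post-scale plus post-translate applied by getTotalMatrix() for a cropped texture.
inline float MapCroppedU(float u, float cropWidth, float textureWidth, float shrink = 0.5f) {
  float scale = (cropWidth - 2 * shrink) / textureWidth;   // postScale
  float translate = shrink / textureWidth;                 // postTranslate
  return u * scale + translate;
}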
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/TextureProxy.h" +#include "tgfx/core/Matrix.h" + +namespace tgfx { +struct CoordTransform { + CoordTransform() = default; + + explicit CoordTransform(Matrix matrix, const TextureProxy* proxy = nullptr, + const Point& alphaStart = Point::Zero()) + : matrix(matrix), textureProxy(proxy), alphaStart(alphaStart) { + } + + Matrix getTotalMatrix() const; + + Matrix matrix = Matrix::I(); + const TextureProxy* textureProxy = nullptr; + // The alpha start point of the RGBAAA layout. + Point alphaStart = Point::Zero(); +}; +} // namespace tgfx diff --git a/src/gpu/Device.cpp b/src/gpu/Device.cpp new file mode 100644 index 00000000..e1191c76 --- /dev/null +++ b/src/gpu/Device.cpp @@ -0,0 +1,75 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/gpu/Device.h" +#include "tgfx/gpu/Context.h" +#include "utils/Log.h" +#include "utils/UniqueID.h" + +namespace tgfx { +Device::Device() : _uniqueID(UniqueID::Next()) { +} + +Device::~Device() { + // Subclasses must call releaseAll() before the Device is destructed to clean up all GPU + // resources in context. + DEBUG_ASSERT(context == nullptr); +} + +Context* Device::lockContext() { + locker.lock(); + contextLocked = onLockContext(); + if (!contextLocked) { + locker.unlock(); + return nullptr; + } + context->onLocked(); + return context; +} + +void Device::unlock() { + if (contextLocked) { + context->onUnlocked(); + contextLocked = false; + onUnlockContext(); + } + locker.unlock(); +} + +void Device::releaseAll() { + std::lock_guard autoLock(locker); + if (context == nullptr) { + return; + } + contextLocked = onLockContext(); + context->releaseAll(contextLocked); + if (contextLocked) { + contextLocked = false; + onUnlockContext(); + } + delete context; + context = nullptr; +} + +bool Device::onLockContext() { + return context != nullptr; +} + +void Device::onUnlockContext() { +} +} // namespace tgfx diff --git a/src/gpu/DoubleBufferedWindow.cpp b/src/gpu/DoubleBufferedWindow.cpp new file mode 100644 index 00000000..da72a5b7 --- /dev/null +++ b/src/gpu/DoubleBufferedWindow.cpp @@ -0,0 +1,144 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/gpu/DoubleBufferedWindow.h" +#include "gpu/Texture.h" + +namespace tgfx { +class DoubleBufferedWindowExternal : public DoubleBufferedWindow { + public: + DoubleBufferedWindowExternal(std::shared_ptr device, HardwareBufferRef frontBuffer, + HardwareBufferRef backBuffer) + : DoubleBufferedWindow(std::move(device)), frontBuffer(frontBuffer), backBuffer(backBuffer) { + HardwareBufferRetain(frontBuffer); + HardwareBufferRetain(backBuffer); + } + + ~DoubleBufferedWindowExternal() override { + HardwareBufferRelease(frontBuffer); + HardwareBufferRelease(backBuffer); + } + + bool isFront(HardwareBufferRef buffer) const override { + return frontBuffer == buffer; + } + + private: + std::shared_ptr onCreateSurface(Context* context) override { + if (frontSurface == nullptr) { + frontSurface = Surface::MakeFrom(context, frontBuffer); + } + if (backSurface == nullptr) { + backSurface = Surface::MakeFrom(context, backBuffer); + } + return backSurface; + } + + void onSwapSurfaces(Context*) override { + std::swap(frontBuffer, backBuffer); + } + + HardwareBufferRef frontBuffer = nullptr; + HardwareBufferRef backBuffer = nullptr; +}; + +class DoubleBufferedWindowTexture : public DoubleBufferedWindow { + public: + DoubleBufferedWindowTexture(std::shared_ptr device, + const BackendTexture& frontBackendTexture, + const BackendTexture& backBackendTexture) + : DoubleBufferedWindow(std::move(device)), frontBackendTexture(frontBackendTexture), + backBackendTexture(backBackendTexture) { + } + + bool isFront(const BackendTexture& texture) const override { + if (!texture.isValid() || !frontBackendTexture.isValid()) { + return false; + } + if (texture.backend() != Backend::OPENGL) { + return false; + } + GLTextureInfo info1; + texture.getGLTextureInfo(&info1); + GLTextureInfo info2; + frontBackendTexture.getGLTextureInfo(&info2); + if (info1.format != info2.format) { + return false; + } + if (info1.target != info2.target) { + return false; + } + return info1.id != info2.id; + } + + private: + std::shared_ptr onCreateSurface(Context* context) override { + if (frontSurface == nullptr) { + frontSurface = Surface::MakeFrom(context, frontBackendTexture, ImageOrigin::TopLeft); + } + if (backSurface == nullptr) { + backSurface = Surface::MakeFrom(context, backBackendTexture, ImageOrigin::TopLeft); + } + return backSurface; + } + + void onSwapSurfaces(Context*) override { + std::swap(frontBackendTexture, backBackendTexture); + } + + BackendTexture frontBackendTexture; + BackendTexture backBackendTexture; +}; + +std::shared_ptr DoubleBufferedWindow::Make(std::shared_ptr device, + HardwareBufferRef frontBuffer, + HardwareBufferRef backBuffer) { + if (device == nullptr || frontBuffer == nullptr || backBuffer == nullptr) { + return nullptr; + } + return std::shared_ptr( + new DoubleBufferedWindowExternal(std::move(device), frontBuffer, backBuffer)); +} + +std::shared_ptr DoubleBufferedWindow::Make( + std::shared_ptr device, const BackendTexture& frontBackendTexture, + const BackendTexture& 
backBackendTexture) { + if (device == nullptr || !frontBackendTexture.isValid() || !backBackendTexture.isValid()) { + return nullptr; + } + return std::shared_ptr( + new DoubleBufferedWindowTexture(std::move(device), frontBackendTexture, backBackendTexture)); +} + +DoubleBufferedWindow::DoubleBufferedWindow(std::shared_ptr device) + : Window(std::move(device)) { +} + +DoubleBufferedWindow::~DoubleBufferedWindow() { + frontSurface = nullptr; + backSurface = nullptr; +} + +void DoubleBufferedWindow::onPresent(Context* context, int64_t) { + if (frontSurface == nullptr) { + return; + } + std::swap(frontSurface, backSurface); + onSwapSurfaces(context); +} +} // namespace tgfx diff --git a/src/gpu/DrawingManager.cpp b/src/gpu/DrawingManager.cpp new file mode 100644 index 00000000..27e992e6 --- /dev/null +++ b/src/gpu/DrawingManager.cpp @@ -0,0 +1,79 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "DrawingManager.h" +#include "TextureProxy.h" +#include "TextureResolveRenderTask.h" +#include "gpu/Gpu.h" +#include "gpu/RenderTarget.h" +#include "utils/Log.h" + +namespace tgfx { +void DrawingManager::closeActiveOpsTask() { + if (activeOpsTask) { + activeOpsTask->makeClosed(); + activeOpsTask = nullptr; + } +} + +void DrawingManager::newTextureResolveRenderTask(Surface* surface) { + if (surface->renderTarget->sampleCount() <= 1) { + return; + } + closeActiveOpsTask(); + auto task = std::make_shared(surface->renderTarget); + task->makeClosed(); + tasks.push_back(std::move(task)); +} + +std::shared_ptr DrawingManager::newOpsTask(Surface* surface) { + closeActiveOpsTask(); + auto opsTask = std::make_shared(surface->renderTarget, surface->texture); + tasks.push_back(opsTask); + activeOpsTask = opsTask.get(); + return opsTask; +} + +bool DrawingManager::flush(Semaphore* signalSemaphore) { + auto* gpu = context->gpu(); + closeAllTasks(); + activeOpsTask = nullptr; + std::vector proxies; + std::for_each(tasks.begin(), tasks.end(), + [&proxies](std::shared_ptr& task) { task->gatherProxies(&proxies); }); + std::for_each(proxies.begin(), proxies.end(), [](TextureProxy* proxy) { + if (!(proxy->isInstantiated() || proxy->instantiate())) { + LOGE("Failed to instantiate texture from proxy."); + } + }); + std::for_each(tasks.begin(), tasks.end(), + [gpu](std::shared_ptr& task) { task->execute(gpu); }); + removeAllTasks(); + return context->caps()->semaphoreSupport && gpu->insertSemaphore(signalSemaphore); +} + +void DrawingManager::closeAllTasks() { + for (auto& task : tasks) { + task->makeClosed(); + } +} + +void DrawingManager::removeAllTasks() { + tasks.clear(); +} +} // namespace tgfx diff --git a/src/gpu/DrawingManager.h 
b/src/gpu/DrawingManager.h new file mode 100644 index 00000000..f8f4c6dc --- /dev/null +++ b/src/gpu/DrawingManager.h @@ -0,0 +1,49 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "RenderTask.h" +#include "gpu/ops/OpsTask.h" +#include "tgfx/gpu/Surface.h" + +namespace tgfx { +class DrawingManager { + public: + explicit DrawingManager(Context* context) : context(context) { + } + + std::shared_ptr newOpsTask(Surface* surface); + + bool flush(Semaphore* signalSemaphore); + + void newTextureResolveRenderTask(Surface* surface); + + private: + void closeAllTasks(); + + void removeAllTasks(); + + void closeActiveOpsTask(); + + Context* context = nullptr; + std::vector> tasks; + OpsTask* activeOpsTask = nullptr; +}; +} // namespace tgfx diff --git a/src/gpu/ExternalTexture.cpp b/src/gpu/ExternalTexture.cpp new file mode 100644 index 00000000..1965caaf --- /dev/null +++ b/src/gpu/ExternalTexture.cpp @@ -0,0 +1,68 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ExternalTexture.h" +#include "gpu/Gpu.h" +#include "utils/PixelFormatUtil.h" + +namespace tgfx { +std::shared_ptr Texture::MakeFrom(Context* context, const BackendTexture& backendTexture, + ImageOrigin origin) { + return ExternalTexture::MakeFrom(context, backendTexture, origin, false); +} + +std::shared_ptr Texture::MakeAdopted(Context* context, + const BackendTexture& backendTexture, + ImageOrigin origin) { + return ExternalTexture::MakeFrom(context, backendTexture, origin, true); +} + +std::shared_ptr ExternalTexture::MakeFrom(Context* context, + const BackendTexture& backendTexture, + ImageOrigin origin, bool adopted) { + if (context == nullptr || !backendTexture.isValid()) { + return nullptr; + } + auto sampler = TextureSampler::MakeFrom(context, backendTexture); + if (sampler == nullptr) { + return nullptr; + } + auto texture = new ExternalTexture(std::move(sampler), backendTexture.width(), + backendTexture.height(), origin, adopted); + return Resource::Wrap(context, texture); +} + +ExternalTexture::ExternalTexture(std::unique_ptr sampler, int width, int height, + ImageOrigin origin, bool adopted) + : Texture(width, height, origin), sampler(std::move(sampler)), adopted(adopted) { +} + +size_t ExternalTexture::memoryUsage() const { + if (!adopted) { + return 0; + } + auto colorSize = width() * height() * PixelFormatBytesPerPixel(sampler->format); + return sampler->hasMipmaps() ? colorSize * 4 / 3 : colorSize; +} + +void ExternalTexture::onReleaseGPU() { + if (adopted) { + context->gpu()->deleteSampler(sampler.get()); + } +} +} // namespace tgfx diff --git a/src/gpu/ExternalTexture.h b/src/gpu/ExternalTexture.h new file mode 100644 index 00000000..b0287e4c --- /dev/null +++ b/src/gpu/ExternalTexture.h @@ -0,0 +1,47 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
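A worked example of the memory estimate above, with illustrative numbers: an adopted 1024x1024 RGBA_8888 texture accounts for 1024 * 1024 * 4 = 4 MiB, and the 4/3 factor for a full mip chain raises that to about 5.33 MiB; non-adopted textures report zero because their storage is owned externally. The same arithmetic as a sketch:

#include <cstddef>

// bytesPerPixel depends on the sampler's PixelFormat (4 for RGBA_8888).
inline size_t EstimateTextureBytes(int width, int height, size_t bytesPerPixel, bool mipmapped) {
  size_t colorSize = static_cast<size_t>(width) * static_cast<size_t>(height) * bytesPerPixel;
  return mipmapped ? colorSize * 4 / 3 : colorSize;
}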
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Texture.h" + +namespace tgfx { +/** + * Texture created externally to TGFX + */ +class ExternalTexture : public Texture { + public: + static std::shared_ptr MakeFrom(Context* context, const BackendTexture& backendTexture, + ImageOrigin origin, bool adopted); + + size_t memoryUsage() const override; + + const TextureSampler* getSampler() const override { + return sampler.get(); + } + + private: + std::unique_ptr sampler = {}; + bool adopted = false; + + ExternalTexture(std::unique_ptr sampler, int width, int height, + ImageOrigin origin, bool adopted); + + void onReleaseGPU() override; +}; +} // namespace tgfx diff --git a/src/gpu/FragmentShaderBuilder.cpp b/src/gpu/FragmentShaderBuilder.cpp new file mode 100644 index 00000000..fcf958a9 --- /dev/null +++ b/src/gpu/FragmentShaderBuilder.cpp @@ -0,0 +1,49 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "FragmentShaderBuilder.h" +#include "ProgramBuilder.h" + +namespace tgfx { +FragmentShaderBuilder::FragmentShaderBuilder(ProgramBuilder* program) : ShaderBuilder(program) { + subStageIndices.push_back(0); +} + +void FragmentShaderBuilder::onFinalize() { + programBuilder->varyingHandler()->getFragDecls(&shaderStrings[Type::Inputs]); +} + +void FragmentShaderBuilder::declareCustomOutputColor() { + outputs.emplace_back(CustomColorOutputName(), SLType::Float4, ShaderVar::TypeModifier::Out); +} + +void FragmentShaderBuilder::onBeforeChildProcEmitCode() { + subStageIndices.push_back(0); + // second-to-last value in the subStageIndices stack is the index of the child proc + // at that level which is currently emitting code. + _mangleString.append("_c"); + _mangleString += std::to_string(subStageIndices[subStageIndices.size() - 2]); +} + +void FragmentShaderBuilder::onAfterChildProcEmitCode() { + subStageIndices.pop_back(); + subStageIndices.back()++; + auto removeAt = _mangleString.rfind('_'); + _mangleString.erase(removeAt, _mangleString.size() - removeAt); +} +} // namespace tgfx diff --git a/src/gpu/FragmentShaderBuilder.h b/src/gpu/FragmentShaderBuilder.h new file mode 100644 index 00000000..06821eb7 --- /dev/null +++ b/src/gpu/FragmentShaderBuilder.h @@ -0,0 +1,71 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "ShaderBuilder.h" + +namespace tgfx { +class FragmentShaderBuilder : public ShaderBuilder { + public: + explicit FragmentShaderBuilder(ProgramBuilder* program); + + const std::string& mangleString() const { + return _mangleString; + } + + virtual std::string dstColor() = 0; + + void onBeforeChildProcEmitCode(); + + void onAfterChildProcEmitCode(); + + void declareCustomOutputColor(); + + protected: + virtual std::string colorOutputName() = 0; + + static std::string CustomColorOutputName() { + return "tgfx_FragColor"; + } + + void onFinalize() override; + + /** + * State that tracks which child proc in the proc tree is currently emitting code. This is + * used to update the _mangleString, which is used to mangle the names of uniforms and functions + * emitted by the proc. subStageIndices is a stack: its count indicates how many levels deep + * we are in the tree, and its second-to-last value is the index of the child proc at that + * level which is currently emitting code. For example, if subStageIndices = [3, 1, 2, 0], that + * means we're currently emitting code for the base proc's 3rd child's 1st child's 2nd child. + */ + std::vector subStageIndices; + + /** + * The mangle string is used to mangle the names of uniforms/functions emitted by the child + * procs so no duplicate uniforms/functions appear in the generated shader program. The mangle + * string is simply based on subStageIndices. For example, if subStageIndices = [3, 1, 2, 0], + * then the _mangleString will be "_c3_c1_c2", and any uniform/function emitted by that proc will + * have "_c3_c1_c2" appended to its name, which can be interpreted as "base proc's 3rd child's + * 1st child's 2nd child". + */ + std::string _mangleString; + + friend class ProgramBuilder; +}; +} // namespace tgfx diff --git a/src/gpu/Gpu.cpp b/src/gpu/Gpu.cpp new file mode 100644 index 00000000..e3e94a5c --- /dev/null +++ b/src/gpu/Gpu.cpp @@ -0,0 +1,29 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
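To make the mangling scheme documented above concrete, here is a standalone sketch of the subStageIndices bookkeeping (not tgfx API, just the same two operations in isolation):

#include <string>
#include <vector>

// Mirrors onBeforeChildProcEmitCode / onAfterChildProcEmitCode from FragmentShaderBuilder.
struct MangleSketch {
  std::vector<int> subStageIndices{0};
  std::string mangleString;

  void beforeChild() {
    subStageIndices.push_back(0);
    mangleString += "_c" + std::to_string(subStageIndices[subStageIndices.size() - 2]);
  }

  void afterChild() {  // only valid after a matching beforeChild()
    subStageIndices.pop_back();
    subStageIndices.back()++;
    mangleString.erase(mangleString.rfind('_'));
  }
};
// Entering the base processor's first child yields "_c0"; after finishing that child's first
// nested child and entering its second, the string becomes "_c0_c1".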
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "Gpu.h" +#include "gpu/Texture.h" + +namespace tgfx { +void Gpu::regenerateMipMapLevels(const TextureSampler* sampler) { + if (!sampler->hasMipmaps()) { + return; + } + onRegenerateMipMapLevels(sampler); +} +} // namespace tgfx diff --git a/src/gpu/Gpu.h b/src/gpu/Gpu.h new file mode 100644 index 00000000..b25cc244 --- /dev/null +++ b/src/gpu/Gpu.h @@ -0,0 +1,75 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/Semaphore.h" +#include "gpu/TextureSampler.h" +#include "tgfx/gpu/Context.h" + +namespace tgfx { +class RenderPass; + +class RenderTarget; + +class Texture; + +class Gpu { + public: + virtual ~Gpu() = default; + + Context* context() { + return _context; + } + + virtual std::unique_ptr createSampler(int width, int height, PixelFormat format, + int mipLevelCount) = 0; + + virtual void deleteSampler(TextureSampler* sampler) = 0; + + virtual void writePixels(const TextureSampler* sampler, Rect rect, const void* pixels, + size_t rowBytes) = 0; + + virtual void copyRenderTargetToTexture(const RenderTarget* renderTarget, Texture* texture, + const Rect& srcRect, const Point& dstPoint) = 0; + + virtual void resolveRenderTarget(RenderTarget* renderTarget) = 0; + + virtual bool insertSemaphore(Semaphore* semaphore) = 0; + + virtual bool waitSemaphore(const Semaphore* semaphore) = 0; + + virtual RenderPass* getRenderPass(std::shared_ptr renderTarget, + std::shared_ptr renderTargetTexture) = 0; + + virtual bool submitToGpu(bool syncCpu) = 0; + + virtual void submit(RenderPass* renderPass) = 0; + + void regenerateMipMapLevels(const TextureSampler* sampler); + + protected: + explicit Gpu(Context* context) : _context(context) { + } + + virtual void onRegenerateMipMapLevels(const TextureSampler* sampler) = 0; + + Context* _context; +}; +} // namespace tgfx diff --git a/src/gpu/GpuBuffer.h b/src/gpu/GpuBuffer.h new file mode 100644 index 00000000..d1186d75 --- /dev/null +++ b/src/gpu/GpuBuffer.h @@ -0,0 +1,52 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/gpu/Resource.h" + +namespace tgfx { +enum class BufferType { + Index, + Vertex, +}; + +class GpuBuffer : public Resource { + public: + static std::shared_ptr Make(Context* context, BufferType bufferType, + const void* buffer, size_t size); + + size_t size() const { + return _sizeInBytes; + } + + size_t memoryUsage() const override { + return _sizeInBytes; + } + + protected: + GpuBuffer(BufferType bufferType, size_t sizeInBytes) + : _bufferType(bufferType), _sizeInBytes(sizeInBytes) { + } + + BufferType _bufferType; + + private: + size_t _sizeInBytes; +}; +} // namespace tgfx diff --git a/src/gpu/GpuPaint.h b/src/gpu/GpuPaint.h new file mode 100644 index 00000000..2bd72184 --- /dev/null +++ b/src/gpu/GpuPaint.h @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class GpuPaint { + public: + Context* context = nullptr; + Color color = Color::Black(); + std::vector> colorFragmentProcessors; + std::vector> coverageFragmentProcessors; +}; +} // namespace tgfx diff --git a/src/gpu/GradientCache.cpp b/src/gpu/GradientCache.cpp new file mode 100644 index 00000000..09fb1ec8 --- /dev/null +++ b/src/gpu/GradientCache.cpp @@ -0,0 +1,127 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GradientCache.h" +#include + +namespace tgfx { +// Each bitmap will be 256x1. +static constexpr size_t kMaxNumCachedGradientBitmaps = 32; +static constexpr int kGradientTextureSize = 256; + +std::shared_ptr GradientCache::find(const BytesKey& bytesKey) { + auto iter = textures.find(bytesKey); + if (iter == textures.end()) { + return nullptr; + } + keys.remove(bytesKey); + keys.push_front(bytesKey); + return iter->second; +} + +void GradientCache::add(const BytesKey& bytesKey, std::shared_ptr texture) { + textures[bytesKey] = std::move(texture); + keys.push_front(bytesKey); + while (keys.size() > kMaxNumCachedGradientBitmaps) { + auto key = keys.back(); + keys.pop_back(); + textures.erase(key); + } +} + +std::shared_ptr CreateGradient(const Color* colors, const float* positions, int count, + int resolution) { + Bitmap bitmap(resolution, 1, false, false); + Pixmap pixmap(bitmap); + if (pixmap.isEmpty()) { + return nullptr; + } + pixmap.clear(); + auto pixels = reinterpret_cast(pixmap.writablePixels()); + int prevIndex = 0; + for (int i = 1; i < count; ++i) { + int nextIndex = + std::min(static_cast(positions[i] * static_cast(resolution)), resolution - 1); + + if (nextIndex > prevIndex) { + auto r0 = colors[i - 1].red; + auto g0 = colors[i - 1].green; + auto b0 = colors[i - 1].blue; + auto a0 = colors[i - 1].alpha; + auto r1 = colors[i].red; + auto g1 = colors[i].green; + auto b1 = colors[i].blue; + auto a1 = colors[i].alpha; + + auto step = 1.0f / static_cast(nextIndex - prevIndex); + auto deltaR = (r1 - r0) * step; + auto deltaG = (g1 - g0) * step; + auto deltaB = (b1 - b0) * step; + auto deltaA = (a1 - a0) * step; + + for (int curIndex = prevIndex; curIndex <= nextIndex; ++curIndex) { + pixels[curIndex * 4] = static_cast(r0 * 255.0f); + pixels[curIndex * 4 + 1] = static_cast(g0 * 255.0f); + pixels[curIndex * 4 + 2] = static_cast(b0 * 255.0f); + pixels[curIndex * 4 + 3] = static_cast(a0 * 255.0f); + r0 += deltaR; + g0 += deltaG; + b0 += deltaB; + a0 += deltaA; + } + } + prevIndex = nextIndex; + } + return bitmap.makeBuffer(); +} + +std::shared_ptr GradientCache::getGradient(Context* context, const Color* colors, + const float* positions, int count) { + BytesKey bytesKey = {}; + for (int i = 0; i < count; ++i) { + bytesKey.write(colors[i].red); + bytesKey.write(colors[i].green); + bytesKey.write(colors[i].blue); + bytesKey.write(colors[i].alpha); + bytesKey.write(positions[i]); + } + + auto texture = find(bytesKey); + if (texture) { + return texture; + } + auto pixelBuffer = CreateGradient(colors, positions, count, kGradientTextureSize); + if (pixelBuffer == nullptr) { + return nullptr; + } + texture = Texture::MakeFrom(context, pixelBuffer); + if (texture) { + add(bytesKey, texture); + } + return texture; +} + +void GradientCache::releaseAll() { + textures.clear(); + keys.clear(); +} + +bool GradientCache::empty() const { + return textures.empty() && keys.empty(); +} +} // namespace tgfx diff --git a/src/gpu/GradientCache.h b/src/gpu/GradientCache.h new file mode 100644 index 00000000..933d41d3 --- /dev/null +++ b/src/gpu/GradientCache.h @@ -0,0 +1,46 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include "core/PixelBuffer.h" +#include "tgfx/core/Color.h" +#include "tgfx/core/Pixmap.h" +#include "tgfx/utils/BytesKey.h" + +namespace tgfx { +class GradientCache { + public: + std::shared_ptr getGradient(Context* context, const Color* colors, + const float* positions, int count); + + void releaseAll(); + + bool empty() const; + + private: + std::shared_ptr find(const BytesKey& bytesKey); + + void add(const BytesKey& bytesKey, std::shared_ptr texture); + + std::list keys = {}; + BytesKeyMap> textures = {}; +}; +} // namespace tgfx diff --git a/src/gpu/Pipeline.cpp b/src/gpu/Pipeline.cpp new file mode 100644 index 00000000..b3f906fa --- /dev/null +++ b/src/gpu/Pipeline.cpp @@ -0,0 +1,99 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "Pipeline.h" +#include "gpu/ProgramBuilder.h" +#include "gpu/StagedUniformBuffer.h" +#include "gpu/TextureSampler.h" +#include "gpu/processors/PorterDuffXferProcessor.h" + +namespace tgfx { +Pipeline::Pipeline(std::unique_ptr geometryProcessor, + std::vector> fragmentProcessors, + size_t numColorProcessors, BlendMode blendMode, + const DstTextureInfo& dstTextureInfo, const Swizzle* outputSwizzle) + : geometryProcessor(std::move(geometryProcessor)), + fragmentProcessors(std::move(fragmentProcessors)), numColorProcessors(numColorProcessors), + dstTextureInfo(dstTextureInfo), _outputSwizzle(outputSwizzle) { + if (!BlendModeAsCoeff(blendMode, &_blendInfo)) { + xferProcessor = PorterDuffXferProcessor::Make(blendMode); + } +} + +const XferProcessor* Pipeline::getXferProcessor() const { + if (xferProcessor == nullptr) { + return EmptyXferProcessor::GetInstance(); + } + return xferProcessor.get(); +} + +void Pipeline::getUniforms(UniformBuffer* uniformBuffer) const { + auto buffer = static_cast(uniformBuffer); + buffer->advanceStage(); + FragmentProcessor::CoordTransformIter coordTransformIter(this); + geometryProcessor->setData(buffer, &coordTransformIter); + for (auto& fragmentProcessor : fragmentProcessors) { + buffer->advanceStage(); + FragmentProcessor::Iter iter(fragmentProcessor.get()); + const FragmentProcessor* fp = iter.next(); + while (fp) { + fp->setData(buffer); + fp = iter.next(); + } + } + if (dstTextureInfo.texture != nullptr) { + buffer->advanceStage(); + xferProcessor->setData(buffer, dstTextureInfo.texture.get(), dstTextureInfo.offset); + } + buffer->resetStage(); +} + +std::vector Pipeline::getSamplers() const { + std::vector samplers = {}; + FragmentProcessor::Iter iter(this); + const FragmentProcessor* fp = iter.next(); + while (fp) { + for (size_t i = 0; i < fp->numTextureSamplers(); ++i) { + SamplerInfo sampler = {fp->textureSampler(i), fp->samplerState(i)}; + samplers.push_back(sampler); + } + fp = iter.next(); + } + if (dstTextureInfo.texture != nullptr) { + SamplerInfo sampler = {dstTextureInfo.texture->getSampler(), {}}; + samplers.push_back(sampler); + } + return samplers; +} + +void Pipeline::computeUniqueKey(Context* context, BytesKey* bytesKey) const { + geometryProcessor->computeProcessorKey(context, bytesKey); + if (dstTextureInfo.texture != nullptr) { + dstTextureInfo.texture->getSampler()->computeKey(context, bytesKey); + } + for (const auto& processor : fragmentProcessors) { + processor->computeProcessorKey(context, bytesKey); + } + getXferProcessor()->computeProcessorKey(context, bytesKey); + bytesKey->write(static_cast(_outputSwizzle->asKey())); +} + +std::unique_ptr Pipeline::createProgram(Context* context) const { + return ProgramBuilder::CreateProgram(context, this); +} +} // namespace tgfx diff --git a/src/gpu/Pipeline.h b/src/gpu/Pipeline.h new file mode 100644 index 00000000..4ca2d4ad --- /dev/null +++ b/src/gpu/Pipeline.h @@ -0,0 +1,102 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "Swizzle.h" +#include "gpu/Blend.h" +#include "gpu/ProgramInfo.h" +#include "gpu/processors/EmptyXferProcessor.h" +#include "gpu/processors/FragmentProcessor.h" +#include "gpu/processors/GeometryProcessor.h" + +namespace tgfx { +struct DstTextureInfo { + bool requiresBarrier = false; + Point offset = Point::Zero(); + std::shared_ptr texture = nullptr; +}; + +/** + * Pipeline is a ProgramInfo that uses a list of Processors to assemble a shader program and set API + * state for a draw. + */ +class Pipeline : public ProgramInfo { + public: + Pipeline(std::unique_ptr geometryProcessor, + std::vector> fragmentProcessors, + size_t numColorProcessors, BlendMode blendMode, const DstTextureInfo& dstTextureInfo, + const Swizzle* outputSwizzle); + + size_t numColorFragmentProcessors() const { + return numColorProcessors; + } + + size_t numFragmentProcessors() const { + return fragmentProcessors.size(); + } + + const GeometryProcessor* getGeometryProcessor() const { + return geometryProcessor.get(); + } + + const FragmentProcessor* getFragmentProcessor(size_t idx) const { + return fragmentProcessors[idx].get(); + } + + const XferProcessor* getXferProcessor() const; + + const Swizzle* outputSwizzle() const { + return _outputSwizzle; + } + + const Texture* dstTexture() const { + return dstTextureInfo.texture.get(); + } + + const Point& dstTextureOffset() const { + return dstTextureInfo.offset; + } + + bool requiresBarrier() const override { + return dstTextureInfo.requiresBarrier; + } + + const BlendInfo* blendInfo() const override { + return xferProcessor == nullptr ? &_blendInfo : nullptr; + } + + void getUniforms(UniformBuffer* uniformBuffer) const override; + + std::vector getSamplers() const override; + + void computeUniqueKey(Context* context, BytesKey* uniqueKey) const override; + + std::unique_ptr createProgram(Context* context) const override; + + private: + std::unique_ptr geometryProcessor; + std::vector> fragmentProcessors; + // This value is also the index in fragmentProcessors where coverage processors begin. + size_t numColorProcessors = 0; + std::unique_ptr xferProcessor; + BlendInfo _blendInfo = {}; + DstTextureInfo dstTextureInfo = {}; + const Swizzle* _outputSwizzle = nullptr; +}; +} // namespace tgfx diff --git a/src/gpu/PlainTexture.cpp b/src/gpu/PlainTexture.cpp new file mode 100644 index 00000000..652dce62 --- /dev/null +++ b/src/gpu/PlainTexture.cpp @@ -0,0 +1,95 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "PlainTexture.h" +#include "gpu/Gpu.h" +#include "utils/PixelFormatUtil.h" +#include "utils/UniqueID.h" + +namespace tgfx { +static void ComputeScratchKey(BytesKey* scratchKey, int width, int height, PixelFormat format, + bool mipMapped) { + static const uint32_t PlainTextureType = UniqueID::Next(); + scratchKey->write(PlainTextureType); + scratchKey->write(static_cast(width)); + scratchKey->write(static_cast(height)); + auto formatValue = static_cast(format); + auto mipMapValue = static_cast(mipMapped ? 1 : 0); + scratchKey->write(formatValue | (mipMapValue << 30)); +} + +std::shared_ptr Texture::MakeFormat(Context* context, int width, int height, + const void* pixels, size_t rowBytes, + PixelFormat pixelFormat, ImageOrigin origin, + bool mipMapped) { + if (!PlainTexture::CheckSizeAndFormat(context, width, height, pixelFormat)) { + return nullptr; + } + auto caps = context->caps(); + int maxMipmapLevel = mipMapped ? caps->getMaxMipmapLevel(width, height) : 0; + ScratchKey scratchKey = {}; + ComputeScratchKey(&scratchKey, width, height, pixelFormat, maxMipmapLevel > 0); + auto texture = + std::static_pointer_cast(context->resourceCache()->findScratchResource(scratchKey)); + if (texture) { + texture->_origin = origin; + } else { + auto sampler = context->gpu()->createSampler(width, height, pixelFormat, maxMipmapLevel + 1); + if (sampler == nullptr) { + return nullptr; + } + texture = Resource::Wrap(context, new PlainTexture(std::move(sampler), width, height, origin)); + } + if (pixels != nullptr) { + context->gpu()->writePixels(texture->getSampler(), Rect::MakeWH(width, height), pixels, + rowBytes); + } + return texture; +} + +bool PlainTexture::CheckSizeAndFormat(Context* context, int width, int height, PixelFormat format) { + if (context == nullptr || width < 1 || height < 1) { + return false; + } + if (format != PixelFormat::ALPHA_8 && format != PixelFormat::RGBA_8888 && + format != PixelFormat::BGRA_8888) { + return false; + } + auto caps = context->caps(); + auto maxTextureSize = caps->maxTextureSize; + return width <= maxTextureSize && height <= maxTextureSize; +} + +PlainTexture::PlainTexture(std::unique_ptr sampler, int width, int height, + ImageOrigin origin) + : Texture(width, height, origin), sampler(std::move(sampler)) { +} + +size_t PlainTexture::memoryUsage() const { + auto colorSize = width() * height() * PixelFormatBytesPerPixel(sampler->format); + return sampler->hasMipmaps() ? 
colorSize * 4 / 3 : colorSize; +} + +void PlainTexture::computeScratchKey(BytesKey* scratchKey) const { + ComputeScratchKey(scratchKey, width(), height(), sampler->format, sampler->hasMipmaps()); +} + +void PlainTexture::onReleaseGPU() { + context->gpu()->deleteSampler(sampler.get()); +} +} // namespace tgfx diff --git a/src/gpu/PlainTexture.h b/src/gpu/PlainTexture.h new file mode 100644 index 00000000..4b30d5a0 --- /dev/null +++ b/src/gpu/PlainTexture.h @@ -0,0 +1,52 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Texture.h" + +namespace tgfx { +/** + * Texture with a single plane. + */ +class PlainTexture : public Texture { + public: + /** + * Returns true if the specified texture size and format can be created by the GPU backend. + */ + static bool CheckSizeAndFormat(Context* context, int width, int height, PixelFormat format); + + PlainTexture(std::unique_ptr sampler, int width, int height, ImageOrigin origin); + + size_t memoryUsage() const override; + + const TextureSampler* getSampler() const override { + return sampler.get(); + } + + protected: + void computeScratchKey(BytesKey* scratchKey) const override; + + private: + std::unique_ptr sampler = {}; + + void onReleaseGPU() override; + + friend class Texture; +}; +} // namespace tgfx diff --git a/src/gpu/Program.h b/src/gpu/Program.h new file mode 100644 index 00000000..3fc16e11 --- /dev/null +++ b/src/gpu/Program.h @@ -0,0 +1,57 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/gpu/Context.h" +#include "tgfx/utils/BytesKey.h" + +namespace tgfx { +/** + * The base class for GPU program. Overrides the onReleaseGPU() method to to free all GPU resources. 
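+ * A backend implementation subclasses Program and frees its API objects in that override. A
+ * minimal hedged sketch (the class and member names here are illustrative, not the actual GL
+ * backend):
+ *
+ *   class MyGLProgram : public Program {
+ *    private:
+ *     void onReleaseGPU() override {
+ *       // e.g. glDeleteProgram(programID); the owning context is expected to be current here.
+ *     }
+ *     unsigned programID = 0;
+ *   };
+ *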
+ * No backend API calls should be made during destructuring since there may be no GPU context which + * is current on the calling thread. + */ +class Program { + public: + explicit Program(Context* context) : context(context) { + } + + virtual ~Program() = default; + + /** + * Retrieves the context associated with this Program. + */ + Context* getContext() const { + return context; + } + + protected: + Context* context = nullptr; + + private: + BytesKey uniqueKey = {}; + + /** + * Overridden to free GPU resources in the backend API. + */ + virtual void onReleaseGPU() = 0; + + friend class ProgramCache; +}; +} // namespace tgfx diff --git a/src/gpu/ProgramBuilder.cpp b/src/gpu/ProgramBuilder.cpp new file mode 100644 index 00000000..626b0177 --- /dev/null +++ b/src/gpu/ProgramBuilder.cpp @@ -0,0 +1,199 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ProgramBuilder.h" +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +ProgramBuilder::ProgramBuilder(Context* context, const Pipeline* pipeline) + : context(context), pipeline(pipeline) { +} + +bool ProgramBuilder::emitAndInstallProcessors() { + std::string inputColor; + std::string inputCoverage; + emitAndInstallGeoProc(&inputColor, &inputCoverage); + emitAndInstallFragProcessors(&inputColor, &inputCoverage); + emitAndInstallXferProc(inputColor, inputCoverage); + emitFSOutputSwizzle(); + + return checkSamplerCounts(); +} + +void ProgramBuilder::advanceStage() { + _stageIndex++; + // Each output to the fragment processor gets its own code section + fragmentShaderBuilder()->nextStage(); +} + +void ProgramBuilder::emitAndInstallGeoProc(std::string* outputColor, std::string* outputCoverage) { + uniformHandler()->addUniform(ShaderFlags::Vertex, SLType::Float4, RTAdjustName); + advanceStage(); + nameExpression(outputColor, "outputColor"); + nameExpression(outputCoverage, "outputCoverage"); + auto geometryProcessor = pipeline->getGeometryProcessor(); + // Enclose custom code in a block to avoid namespace conflicts + fragmentShaderBuilder()->codeAppendf("{ // Stage %d %s\n", _stageIndex, + geometryProcessor->name().c_str()); + vertexShaderBuilder()->codeAppendf("// Geometry Processor %s\n", + geometryProcessor->name().c_str()); + + GeometryProcessor::FPCoordTransformHandler transformHandler(pipeline, &transformedCoordVars); + GeometryProcessor::EmitArgs args(vertexShaderBuilder(), fragmentShaderBuilder(), varyingHandler(), + uniformHandler(), getContext()->caps(), *outputColor, + *outputCoverage, &transformHandler); + geometryProcessor->emitCode(args); + fragmentShaderBuilder()->codeAppend("}"); +} + +void 
ProgramBuilder::emitAndInstallFragProcessors(std::string* color, std::string* coverage) { + size_t transformedCoordVarsIdx = 0; + std::string** inOut = &color; + for (size_t i = 0; i < pipeline->numFragmentProcessors(); ++i) { + if (i == pipeline->numColorFragmentProcessors()) { + inOut = &coverage; + } + const auto* fp = pipeline->getFragmentProcessor(i); + auto output = emitAndInstallFragProc(fp, transformedCoordVarsIdx, **inOut); + FragmentProcessor::Iter iter(fp); + while (const FragmentProcessor* tempFP = iter.next()) { + transformedCoordVarsIdx += tempFP->numCoordTransforms(); + } + **inOut = output; + } +} + +template +static const T* GetPointer(const std::vector& vector, size_t atIndex) { + if (atIndex >= vector.size()) { + return nullptr; + } + return &vector[atIndex]; +} + +std::string ProgramBuilder::emitAndInstallFragProc(const FragmentProcessor* processor, + size_t transformedCoordVarsIdx, + const std::string& input) { + advanceStage(); + std::string output; + nameExpression(&output, "output"); + + // Enclose custom code in a block to avoid namespace conflicts + fragmentShaderBuilder()->codeAppendf("{ // Stage %d %s\n", _stageIndex, + processor->name().c_str()); + + std::vector texSamplers; + FragmentProcessor::Iter fpIter(processor); + int samplerIndex = 0; + while (const auto* subFP = fpIter.next()) { + for (size_t i = 0; i < subFP->numTextureSamplers(); ++i) { + std::string name = "TextureSampler_"; + name += std::to_string(samplerIndex++); + const auto* sampler = subFP->textureSampler(i); + texSamplers.emplace_back(emitSampler(sampler, name)); + } + } + FragmentProcessor::TransformedCoordVars coords( + processor, GetPointer(transformedCoordVars, transformedCoordVarsIdx)); + FragmentProcessor::TextureSamplers textureSamplers(processor, GetPointer(texSamplers, 0)); + FragmentProcessor::EmitArgs args(fragmentShaderBuilder(), uniformHandler(), output, input, + &coords, &textureSamplers); + + processor->emitCode(args); + + fragmentShaderBuilder()->codeAppend("}"); + return output; +} + +void ProgramBuilder::emitAndInstallXferProc(const std::string& colorIn, + const std::string& coverageIn) { + advanceStage(); + + auto xferProcessor = pipeline->getXferProcessor(); + fragmentShaderBuilder()->codeAppendf("{ // Xfer Processor %s\n", xferProcessor->name().c_str()); + + SamplerHandle dstTextureSamplerHandle; + if (auto dstTexture = pipeline->dstTexture()) { + dstTextureSamplerHandle = emitSampler(dstTexture->getSampler(), "DstTextureSampler"); + } + + std::string inputColor = !colorIn.empty() ? colorIn : "vec4(1)"; + std::string inputCoverage = !coverageIn.empty() ? coverageIn : "vec4(1)"; + XferProcessor::EmitArgs args(fragmentShaderBuilder(), uniformHandler(), inputColor, inputCoverage, + fragmentShaderBuilder()->colorOutputName(), dstTextureSamplerHandle); + xferProcessor->emitCode(args); + fragmentShaderBuilder()->codeAppend("}"); +} + +SamplerHandle ProgramBuilder::emitSampler(const TextureSampler* sampler, const std::string& name) { + ++numFragmentSamplers; + return uniformHandler()->addSampler(sampler, name); +} + +void ProgramBuilder::emitFSOutputSwizzle() { + // Swizzle the fragment shader outputs if necessary. 
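+  // The identity swizzle (RGBA) needs no fix-up and returns early below. Any other swizzle is
+  // applied with a single assignment that reorders the channels of the color output. For
+  // example, assuming an alpha-only target whose output swizzle is "aaaa" and a custom color
+  // output named tgfx_FragColor (an assumption, not taken from this pipeline), the emitted
+  // line would read:
+  //
+  //   tgfx_FragColor = tgfx_FragColor.aaaa;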
+ const auto& swizzle = *pipeline->outputSwizzle(); + if (swizzle == Swizzle::RGBA()) { + return; + } + auto* fragBuilder = fragmentShaderBuilder(); + const auto& output = fragBuilder->colorOutputName(); + fragBuilder->codeAppendf("%s = %s.%s;", output.c_str(), output.c_str(), swizzle.c_str()); +} + +std::string ProgramBuilder::nameVariable(char prefix, const std::string& name, bool mangle) const { + std::string out; + if ('\0' != prefix) { + out += prefix; + } + out += name; + if (mangle && _stageIndex >= 0) { + if (out.rfind('_') == out.length() - 1) { + // Names containing "__" are reserved. + out += "x"; + } + out += "_Stage"; + out += std::to_string(_stageIndex); + } + return out; +} + +void ProgramBuilder::nameExpression(std::string* output, const std::string& baseName) { + // create var to hold stage result. If we already have a valid output name, just use that + // otherwise create a new mangled one. This name is only valid if we are reordering stages + // and have to tell stage exactly where to put its output. + std::string outName; + if (!output->empty()) { + outName = *output; + } else { + outName = nameVariable('\0', baseName); + } + fragmentShaderBuilder()->codeAppendf("vec4 %s;", outName.c_str()); + *output = outName; +} + +std::string ProgramBuilder::getUniformDeclarations(ShaderFlags visibility) const { + return uniformHandler()->getUniformDeclarations(visibility); +} + +void ProgramBuilder::finalizeShaders() { + varyingHandler()->finalize(); + vertexShaderBuilder()->finalize(ShaderFlags::Vertex); + fragmentShaderBuilder()->finalize(ShaderFlags::Fragment); +} +} // namespace tgfx diff --git a/src/gpu/ProgramBuilder.h b/src/gpu/ProgramBuilder.h new file mode 100644 index 00000000..5f81306a --- /dev/null +++ b/src/gpu/ProgramBuilder.h @@ -0,0 +1,122 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "FragmentShaderBuilder.h" +#include "Pipeline.h" +#include "UniformHandler.h" +#include "VaryingHandler.h" +#include "VertexShaderBuilder.h" +#include "gpu/processors/GeometryProcessor.h" + +namespace tgfx { +class ProgramBuilder { + public: + /** + * Generates a shader program. 
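+ * Pipeline::createProgram() forwards here, so the usual call shape is
+ * ProgramBuilder::CreateProgram(context, pipeline), where the pipeline supplies the geometry,
+ * fragment, and xfer processors whose code gets emitted and installed.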
+ */ + static std::unique_ptr CreateProgram(Context* context, const Pipeline* pipeline); + + virtual ~ProgramBuilder() = default; + + Context* getContext() const { + return context; + } + + virtual std::string versionDeclString() = 0; + + virtual std::string textureFuncName() const = 0; + + virtual std::string getShaderVarDeclarations(const ShaderVar& var, ShaderFlags flag) const = 0; + + std::string getUniformDeclarations(ShaderFlags visibility) const; + + const ShaderVar& samplerVariable(SamplerHandle handle) const { + return uniformHandler()->samplerVariable(handle); + } + + Swizzle samplerSwizzle(SamplerHandle handle) const { + return uniformHandler()->samplerSwizzle(handle); + } + + /** + * Generates a name for a variable. The generated string will be named prefixed by the prefix + * char (unless the prefix is '\0'). It also will mangle the name to be stage-specific unless + * explicitly asked not to. + */ + std::string nameVariable(char prefix, const std::string& name, bool mangle = true) const; + + int stageIndex() const { + return _stageIndex; + } + + virtual UniformHandler* uniformHandler() = 0; + + virtual const UniformHandler* uniformHandler() const = 0; + + virtual VaryingHandler* varyingHandler() = 0; + + virtual VertexShaderBuilder* vertexShaderBuilder() = 0; + + virtual FragmentShaderBuilder* fragmentShaderBuilder() = 0; + + protected: + Context* context = nullptr; + const Pipeline* pipeline = nullptr; + + ProgramBuilder(Context* context, const Pipeline* pipeline); + + bool emitAndInstallProcessors(); + + void finalizeShaders(); + + virtual bool checkSamplerCounts() = 0; + + int numFragmentSamplers = 0; + + private: + /** + * advanceStage is called by program creator between each processor's emit code. It increments + * the stage offset for variable name mangling, and also ensures verification variables in the + * fragment shader are cleared. + */ + void advanceStage(); + + /** + * Generates a possibly mangled name for a stage variable and writes it to the fragment shader. + */ + void nameExpression(std::string* output, const std::string& baseName); + + void emitAndInstallGeoProc(std::string* outputColor, std::string* outputCoverage); + + void emitAndInstallFragProcessors(std::string* color, std::string* coverage); + + std::string emitAndInstallFragProc(const FragmentProcessor* processor, + size_t transformedCoordVarsIdx, const std::string& input); + + void emitAndInstallXferProc(const std::string& colorIn, const std::string& coverageIn); + + SamplerHandle emitSampler(const TextureSampler* sampler, const std::string& name); + + void emitFSOutputSwizzle(); + + int _stageIndex = -1; + std::vector transformedCoordVars = {}; +}; +} // namespace tgfx diff --git a/src/gpu/ProgramCache.cpp b/src/gpu/ProgramCache.cpp new file mode 100644 index 00000000..d1b06782 --- /dev/null +++ b/src/gpu/ProgramCache.cpp @@ -0,0 +1,69 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ProgramCache.h" + +namespace tgfx { +#define MAX_PROGRAM_COUNT 128 + +ProgramCache::ProgramCache(Context* context) : context(context) { +} + +bool ProgramCache::empty() const { + return programMap.empty(); +} + +Program* ProgramCache::getProgram(const ProgramInfo* programInfo) { + BytesKey uniqueKey = {}; + programInfo->computeUniqueKey(context, &uniqueKey); + auto result = programMap.find(uniqueKey); + if (result != programMap.end()) { + programLRU.remove(result->second); + programLRU.push_front(result->second); + return result->second; + } + // TODO(domrjchen): createProgram() 应该统计到 programCompilingTime 里。 + auto program = programInfo->createProgram(context).release(); + if (program == nullptr) { + return nullptr; + } + program->uniqueKey = uniqueKey; + programLRU.push_front(program); + programMap[uniqueKey] = program; + while (programLRU.size() > MAX_PROGRAM_COUNT) { + removeOldestProgram(); + } + return program; +} + +void ProgramCache::removeOldestProgram(bool releaseGPU) { + auto program = programLRU.back(); + programLRU.pop_back(); + programMap.erase(program->uniqueKey); + if (releaseGPU) { + program->onReleaseGPU(); + } + delete program; +} + +void ProgramCache::releaseAll(bool releaseGPU) { + while (!programLRU.empty()) { + removeOldestProgram(releaseGPU); + } +} +} // namespace tgfx diff --git a/src/gpu/ProgramCache.h b/src/gpu/ProgramCache.h new file mode 100644 index 00000000..2cc11e81 --- /dev/null +++ b/src/gpu/ProgramCache.h @@ -0,0 +1,56 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include "Program.h" +#include "ProgramInfo.h" + +namespace tgfx { +/** + * Manages the lifetime of all Program instances. + */ +class ProgramCache { + public: + explicit ProgramCache(Context* context); + + /** + * Returns true if there is no cache at all. + */ + bool empty() const; + + /** + * Returns a program cache of specified ProgramMaker. If there is no associated cache available, + * a new program will be created by programMaker. Returns null if the programMaker fails to make a + * new program. 
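+ * Programs are keyed by ProgramInfo::computeUniqueKey() and kept in an LRU list: a cache hit
+ * moves the program to the front, and once more than MAX_PROGRAM_COUNT (128) programs are
+ * cached, the least recently used one is released. Here "programMaker" refers to the
+ * ProgramInfo::createProgram() factory used to build the program on a cache miss.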
+ */ + Program* getProgram(const ProgramInfo* programInfo); + + private: + Context* context = nullptr; + std::list programLRU = {}; + BytesKeyMap programMap = {}; + + void removeOldestProgram(bool releaseGPU = true); + void releaseAll(bool releaseGPU); + + friend class Context; +}; +} // namespace tgfx diff --git a/src/gpu/ProgramInfo.h b/src/gpu/ProgramInfo.h new file mode 100644 index 00000000..253fa262 --- /dev/null +++ b/src/gpu/ProgramInfo.h @@ -0,0 +1,71 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Blend.h" +#include "gpu/Program.h" +#include "gpu/SamplerState.h" +#include "gpu/TextureSampler.h" +#include "gpu/UniformBuffer.h" + +namespace tgfx { +struct SamplerInfo { + const TextureSampler* sampler; + SamplerState state; +}; +/** + * This immutable object contains information needed to build a shader program and set API state for + * a draw. + */ +class ProgramInfo { + public: + virtual ~ProgramInfo() = default; + + /** + * Returns true if the draw requires a texture barrier. + */ + virtual bool requiresBarrier() const = 0; + + /** + * Returns the blend info for the draw. A nullptr is returned if the draw does not require + * blending. + */ + virtual const BlendInfo* blendInfo() const = 0; + + /** + * Collects uniform data for the draw. + */ + virtual void getUniforms(UniformBuffer* uniformBuffer) const = 0; + + /** + * Collects texture samplers for the draw. + */ + virtual std::vector getSamplers() const = 0; + + /** + * Computes a unique key for the program. + */ + virtual void computeUniqueKey(Context* context, BytesKey* uniqueKey) const = 0; + + /** + * Creates a new program. + */ + virtual std::unique_ptr createProgram(Context* context) const = 0; +}; +} // namespace tgfx diff --git a/src/gpu/ProxyProvider.cpp b/src/gpu/ProxyProvider.cpp new file mode 100644 index 00000000..a90c7eb0 --- /dev/null +++ b/src/gpu/ProxyProvider.cpp @@ -0,0 +1,228 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ProxyProvider.h" +#include "gpu/PlainTexture.h" + +namespace tgfx { +class ImageBufferTextureProxy : public TextureProxy { + public: + ImageBufferTextureProxy(ProxyProvider* provider, std::shared_ptr imageBuffer, + bool mipMapped) + : TextureProxy(provider), imageBuffer(std::move(imageBuffer)), mipMapped(mipMapped) { + } + + int width() const override { + return texture ? texture->width() : imageBuffer->width(); + } + + int height() const override { + return texture ? texture->height() : imageBuffer->height(); + } + + bool hasMipmaps() const override { + return texture ? texture->getSampler()->hasMipmaps() : mipMapped; + } + + protected: + std::shared_ptr onMakeTexture(Context* context) override { + if (imageBuffer == nullptr) { + return nullptr; + } + auto texture = Texture::MakeFrom(context, imageBuffer, mipMapped); + if (texture != nullptr) { + imageBuffer = nullptr; + } + return texture; + } + + private: + std::shared_ptr imageBuffer = nullptr; + bool mipMapped = false; +}; + +class ImageGeneratorTextureProxy : public TextureProxy { + public: + ImageGeneratorTextureProxy(ProxyProvider* provider, std::shared_ptr task, + bool mipMapped) + : TextureProxy(provider), task(std::move(task)), mipMapped(mipMapped) { + } + + int width() const override { + return texture ? texture->width() : task->imageWidth(); + } + + int height() const override { + return texture ? texture->height() : task->imageHeight(); + } + + bool hasMipmaps() const override { + return texture ? texture->getSampler()->hasMipmaps() : mipMapped; + } + + protected: + std::shared_ptr onMakeTexture(Context* context) override { + if (task == nullptr) { + return nullptr; + } + auto buffer = task->getBuffer(); + if (buffer == nullptr) { + return nullptr; + } + auto texture = Texture::MakeFrom(context, buffer, mipMapped); + if (texture != nullptr) { + task = nullptr; + } + return texture; + } + + private: + std::shared_ptr task = nullptr; + bool mipMapped = false; +}; + +class DeferredTextureProxy : public TextureProxy { + public: + DeferredTextureProxy(ProxyProvider* provider, int width, int height, PixelFormat format, + ImageOrigin origin, bool mipMapped) + : TextureProxy(provider), _width(width), _height(height), format(format), origin(origin), + mipMapped(mipMapped) { + } + + int width() const override { + return _width; + } + + int height() const override { + return _height; + } + + bool hasMipmaps() const override { + return texture ? 
texture->getSampler()->hasMipmaps() : mipMapped; + } + + protected: + std::shared_ptr onMakeTexture(Context* context) override { + if (context == nullptr) { + return nullptr; + } + return Texture::MakeFormat(context, width(), height(), format, origin, mipMapped); + } + + private: + int _width = 0; + int _height = 0; + PixelFormat format = PixelFormat::RGBA_8888; + ImageOrigin origin = ImageOrigin::TopLeft; + bool mipMapped = false; +}; + +ProxyProvider::ProxyProvider(Context* context) : context(context) { +} + +std::shared_ptr ProxyProvider::findProxyByUniqueKey(const UniqueKey& uniqueKey) { + if (uniqueKey.empty()) { + return nullptr; + } + auto result = proxyOwnerMap.find(uniqueKey.uniqueID()); + if (result != proxyOwnerMap.end()) { + return result->second->weakThis.lock(); + } + auto resourceCache = context->resourceCache(); + auto texture = std::static_pointer_cast(resourceCache->findUniqueResource(uniqueKey)); + if (texture == nullptr) { + return nullptr; + } + auto proxy = wrapTexture(texture); + proxy->assignUniqueKey(uniqueKey, false); + return proxy; +} + +std::shared_ptr ProxyProvider::createTextureProxy( + std::shared_ptr imageBuffer, bool mipMapped) { + if (imageBuffer == nullptr) { + return nullptr; + } + auto proxy = std::make_shared(this, std::move(imageBuffer), mipMapped); + proxy->weakThis = proxy; + return proxy; +} + +std::shared_ptr ProxyProvider::createTextureProxy( + std::shared_ptr generator, bool mipMapped, bool disableAsyncTask) { + if (generator == nullptr) { + return nullptr; + } + if (disableAsyncTask || generator->asyncSupport()) { + auto buffer = generator->makeBuffer(!mipMapped); + return createTextureProxy(std::move(buffer), mipMapped); + } + auto task = ImageGeneratorTask::MakeFrom(std::move(generator), !mipMapped); + return createTextureProxy(std::move(task), mipMapped); +} + +std::shared_ptr ProxyProvider::createTextureProxy( + std::shared_ptr task, bool mipMapped) { + if (task == nullptr) { + return nullptr; + } + auto proxy = std::make_shared(this, std::move(task), mipMapped); + proxy->weakThis = proxy; + return proxy; +} + +std::shared_ptr ProxyProvider::createTextureProxy(int width, int height, + PixelFormat format, + ImageOrigin origin, + bool mipMapped) { + if (!PlainTexture::CheckSizeAndFormat(context, width, height, format)) { + return nullptr; + } + auto proxy = + std::make_shared(this, width, height, format, origin, mipMapped); + proxy->weakThis = proxy; + return proxy; +} + +std::shared_ptr ProxyProvider::wrapTexture(std::shared_ptr texture) { + if (texture == nullptr || texture->getContext() != context) { + return nullptr; + } + auto proxy = std::shared_ptr(new TextureProxy(this, std::move(texture))); + proxy->weakThis = proxy; + return proxy; +} + +void ProxyProvider::changeUniqueKey(TextureProxy* proxy, const UniqueKey& uniqueKey) { + auto result = proxyOwnerMap.find(uniqueKey.uniqueID()); + if (result != proxyOwnerMap.end()) { + result->second->removeUniqueKey(); + } + if (!proxy->uniqueKey.empty()) { + proxyOwnerMap.erase(proxy->uniqueKey.uniqueID()); + } + proxy->uniqueKey = uniqueKey; + proxyOwnerMap[uniqueKey.uniqueID()] = proxy; +} + +void ProxyProvider::removeUniqueKey(TextureProxy* proxy) { + proxyOwnerMap.erase(proxy->uniqueKey.uniqueID()); + proxy->uniqueKey = {}; +} + +} // namespace tgfx diff --git a/src/gpu/ProxyProvider.h b/src/gpu/ProxyProvider.h new file mode 100644 index 00000000..7fcdf198 --- /dev/null +++ b/src/gpu/ProxyProvider.h @@ -0,0 +1,82 @@ 
+///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "TextureProxy.h" +#include "images/ImageGeneratorTask.h" +#include "tgfx/core/ImageBuffer.h" + +namespace tgfx { +/** + * A factory for creating proxy-derived objects. + */ +class ProxyProvider { + public: + explicit ProxyProvider(Context* context); + + /* + * Finds a proxy by the specified UniqueKey. + */ + std::shared_ptr findProxyByUniqueKey(const UniqueKey& uniqueKey); + + /* + * Create a texture proxy for the image buffer. The image buffer will be released after being + * uploaded to the GPU. + */ + std::shared_ptr createTextureProxy(std::shared_ptr imageBuffer, + bool mipMapped = false); + + /* + * Create a texture proxy for the ImageGeneratorTask. The task will be released after the + * associated texture being instantiated. + */ + std::shared_ptr createTextureProxy(std::shared_ptr generator, + bool mipMapped = false, + bool disableAsyncTask = false); + + /* + * Create a texture proxy for the ImageGeneratorTask. The task will be released after the + * associated texture being instantiated. + */ + std::shared_ptr createTextureProxy(std::shared_ptr task, + bool mipMapped = false); + + /** + * Create a TextureProxy without any data. + */ + std::shared_ptr createTextureProxy(int width, int height, PixelFormat format, + ImageOrigin origin = ImageOrigin::TopLeft, + bool mipMapped = false); + + /* + * Create a texture proxy that wraps an existing texture. + */ + std::shared_ptr wrapTexture(std::shared_ptr texture); + + private: + Context* context = nullptr; + std::unordered_map proxyOwnerMap = {}; + + void changeUniqueKey(TextureProxy* proxy, const UniqueKey& uniqueKey); + + void removeUniqueKey(TextureProxy* proxy); + + friend class TextureProxy; +}; +} // namespace tgfx diff --git a/src/gpu/Quad.cpp b/src/gpu/Quad.cpp new file mode 100644 index 00000000..40ab3712 --- /dev/null +++ b/src/gpu/Quad.cpp @@ -0,0 +1,39 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "Quad.h" + +namespace tgfx { +Quad Quad::MakeFromRect(const Rect& rect, const Matrix& matrix) { + std::vector points; + points.push_back(Point::Make(rect.left, rect.top)); + points.push_back(Point::Make(rect.left, rect.bottom)); + points.push_back(Point::Make(rect.right, rect.top)); + points.push_back(Point::Make(rect.right, rect.bottom)); + matrix.mapPoints(&points[0], 4); + return Quad(points); +} + +Rect Quad::bounds() const { + auto min = [](const float c[4]) { return std::min(std::min(c[0], c[1]), std::min(c[2], c[3])); }; + auto max = [](const float c[4]) { return std::max(std::max(c[0], c[1]), std::max(c[2], c[3])); }; + float x[4] = {points[0].x, points[1].x, points[2].x, points[3].x}; + float y[4] = {points[0].y, points[1].y, points[2].y, points[3].y}; + return {min(x), min(y), max(x), max(y)}; +} +} // namespace tgfx diff --git a/src/gpu/Quad.h b/src/gpu/Quad.h new file mode 100644 index 00000000..012f0e99 --- /dev/null +++ b/src/gpu/Quad.h @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Matrix.h" +#include "tgfx/core/Rect.h" + +namespace tgfx { +class Quad { + public: + static Quad MakeFromRect(const Rect& rect, const Matrix& matrix); + + const Point& point(size_t i) const { + return points[i]; + } + + Rect bounds() const; + + private: + explicit Quad(std::vector points) : points(std::move(points)) { + } + + std::vector points; +}; +} // namespace tgfx diff --git a/src/gpu/RenderPass.cpp b/src/gpu/RenderPass.cpp new file mode 100644 index 00000000..2bb0b345 --- /dev/null +++ b/src/gpu/RenderPass.cpp @@ -0,0 +1,71 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "RenderPass.h" + +namespace tgfx { +void RenderPass::begin() { + drawPipelineStatus = DrawPipelineStatus::NotConfigured; +} + +void RenderPass::end() { + resetActiveBuffers(); +} + +void RenderPass::resetActiveBuffers() { + _program = nullptr; + _indexBuffer = nullptr; + _vertexBuffer = nullptr; +} + +void RenderPass::bindProgramAndScissorClip(const ProgramInfo* programInfo, const Rect& drawBounds) { + resetActiveBuffers(); + if (!onBindProgramAndScissorClip(programInfo, drawBounds)) { + drawPipelineStatus = DrawPipelineStatus::FailedToBind; + return; + } + drawPipelineStatus = DrawPipelineStatus::Ok; +} + +void RenderPass::bindBuffers(std::shared_ptr indexBuffer, + std::shared_ptr vertexBuffer) { + if (drawPipelineStatus != DrawPipelineStatus::Ok) { + return; + } + onBindBuffers(std::move(indexBuffer), std::move(vertexBuffer)); +} + +void RenderPass::draw(PrimitiveType primitiveType, int baseVertex, int vertexCount) { + if (drawPipelineStatus != DrawPipelineStatus::Ok) { + return; + } + onDraw(primitiveType, baseVertex, vertexCount); +} + +void RenderPass::drawIndexed(PrimitiveType primitiveType, int baseIndex, int indexCount) { + if (drawPipelineStatus != DrawPipelineStatus::Ok) { + return; + } + onDrawIndexed(primitiveType, baseIndex, indexCount); +} + +void RenderPass::clear(const Rect& scissor, Color color) { + drawPipelineStatus = DrawPipelineStatus::NotConfigured; + onClear(scissor, color); +} +} // namespace tgfx diff --git a/src/gpu/RenderPass.h b/src/gpu/RenderPass.h new file mode 100644 index 00000000..14d360ec --- /dev/null +++ b/src/gpu/RenderPass.h @@ -0,0 +1,91 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "Blend.h" +#include "GpuBuffer.h" +#include "Program.h" +#include "gpu/ProgramInfo.h" +#include "gpu/RenderTarget.h" +#include "gpu/processors/GeometryProcessor.h" +#include "tgfx/core/BlendMode.h" +#include "tgfx/core/Color.h" +#include "tgfx/gpu/Context.h" + +namespace tgfx { +/** + * Geometric primitives used for drawing. 
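+ * Triangles treats every three vertices as an independent triangle, while TriangleStrip forms a
+ * new triangle from each vertex and the two vertices that precede it.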
+ */ +enum class PrimitiveType { + Triangles, + TriangleStrip, +}; + +class RenderPass { + public: + explicit RenderPass(Context* context) : _context(context) { + } + + Context* context() { + return _context; + } + + std::shared_ptr renderTarget() { + return _renderTarget; + } + + std::shared_ptr renderTargetTexture() { + return _renderTargetTexture; + } + + virtual ~RenderPass() = default; + + void begin(); + void end(); + void bindProgramAndScissorClip(const ProgramInfo* programInfo, const Rect& drawBounds); + void bindBuffers(std::shared_ptr indexBuffer, std::shared_ptr vertexBuffer); + void draw(PrimitiveType primitiveType, int baseVertex, int vertexCount); + void drawIndexed(PrimitiveType primitiveType, int baseIndex, int indexCount); + void clear(const Rect& scissor, Color color); + + protected: + virtual bool onBindProgramAndScissorClip(const ProgramInfo* programInfo, + const Rect& drawBounds) = 0; + virtual void onBindBuffers(std::shared_ptr indexBuffer, + std::shared_ptr vertexBuffer) = 0; + virtual void onDraw(PrimitiveType primitiveType, int baseVertex, int vertexCount) = 0; + virtual void onDrawIndexed(PrimitiveType primitiveType, int baseIndex, int indexCount) = 0; + virtual void onClear(const Rect& scissor, Color color) = 0; + + Context* _context = nullptr; + std::shared_ptr _renderTarget = nullptr; + std::shared_ptr _renderTargetTexture = nullptr; + Program* _program = nullptr; + std::shared_ptr _indexBuffer; + std::shared_ptr _vertexBuffer; + + private: + enum class DrawPipelineStatus { Ok = 0, NotConfigured, FailedToBind }; + + void resetActiveBuffers(); + + DrawPipelineStatus drawPipelineStatus = DrawPipelineStatus::NotConfigured; +}; +} // namespace tgfx diff --git a/src/gpu/RenderTarget.h b/src/gpu/RenderTarget.h new file mode 100644 index 00000000..2d868319 --- /dev/null +++ b/src/gpu/RenderTarget.h @@ -0,0 +1,117 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Texture.h" +#include "tgfx/core/ImageInfo.h" + +namespace tgfx { +/** + * RenderTarget represents a 2D buffer of pixels that can be rendered to. + */ +class RenderTarget : public Resource { + public: + /** + * Wraps a backend renderTarget into RenderTarget. The caller must ensure the backend renderTarget + * is valid for the lifetime of the returned RenderTarget. Returns nullptr if the context is + * nullptr or the backend renderTarget is invalid. + */ + static std::shared_ptr MakeFrom(Context* context, + const BackendRenderTarget& renderTarget, + ImageOrigin origin); + + /** + * Creates a new RenderTarget which uses specified Texture as pixel storage. 
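
A sketch of the call order that the drawPipelineStatus state machine above enforces. The arguments are assumed to be prepared by a backend-specific caller, and GpuBuffer is assumed as the element type behind the bindBuffers() parameters.

#include <memory>
#include "gpu/RenderPass.h"

// Hypothetical encode step: binding must succeed before buffers are bound or draws are issued,
// otherwise bindBuffers()/drawIndexed() silently return.
void encodeQuads(tgfx::RenderPass* renderPass, const tgfx::ProgramInfo* programInfo,
                 const tgfx::Rect& drawBounds, std::shared_ptr<tgfx::GpuBuffer> indexBuffer,
                 std::shared_ptr<tgfx::GpuBuffer> vertexBuffer, int indexCount) {
  renderPass->begin();                                             // status: NotConfigured
  renderPass->bindProgramAndScissorClip(programInfo, drawBounds);  // status: Ok or FailedToBind
  renderPass->bindBuffers(std::move(indexBuffer), std::move(vertexBuffer));
  renderPass->drawIndexed(tgfx::PrimitiveType::Triangles, 0, indexCount);
  renderPass->end();                                               // drops retained buffer refs
}
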
The caller must + * ensure the texture is valid for the lifetime of the returned RenderTarget. + */ + static std::shared_ptr MakeFrom(const Texture* texture, int sampleCount = 1); + + /** + * Returns the display width of the render target. + */ + int width() const { + return _width; + } + + /** + * Returns the display height of the render target. + */ + int height() const { + return _height; + } + + /** + * Returns the origin of the render target, either ImageOrigin::TopLeft or + * ImageOrigin::BottomLeft. + */ + ImageOrigin origin() const { + return _origin; + } + + /** + * Returns the sample count of the render target. + */ + int sampleCount() const { + return _sampleCount; + } + + /** + * Returns the pixel format of the render target. + */ + virtual PixelFormat format() const = 0; + + size_t memoryUsage() const override { + return 0; + } + + /** + * Retrieves the backend render target. + */ + virtual BackendRenderTarget getBackendRenderTarget() const = 0; + + /** + * Copies a rect of pixels to dstPixels with specified color type, alpha type and row bytes. Copy + * starts at (srcX, srcY), and does not exceed Surface (width(), height()). Pixels are copied + * only if pixel conversion is possible. Returns true if pixels are copied to dstPixels. + */ + virtual bool readPixels(const ImageInfo& dstInfo, void* dstPixels, int srcX = 0, + int srcY = 0) const = 0; + + /** + * Replaces the backing texture of the render target with the specified Texture. + */ + virtual bool replaceTexture(const Texture* texture) = 0; + + protected: + RenderTarget(int width, int height, ImageOrigin origin, int sampleCount = 1) + : _width(width), _height(height), _origin(origin), _sampleCount(sampleCount) { + } + + private: + virtual const Swizzle& writeSwizzle() const = 0; + + int _width = 0; + int _height = 0; + ImageOrigin _origin = ImageOrigin::TopLeft; + int _sampleCount = 1; + + friend class DrawOp; + friend class Canvas; +}; +} // namespace tgfx diff --git a/src/gpu/RenderTask.h b/src/gpu/RenderTask.h new file mode 100644 index 00000000..a0eb82d5 --- /dev/null +++ b/src/gpu/RenderTask.h @@ -0,0 +1,52 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
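
A minimal readback sketch for the RenderTarget interface above, assuming the caller keeps the backend render target alive for the duration of the call; ImageInfo::byteSize() is assumed as a helper for sizing the destination buffer.

#include <vector>
#include "gpu/RenderTarget.h"

// Wrap an existing backend render target and copy its full contents into CPU memory.
std::vector<uint8_t> readBackendTarget(tgfx::Context* context,
                                       const tgfx::BackendRenderTarget& backendRenderTarget) {
  auto renderTarget =
      tgfx::RenderTarget::MakeFrom(context, backendRenderTarget, tgfx::ImageOrigin::TopLeft);
  if (renderTarget == nullptr) {
    return {};
  }
  auto info = tgfx::ImageInfo::Make(renderTarget->width(), renderTarget->height(),
                                    tgfx::ColorType::RGBA_8888, tgfx::AlphaType::Premultiplied);
  std::vector<uint8_t> pixels(info.byteSize());   // byteSize() assumed on ImageInfo
  renderTarget->readPixels(info, pixels.data());  // copy starts at (0, 0)
  return pixels;
}
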
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "Gpu.h" +#include "tgfx/gpu/Surface.h" + +namespace tgfx { +class RenderTask { + public: + virtual ~RenderTask() = default; + + virtual bool execute(Gpu* gpu) = 0; + + virtual void gatherProxies(std::vector*) const { + } + + void makeClosed() { + closed = true; + } + + bool isClosed() const { + return closed; + } + + protected: + explicit RenderTask(std::shared_ptr renderTarget) + : renderTarget(std::move(renderTarget)) { + } + + std::shared_ptr renderTarget = nullptr; + + private: + bool closed = false; +}; +} // namespace tgfx diff --git a/src/gpu/Resource.cpp b/src/gpu/Resource.cpp new file mode 100644 index 00000000..c0ad66e9 --- /dev/null +++ b/src/gpu/Resource.cpp @@ -0,0 +1,45 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/gpu/Resource.h" + +namespace tgfx { +void Resource::assignUniqueKey(const UniqueKey& newKey) { + if (newKey.empty()) { + removeUniqueKey(); + return; + } + if (newKey != uniqueKey) { + context->resourceCache()->changeUniqueKey(this, newKey); + } +} + +void Resource::removeUniqueKey() { + if (!uniqueKey.empty()) { + context->resourceCache()->removeUniqueKey(this); + } +} + +void Resource::markUniqueKeyExpired() { + uniqueKeyGeneration = 0; +} + +bool Resource::hasUniqueKey(const UniqueKey& newKey) const { + return !newKey.empty() && newKey.uniqueID() == uniqueKeyGeneration; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/gpu/ResourceCache.cpp b/src/gpu/ResourceCache.cpp new file mode 100644 index 00000000..15df4ac2 --- /dev/null +++ b/src/gpu/ResourceCache.cpp @@ -0,0 +1,305 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
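
The RenderTask interface above is small, so a sketch of a custom task may help. The only Gpu call used here is copyRenderTargetToTexture(), whose signature is inferred from its use later in this patch (Surface.cpp), so treat that call as an assumption.

#include "gpu/RenderTask.h"

// Hypothetical task that copies its render target into a texture when executed.
class CopyToTextureTask : public tgfx::RenderTask {
 public:
  CopyToTextureTask(std::shared_ptr<tgfx::RenderTarget> target,
                    std::shared_ptr<tgfx::Texture> texture)
      : RenderTask(std::move(target)), texture(std::move(texture)) {
  }

  bool execute(tgfx::Gpu* gpu) override {
    auto rect = tgfx::Rect::MakeWH(renderTarget->width(), renderTarget->height());
    // Signature assumed from Surface.cpp: (const RenderTarget*, Texture*, const Rect&, Point).
    gpu->copyRenderTargetToTexture(renderTarget.get(), texture.get(), rect, tgfx::Point::Zero());
    return true;
  }

 private:
  std::shared_ptr<tgfx::Texture> texture = nullptr;
};
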
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "tgfx/gpu/ResourceCache.h"
+#include <algorithm>
+#include <unordered_set>
+#include "tgfx/gpu/Resource.h"
+#include "tgfx/utils/BytesKey.h"
+#include "tgfx/utils/Clock.h"
+#include "utils/Log.h"
+
+namespace tgfx {
+// Default maximum number of bytes of gpu memory of budgeted resources in the cache.
+static const size_t DefaultMaxBytes = 96 * (1 << 20);
+
+static thread_local std::unordered_set<ResourceCache*> currentThreadCaches = {};
+
+class PurgeGuard {
+ public:
+  explicit PurgeGuard(ResourceCache* cache) : cache(cache) {
+    cache->purgingResource = true;
+  }
+
+  ~PurgeGuard() {
+    cache->purgingResource = false;
+  }
+
+ private:
+  ResourceCache* cache = nullptr;
+};
+
+ResourceCache::ResourceCache(Context* context) : context(context), maxBytes(DefaultMaxBytes) {
+}
+
+bool ResourceCache::empty() const {
+  return nonpurgeableResources.empty() && pendingPurgeableResources.empty() &&
+         purgeableResources.empty();
+}
+
+void ResourceCache::attachToCurrentThread() {
+  currentThreadCaches.insert(this);
+  // Triggers NotifyReferenceReachedZero() if a Resource has no other reference.
+  strongReferences.clear();
+}
+
+void ResourceCache::detachFromCurrentThread() {
+  for (auto& resource : nonpurgeableResources) {
+    // Adds a strong reference to the external Resource to make sure it never triggers
+    // NotifyReferenceReachedZero() while associated device is not locked.
+    auto strongReference = resource->weakThis.lock();
+    if (strongReference) {
+      strongReferences.push_back(strongReference);
+    }
+  }
+  // The strongReferences guard is now active, so no external Resource can trigger
+  // NotifyReferenceReachedZero() from this point on.
+  std::vector<Resource*> pendingResources = {};
+  std::swap(pendingResources, pendingPurgeableResources);
+  for (auto& resource : pendingResources) {
+    processUnreferencedResource(resource);
+  }
+  DEBUG_ASSERT(pendingPurgeableResources.empty());
+  currentThreadCaches.erase(this);
+}
+
+void ResourceCache::releaseAll(bool releaseGPU) {
+  PurgeGuard guard(this);
+  for (auto& resource : nonpurgeableResources) {
+    if (releaseGPU) {
+      resource->onReleaseGPU();
+    }
+    // Mark the Resource as released; it can be deleted directly once the external reference
+    // count reaches 0.
+    resource->context = nullptr;
+  }
+  nonpurgeableResources.clear();
+  for (auto& resource : purgeableResources) {
+    if (releaseGPU) {
+      resource->onReleaseGPU();
+    }
+    delete resource;
+  }
+  purgeableResources.clear();
+  scratchKeyMap.clear();
+  uniqueKeyMap.clear();
+  purgeableBytes = 0;
+  totalBytes = 0;
+}
+
+void ResourceCache::setCacheLimit(size_t bytesLimit) {
+  if (maxBytes == bytesLimit) {
+    return;
+  }
+  maxBytes = bytesLimit;
+  purgeUntilMemoryTo(maxBytes);
+}
+
+std::shared_ptr<Resource> ResourceCache::findScratchResource(const BytesKey& scratchKey) {
+  auto result = scratchKeyMap.find(scratchKey);
+  if (result == scratchKeyMap.end()) {
+    return nullptr;
+  }
+  auto& list = result->second;
+  int index = 0;
+  bool found = false;
+  for (auto& resource : list) {
+    if (resource->isPurgeable() && !resource->hasValidUniqueKey()) {
+      found = true;
+      break;
+    }
+    index++;
+  }
+  if (!found) {
+    return nullptr;
+  }
+  auto resource = list[index];
+  RemoveFromList(purgeableResources, resource);
+  purgeableBytes -= resource->memoryUsage();
+  return wrapResource(resource);
+}
+
+std::shared_ptr<Resource> ResourceCache::findUniqueResource(const UniqueKey& uniqueKey) {
+  auto resource = getUniqueResource(uniqueKey);
+  if (resource == nullptr) {
+    return nullptr;
+  }
+  if (resource->isPurgeable()) {
+    RemoveFromList(purgeableResources, resource);
+    purgeableBytes -= resource->memoryUsage();
+    return wrapResource(resource);
+  }
+  return resource->weakThis.lock();
+}
+
+bool ResourceCache::hasUniqueResource(const UniqueKey& uniqueKey) {
+  return getUniqueResource(uniqueKey) != nullptr;
+}
+
+Resource* ResourceCache::getUniqueResource(const UniqueKey& uniqueKey) {
+  if (uniqueKey.empty()) {
+    return nullptr;
+  }
+  auto result = uniqueKeyMap.find(uniqueKey.uniqueID());
+  if (result == uniqueKeyMap.end()) {
+    return nullptr;
+  }
+  auto resource = result->second;
+  if (!resource->hasValidUniqueKey()) {
+    uniqueKeyMap.erase(result);
+    resource->uniqueKey = {};
+    resource->uniqueKeyGeneration = 0;
+    return nullptr;
+  }
+  return resource;
+}
+
+void ResourceCache::AddToList(std::list<Resource*>& list, Resource* resource) {
+  list.push_front(resource);
+  resource->cachedPosition = list.begin();
+}
+
+void ResourceCache::RemoveFromList(std::list<Resource*>& list, Resource* resource) {
+  list.erase(resource->cachedPosition);
+}
+
+void ResourceCache::NotifyReferenceReachedZero(Resource* resource) {
+  if (resource->context) {
+    resource->context->resourceCache()->processUnreferencedResource(resource);
+  } else {
+    // Properties on a Resource are only read and written while the Context is locked. If the
+    // context is nullptr here, the Resource has already been released and can be deleted directly.
+    delete resource;
+  }
+}
+
+void ResourceCache::processUnreferencedResource(Resource* resource) {
+  if (currentThreadCaches.count(this) == 0) {
+    // Once the strongReferences list has been cleared, Resources on other threads may also
+    // trigger NotifyReferenceReachedZero(). If the triggering thread is not the thread that
+    // currently holds the context lock, queue the resource and process it later.
+    std::lock_guard autoLock(removeLocker);
+    pendingPurgeableResources.push_back(resource);
+    return;
+  }
+  // Nested Resources are not allowed; otherwise child items could not be released when the
+  // Context is destroyed.
+  DEBUG_ASSERT(!purgingResource);
+  // NotifyReferenceReachedZero() can only be triggered while the context is locked.
+  DEBUG_ASSERT(context->device()->contextLocked);
+  RemoveFromList(nonpurgeableResources, resource);
+  if (resource->scratchKey.isValid()) {
+    auto hasBudget = purgeUntilMemoryTo(maxBytes - resource->memoryUsage());
+    if (hasBudget) {
+      AddToList(purgeableResources, resource);
+      purgeableBytes += resource->memoryUsage();
+      resource->lastUsedTime = Clock::Now();
+      return;
+    }
+  }
+  removeResource(resource);
+}
+
+void ResourceCache::changeUniqueKey(Resource* resource, const UniqueKey& uniqueKey) {
+  auto result = uniqueKeyMap.find(uniqueKey.uniqueID());
+  if (result != uniqueKeyMap.end()) {
+    result->second->removeUniqueKey();
+  }
+  if (!resource->uniqueKey.empty()) {
+    uniqueKeyMap.erase(resource->uniqueKey.uniqueID());
+  }
+  resource->uniqueKey = uniqueKey;
+  resource->uniqueKeyGeneration = uniqueKey.uniqueID();
+  uniqueKeyMap[uniqueKey.uniqueID()] = resource;
+}
+
+void ResourceCache::removeUniqueKey(Resource* resource) {
+  uniqueKeyMap.erase(resource->uniqueKey.uniqueID());
+  resource->uniqueKey = {};
+  resource->uniqueKeyGeneration = 0;
+}
+
+std::shared_ptr<Resource> ResourceCache::wrapResource(Resource* resource) {
+  AddToList(nonpurgeableResources, resource);
+  auto result = std::shared_ptr<Resource>(resource, ResourceCache::NotifyReferenceReachedZero);
+  result->weakThis = result;
+  return result;
+}
+
+std::shared_ptr<Resource> ResourceCache::addResource(Resource* resource) {
+  if (resource->scratchKey.isValid()) {
+    scratchKeyMap[resource->scratchKey].push_back(resource);
+  }
+  totalBytes += resource->memoryUsage();
+  return wrapResource(resource);
+}
+
+void ResourceCache::removeResource(Resource* resource) {
+  if (!resource->uniqueKey.empty()) {
+    removeUniqueKey(resource);
+  }
+  if (resource->scratchKey.isValid()) {
+    auto result = scratchKeyMap.find(resource->scratchKey);
+    if (result != scratchKeyMap.end()) {
+      auto& list = result->second;
+      list.erase(std::remove(list.begin(), list.end(), resource), list.end());
+      if (list.empty()) {
+        scratchKeyMap.erase(resource->scratchKey);
+      }
+    }
+  }
+  purgingResource = true;
+  resource->onReleaseGPU();
+  purgingResource = false;
+  totalBytes -= resource->memoryUsage();
+  if (resource->isPurgeable()) {
+    delete resource;
+  } else {
+    // Mark the Resource as released; it can be deleted directly once the external reference
+    // count reaches 0.
+    resource->context = nullptr;
+  }
+}
+
+void ResourceCache::purgeNotUsedSince(int64_t purgeTime, bool scratchResourcesOnly) {
+  purgeResourcesByLRU(scratchResourcesOnly,
+                      [&](Resource* resource) { return resource->lastUsedTime >= purgeTime; });
+}
+
+bool ResourceCache::purgeUntilMemoryTo(size_t bytesLimit, bool scratchResourcesOnly) {
+  purgeResourcesByLRU(scratchResourcesOnly, [&](Resource*) { return totalBytes <= bytesLimit; });
+  return totalBytes <= bytesLimit;
+}
+
+void ResourceCache::purgeResourcesByLRU(bool scratchResourcesOnly,
+                                        const std::function<bool(Resource*)>& satisfied) {
+  std::vector<Resource*> needToPurge = {};
+  for (auto item = purgeableResources.rbegin(); item != purgeableResources.rend(); item++) {
+    auto* resource = *item;
+    if (satisfied(resource)) {
+      break;
+    }
+    if (!scratchResourcesOnly || !resource->hasValidUniqueKey()) {
+      needToPurge.push_back(resource);
+    }
+  }
+  for (auto& resource : needToPurge) {
+    RemoveFromList(purgeableResources, resource);
+    purgeableBytes -= resource->memoryUsage();
+    removeResource(resource);
+  }
+}
+}  // namespace tgfx
diff --git a/src/gpu/ResourceProvider.cpp b/src/gpu/ResourceProvider.cpp
new file mode 100644
index 00000000..c58a9345
--- /dev/null
+++ b/src/gpu/ResourceProvider.cpp
@@ -0,0 +1,130 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
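
A hedged housekeeping sketch for the cache logic above. It assumes setCacheLimit() and purgeNotUsedSince() are reachable through Context::resourceCache() (the accessor used by Resource.cpp earlier), and that the caller's age value shares Clock::Now()'s time unit, since lastUsedTime is assigned from Clock::Now().

#include "tgfx/gpu/Context.h"
#include "tgfx/utils/Clock.h"

// Shrink the budget below the 96MB default, then drop purgeable scratch resources whose
// lastUsedTime is older than the cutoff.
void trimCache(tgfx::Context* context, int64_t maxAge) {
  auto cache = context->resourceCache();
  cache->setCacheLimit(64 * (1 << 20));
  cache->purgeNotUsedSince(tgfx::Clock::Now() - maxAge, true);  // scratch-only purge
}
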
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ResourceProvider.h" +#include "GradientCache.h" +#include "utils/Log.h" + +namespace tgfx { +ResourceProvider::~ResourceProvider() { + if (_gradientCache) { + DEBUG_ASSERT(_gradientCache->empty()); + } + DEBUG_ASSERT(_aaQuadIndexBuffer == nullptr); + DEBUG_ASSERT(_nonAAQuadIndexBuffer == nullptr); + delete _gradientCache; +} + +std::shared_ptr ResourceProvider::getGradient(const Color* colors, const float* positions, + int count) { + if (_gradientCache == nullptr) { + _gradientCache = new GradientCache(); + } + return _gradientCache->getGradient(context, colors, positions, count); +} + +std::shared_ptr ResourceProvider::nonAAQuadIndexBuffer() { + if (_nonAAQuadIndexBuffer == nullptr) { + _nonAAQuadIndexBuffer = createNonAAQuadIndexBuffer(); + } + return _nonAAQuadIndexBuffer; +} + +std::shared_ptr ResourceProvider::aaQuadIndexBuffer() { + if (_aaQuadIndexBuffer == nullptr) { + _aaQuadIndexBuffer = createAAQuadIndexBuffer(); + } + return _aaQuadIndexBuffer; +} + +std::shared_ptr CreatePatternedIndexBuffer(Context* context, const uint16_t* pattern, + uint16_t patternSize, uint16_t reps, + uint16_t vertCount) { + auto size = static_cast(reps * patternSize); + auto* data = new (std::nothrow) uint16_t[size]; + if (data == nullptr) { + return nullptr; + } + for (uint16_t i = 0; i < reps; ++i) { + uint16_t baseIdx = i * patternSize; + auto baseVert = static_cast(i * vertCount); + for (uint16_t j = 0; j < patternSize; ++j) { + data[baseIdx + j] = baseVert + pattern[j]; + } + } + auto buffer = GpuBuffer::Make(context, BufferType::Index, data, size * sizeof(uint16_t)); + delete[] data; + return buffer; +} + +static constexpr uint16_t kMaxNumNonAAQuads = 1 << 8; // max possible: (1 << 14) - 1; +static constexpr uint16_t kVerticesPerNonAAQuad = 4; +static constexpr uint16_t kIndicesPerNonAAQuad = 6; + +std::shared_ptr ResourceProvider::createNonAAQuadIndexBuffer() { + // clang-format off + static constexpr uint16_t kNonAAQuadIndexPattern[] = { + 0, 1, 2, 2, 1, 3 + }; + // clang-format on + return CreatePatternedIndexBuffer(context, kNonAAQuadIndexPattern, kIndicesPerNonAAQuad, + kMaxNumNonAAQuads, kVerticesPerNonAAQuad); +} + +uint16_t ResourceProvider::MaxNumNonAAQuads() { + return kMaxNumNonAAQuads; +} + +uint16_t ResourceProvider::NumIndicesPerNonAAQuad() { + return kIndicesPerNonAAQuad; +} + +static constexpr uint16_t kMaxNumAAQuads = 1 << 6; // max possible: (1 << 13) - 1; +static constexpr uint16_t kVerticesPerAAQuad = 8; +static constexpr uint16_t kIndicesPerAAQuad = 30; + +std::shared_ptr ResourceProvider::createAAQuadIndexBuffer() { + // clang-format off + static constexpr uint16_t kAAQuadIndexPattern[] = { + 0, 1, 2, 1, 3, 2, + 0, 4, 1, 4, 5, 1, + 0, 6, 4, 0, 2, 6, + 2, 3, 6, 3, 7, 6, + 1, 5, 3, 3, 5, 7, + }; + // clang-format on + return CreatePatternedIndexBuffer(context, kAAQuadIndexPattern, kIndicesPerAAQuad, kMaxNumAAQuads, + kVerticesPerAAQuad); +} + +uint16_t ResourceProvider::MaxNumAAQuads() { + return kMaxNumAAQuads; +} + +uint16_t ResourceProvider::NumIndicesPerAAQuad() { + return kIndicesPerAAQuad; +} + +void ResourceProvider::releaseAll() { + if (_gradientCache) { + _gradientCache->releaseAll(); + } + _aaQuadIndexBuffer = nullptr; + _nonAAQuadIndexBuffer = nullptr; +} +} // namespace tgfx diff --git a/src/gpu/ResourceProvider.h b/src/gpu/ResourceProvider.h new file mode 100644 index 00000000..60e86aee --- /dev/null +++ b/src/gpu/ResourceProvider.h @@ -0,0 +1,61 @@ 
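
The repeat-and-offset scheme in CreatePatternedIndexBuffer() above is easiest to see with the non-AA quad pattern written out:

// With pattern {0, 1, 2, 2, 1, 3}, 6 indices per quad and 4 vertices per quad:
//   quad 0 -> indices 0, 1, 2, 2, 1, 3   (vertices 0..3)
//   quad 1 -> indices 4, 5, 6, 6, 5, 7   (vertices 4..7)
//   quad i -> pattern values offset by i * 4, stored at positions [6 * i, 6 * i + 6)
// A draw of quad i therefore uses baseIndex = i * NumIndicesPerNonAAQuad() and
// indexCount = NumIndicesPerNonAAQuad(), and one shared buffer covers up to
// MaxNumNonAAQuads() == 256 quads.
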
+///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "GpuBuffer.h" +#include "gpu/Texture.h" +#include "tgfx/gpu/Context.h" + +namespace tgfx { +class GradientCache; + +class ResourceProvider { + public: + explicit ResourceProvider(Context* context) : context(context) { + } + + ~ResourceProvider(); + + std::shared_ptr getGradient(const Color* colors, const float* positions, int count); + + std::shared_ptr nonAAQuadIndexBuffer(); + + static uint16_t MaxNumNonAAQuads(); + + static uint16_t NumIndicesPerNonAAQuad(); + + std::shared_ptr aaQuadIndexBuffer(); + + static uint16_t MaxNumAAQuads(); + + static uint16_t NumIndicesPerAAQuad(); + + void releaseAll(); + + private: + std::shared_ptr createNonAAQuadIndexBuffer(); + + std::shared_ptr createAAQuadIndexBuffer(); + + Context* context = nullptr; + GradientCache* _gradientCache = nullptr; + std::shared_ptr _aaQuadIndexBuffer; + std::shared_ptr _nonAAQuadIndexBuffer; +}; +} // namespace tgfx diff --git a/src/gpu/SLType.h b/src/gpu/SLType.h new file mode 100644 index 00000000..9acf05a9 --- /dev/null +++ b/src/gpu/SLType.h @@ -0,0 +1,42 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +/** + * Types of shader-language-specific boxed variables we can create. 
+ */ +enum class SLType { + Void, + Float, + Float2, + Float3, + Float4, + Float2x2, + Float3x3, + Float4x4, + Int, + Int2, + Int3, + Int4, + Texture2DSampler, + TextureExternalSampler, + Texture2DRectSampler, +}; +} // namespace tgfx diff --git a/src/gpu/SamplerHandle.h b/src/gpu/SamplerHandle.h new file mode 100644 index 00000000..96a0609e --- /dev/null +++ b/src/gpu/SamplerHandle.h @@ -0,0 +1,43 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include + +namespace tgfx { +class SamplerHandle { + public: + SamplerHandle() = default; + + explicit SamplerHandle(size_t value) : value(value) { + } + + bool isValid() const { + return value != ULONG_MAX; + } + + size_t toIndex() const { + return value; + } + + private: + size_t value = ULONG_MAX; +}; +} // namespace tgfx diff --git a/src/gpu/SamplerState.cpp b/src/gpu/SamplerState.cpp new file mode 100644 index 00000000..b3ed67f1 --- /dev/null +++ b/src/gpu/SamplerState.cpp @@ -0,0 +1,50 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
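
A two-line sketch of the SamplerHandle contract above: it is nothing more than an optional index, with ULONG_MAX as the "no sampler" sentinel.

tgfx::SamplerHandle invalid;   // default-constructed: isValid() == false
tgfx::SamplerHandle first(0);  // wraps index 0: isValid() == true, toIndex() == 0
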
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "SamplerState.h" + +namespace tgfx { +static SamplerState::WrapMode TileModeToWrapMode(TileMode tileMode) { + switch (tileMode) { + case TileMode::Clamp: + return SamplerState::WrapMode::Clamp; + case TileMode::Repeat: + return SamplerState::WrapMode::Repeat; + case TileMode::Mirror: + return SamplerState::WrapMode::MirrorRepeat; + case TileMode::Decal: + return SamplerState::WrapMode::ClampToBorder; + } +} + +SamplerState::SamplerState(TileMode tileMode) { + wrapModeX = TileModeToWrapMode(tileMode); + wrapModeY = wrapModeX; +} + +SamplerState::SamplerState(TileMode tileModeX, TileMode tileModeY, SamplingOptions sampling) + : filterMode(sampling.filterMode), mipMapMode(sampling.mipMapMode) { + wrapModeX = TileModeToWrapMode(tileModeX); + wrapModeY = TileModeToWrapMode(tileModeY); +} + +bool operator==(const SamplerState& a, const SamplerState& b) { + return a.wrapModeX == b.wrapModeX && a.wrapModeY == b.wrapModeY && a.filterMode == b.filterMode && + a.mipMapMode == b.mipMapMode; +} +} // namespace tgfx diff --git a/src/gpu/SamplerState.h b/src/gpu/SamplerState.h new file mode 100644 index 00000000..b513c3fc --- /dev/null +++ b/src/gpu/SamplerState.h @@ -0,0 +1,63 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/SamplingOptions.h" +#include "tgfx/core/TileMode.h" + +namespace tgfx { +/** + * Represents the tile modes used to access a texture. 
+ */ +class SamplerState { + public: + enum class WrapMode { + Clamp, + Repeat, + MirrorRepeat, + ClampToBorder, + }; + + SamplerState() = default; + + explicit SamplerState(TileMode tileMode); + + SamplerState(TileMode tileModeX, TileMode tileModeY, SamplingOptions sampling); + + SamplerState(WrapMode wrapModeX, WrapMode wrapModeY, FilterMode filterMode = FilterMode::Linear, + MipMapMode mipMapMode = MipMapMode::None) + : wrapModeX(wrapModeX), wrapModeY(wrapModeY), filterMode(filterMode), mipMapMode(mipMapMode) { + } + + explicit SamplerState(SamplingOptions sampling) + : filterMode(sampling.filterMode), mipMapMode(sampling.mipMapMode) { + } + + bool mipMapped() const { + return mipMapMode != MipMapMode::None; + } + + friend bool operator==(const SamplerState& a, const SamplerState& b); + + WrapMode wrapModeX = WrapMode::Clamp; + WrapMode wrapModeY = WrapMode::Clamp; + FilterMode filterMode = FilterMode::Linear; + MipMapMode mipMapMode = MipMapMode::None; +}; +} // namespace tgfx diff --git a/src/gpu/Semaphore.h b/src/gpu/Semaphore.h new file mode 100644 index 00000000..91ba8e8d --- /dev/null +++ b/src/gpu/Semaphore.h @@ -0,0 +1,36 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/gpu/Backend.h" + +namespace tgfx { +/** + * Wrapper class for a backend semaphore object. + */ +class Semaphore { + public: + static std::unique_ptr Wrap(const BackendSemaphore* backendSemaphore); + + virtual ~Semaphore() = default; + + virtual BackendSemaphore getBackendSemaphore() const = 0; +}; +} // namespace tgfx diff --git a/src/gpu/ShaderBlend.cpp b/src/gpu/ShaderBlend.cpp new file mode 100644 index 00000000..206f1ae0 --- /dev/null +++ b/src/gpu/ShaderBlend.cpp @@ -0,0 +1,55 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
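
A short sketch of the TileMode-to-WrapMode mapping implemented in SamplerState.cpp above; the default-constructed SamplingOptions is an assumption (Linear filtering and no mipmaps, matching the defaults used elsewhere in this patch).

tgfx::SamplerState repeated(tgfx::TileMode::Repeat);  // both axes -> WrapMode::Repeat
tgfx::SamplerState decal(tgfx::TileMode::Decal);      // both axes -> WrapMode::ClampToBorder
tgfx::SamplerState mixed(tgfx::TileMode::Clamp, tgfx::TileMode::Mirror, tgfx::SamplingOptions());
// mixed.wrapModeX == WrapMode::Clamp, mixed.wrapModeY == WrapMode::MirrorRepeat
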
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ShaderBlend.h" +#include "gpu/processors/FragmentProcessor.h" +#include "gpu/processors/XfermodeFragmentProcessor.h" + +namespace tgfx { +std::shared_ptr Shader::MakeBlend(BlendMode mode, std::shared_ptr dst, + std::shared_ptr src) { + switch (mode) { + case BlendMode::Clear: + return MakeColorShader(Color::Transparent()); + case BlendMode::Dst: + return dst; + case BlendMode::Src: + return src; + default: + break; + } + if (dst == nullptr || src == nullptr) { + return nullptr; + } + auto shader = std::make_shared(mode, std::move(dst), std::move(src)); + shader->weakThis = shader; + return shader; +} + +std::unique_ptr ShaderBlend::asFragmentProcessor(const FPArgs& args) const { + auto fpA = dst->asFragmentProcessor(args); + if (fpA == nullptr) { + return nullptr; + } + auto fpB = src->asFragmentProcessor(args); + if (fpB == nullptr) { + return nullptr; + } + return XfermodeFragmentProcessor::MakeFromTwoProcessors(std::move(fpB), std::move(fpA), mode); +} +} // namespace tgfx diff --git a/src/gpu/ShaderBlend.h b/src/gpu/ShaderBlend.h new file mode 100644 index 00000000..cbfced27 --- /dev/null +++ b/src/gpu/ShaderBlend.h @@ -0,0 +1,37 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Shader.h" + +namespace tgfx { +class ShaderBlend : public Shader { + public: + ShaderBlend(BlendMode mode, std::shared_ptr dst, std::shared_ptr src) + : mode(mode), dst(std::move(dst)), src(std::move(src)) { + } + + std::unique_ptr asFragmentProcessor(const FPArgs& args) const override; + + private: + BlendMode mode; + std::shared_ptr dst; + std::shared_ptr src; +}; +} // namespace tgfx diff --git a/src/gpu/ShaderBuilder.cpp b/src/gpu/ShaderBuilder.cpp new file mode 100644 index 00000000..01e5f3a8 --- /dev/null +++ b/src/gpu/ShaderBuilder.cpp @@ -0,0 +1,161 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ShaderBuilder.h" +#include "ProgramBuilder.h" +#include "Swizzle.h" +#include "stdarg.h" + +namespace tgfx { +// number of bytes (on the stack) to receive the printf result +static constexpr size_t kBufferSize = 1024; + +static bool NeedsAppendEnter(const std::string& code) { + if (code.empty()) { + return false; + } + auto& ch = code[code.size() - 1]; + return ch == ';' || ch == '{' || ch == '}'; +} + +ShaderBuilder::ShaderBuilder(ProgramBuilder* program) : programBuilder(program) { + for (int i = 0; i <= Type::Code; ++i) { + shaderStrings.emplace_back(""); + } + shaderStrings[Type::Main] = "void main() {"; + indentation = 1; + atLineStart = true; +} + +void ShaderBuilder::setPrecisionQualifier(const std::string& precision) { + shaderStrings[Type::PrecisionQualifier] = precision; +} + +void ShaderBuilder::codeAppendf(const char* format, ...) { + char buffer[kBufferSize]; + va_list args; + va_start(args, format); + auto length = vsnprintf(buffer, kBufferSize, format, args); + va_end(args); + codeAppend(std::string(buffer, length)); +} + +void ShaderBuilder::codeAppend(const std::string& str) { + if (str.empty()) { + return; + } + appendIndentationIfNeeded(str); + auto& code = shaderStrings[Type::Code]; + code += str; + if (NeedsAppendEnter(code)) { + appendEnterIfNotEmpty(Type::Code); + } + atLineStart = code[code.size() - 1] == '\n'; +} + +void ShaderBuilder::addFunction(const std::string& str) { + appendEnterIfNotEmpty(Type::Functions); + shaderStrings[Type::Functions] += str; +} + +static std::string TextureSwizzleString(const Swizzle& swizzle) { + if (swizzle == Swizzle::RGBA()) { + return ""; + } + std::string ret = "."; + ret.append(swizzle.c_str()); + return ret; +} + +void ShaderBuilder::appendTextureLookup(SamplerHandle samplerHandle, const std::string& coordName) { + const auto& sampler = programBuilder->samplerVariable(samplerHandle); + codeAppendf("%s(%s, %s)", programBuilder->textureFuncName().c_str(), sampler.name().c_str(), + coordName.c_str()); + codeAppend(TextureSwizzleString(programBuilder->samplerSwizzle(samplerHandle))); +} + +void ShaderBuilder::addFeature(PrivateFeature featureBit, const std::string& extensionName) { + if ((featureBit & featuresAddedMask) == featureBit) { + return; + } + char buffer[kBufferSize]; + auto length = snprintf(buffer, kBufferSize, "#extension %s: require\n", extensionName.c_str()); + shaderStrings[Type::Extensions].append(buffer, length); + featuresAddedMask |= featureBit; +} + +std::string ShaderBuilder::getDeclarations(const std::vector& vars, + ShaderFlags flag) const { + std::string ret; + for (const auto& var : vars) { + ret += programBuilder->getShaderVarDeclarations(var, flag); + ret += ";\n"; + } + return ret; +} + +void ShaderBuilder::finalize(ShaderFlags visibility) { + if (finalized) { + return; + } + shaderStrings[Type::VersionDecl] = programBuilder->versionDeclString(); + shaderStrings[Type::Uniforms] += programBuilder->getUniformDeclarations(visibility); + shaderStrings[Type::Inputs] += getDeclarations(inputs, visibility); + shaderStrings[Type::Outputs] += getDeclarations(outputs, visibility); + onFinalize(); + // append the 'footer' to code + shaderStrings[Type::Code] += "}\n"; + + finalized = true; +} + +void ShaderBuilder::appendEnterIfNotEmpty(uint8_t type) { + if (shaderStrings[type].empty()) { + return; + 
} + shaderStrings[type] += "\n"; +} + +void ShaderBuilder::appendIndentationIfNeeded(const std::string& code) { + if (indentation <= 0 || !atLineStart) { + return; + } + if (code.find('}') != std::string::npos) { + --indentation; + } + for (int i = 0; i < indentation; ++i) { + shaderStrings[Type::Code] += " "; + } + if (code.find('{') != std::string::npos) { + ++indentation; + } + atLineStart = false; +} + +std::string ShaderBuilder::shaderString() { + std::string fragment; + for (auto& str : shaderStrings) { + if (str.empty()) { + continue; + } + fragment += str; + fragment += "\n"; + } + return fragment; +} +} // namespace tgfx diff --git a/src/gpu/ShaderBuilder.h b/src/gpu/ShaderBuilder.h new file mode 100644 index 00000000..fab67f0c --- /dev/null +++ b/src/gpu/ShaderBuilder.h @@ -0,0 +1,113 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "SamplerHandle.h" +#include "ShaderVar.h" + +namespace tgfx { +class ProgramBuilder; + +/** + * Features that should only be enabled internally by the builders. + */ +enum class PrivateFeature : unsigned { + None = 0, + OESTexture = 1 << 0, + FramebufferFetch = 1 << 1, + TGFX_MARK_AS_BITMASK_ENUM(FramebufferFetch) +}; + +class ShaderBuilder { + public: + explicit ShaderBuilder(ProgramBuilder* program); + + virtual ~ShaderBuilder() = default; + + void setPrecisionQualifier(const std::string& precision); + + /** + * Appends a 2D texture sample. The vec length and swizzle order of the result depends on the + * TextureSampler associated with the SamplerHandle. + */ + void appendTextureLookup(SamplerHandle samplerHandle, const std::string& coordName); + + /** + * Called by Processors to add code to one of the shaders. + */ + void codeAppendf(const char format[], ...); + + void codeAppend(const std::string& str); + + void addFunction(const std::string& str); + + /** + * Combines the various parts of the shader to create a single finalized shader string. 
+ */ + void finalize(ShaderFlags visibility); + + std::string shaderString(); + + protected: + class Type { + public: + static const uint8_t VersionDecl = 0; + static const uint8_t Extensions = 1; + static const uint8_t Definitions = 2; + static const uint8_t PrecisionQualifier = 3; + static const uint8_t Uniforms = 4; + static const uint8_t Inputs = 5; + static const uint8_t Outputs = 6; + static const uint8_t Functions = 7; + static const uint8_t Main = 8; + static const uint8_t Code = 9; + }; + + /** + * A general function which enables an extension in a shader if the feature bit is not present + */ + void addFeature(PrivateFeature featureBit, const std::string& extensionName); + + void nextStage() { + codeIndex++; + } + + virtual void onFinalize() = 0; + + void appendEnterIfNotEmpty(uint8_t type); + + void appendIndentationIfNeeded(const std::string& code); + + std::string getDeclarations(const std::vector& vars, ShaderFlags flag) const; + + std::vector shaderStrings; + ProgramBuilder* programBuilder = nullptr; + std::vector inputs; + std::vector outputs; + PrivateFeature featuresAddedMask = PrivateFeature::None; + int codeIndex = 0; + bool finalized = false; + int indentation = 0; + bool atLineStart = false; + + friend class ProgramBuilder; + + friend class GLUniformHandler; +}; +} // namespace tgfx diff --git a/src/gpu/ShaderVar.h b/src/gpu/ShaderVar.h new file mode 100644 index 00000000..31420b46 --- /dev/null +++ b/src/gpu/ShaderVar.h @@ -0,0 +1,82 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
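
A hedged sketch of how a processor typically feeds code into a ShaderBuilder during program assembly. fragBuilder, samplerHandle, coordName, inputColor and outputColor are assumed to be supplied by the surrounding ProgramBuilder machinery and are not defined in this patch.

// codeAppend()/codeAppendf() track '{' and '}' to keep the generated source indented, and
// appendTextureLookup() emits the backend's texture-lookup call plus the sampler's swizzle.
fragBuilder->codeAppendf("vec4 color = %s;", inputColor.c_str());
fragBuilder->codeAppendf("%s = ", outputColor.c_str());
fragBuilder->appendTextureLookup(samplerHandle, coordName);
fragBuilder->codeAppend(" * color;");
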
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include "BitmaskOperators.h" +#include "SLType.h" + +namespace tgfx { +enum class ShaderFlags : unsigned { + None = 0, + Vertex = 1 << 0, + Fragment = 1 << 1, + TGFX_MARK_AS_BITMASK_ENUM(Fragment) +}; + +class ShaderVar { + public: + enum class TypeModifier { + None, + Attribute, + Varying, + Uniform, + Out, + }; + + ShaderVar() = default; + + ShaderVar(std::string name, SLType type) : _type(type), _name(std::move(name)) { + } + + ShaderVar(std::string name, SLType type, TypeModifier typeModifier) + : _type(type), _typeModifier(typeModifier), _name(std::move(name)) { + } + + void setName(const std::string& name) { + _name = name; + } + + const std::string& name() const { + return _name; + } + + void setType(SLType type) { + _type = type; + } + + SLType type() const { + return _type; + } + + void setTypeModifier(TypeModifier type) { + _typeModifier = type; + } + + TypeModifier typeModifier() const { + return _typeModifier; + } + + private: + SLType _type = SLType::Void; + TypeModifier _typeModifier = TypeModifier::None; + std::string _name; +}; +} // namespace tgfx diff --git a/src/gpu/StagedUniformBuffer.cpp b/src/gpu/StagedUniformBuffer.cpp new file mode 100644 index 00000000..583bd48b --- /dev/null +++ b/src/gpu/StagedUniformBuffer.cpp @@ -0,0 +1,36 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "StagedUniformBuffer.h" + +namespace tgfx { +std::string StagedUniformBuffer::GetMangledName(const std::string& name, int stageIndex) { + if (stageIndex >= 0) { + return name + "_Stage" + std::to_string(stageIndex); + } + return name; +} + +StagedUniformBuffer::StagedUniformBuffer(std::vector uniforms) + : UniformBuffer(std::move(uniforms)) { +} + +std::string StagedUniformBuffer::getUniformKey(const std::string& name) const { + return GetMangledName(name, stageIndex); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/gpu/StagedUniformBuffer.h b/src/gpu/StagedUniformBuffer.h new file mode 100644 index 00000000..086a376c --- /dev/null +++ b/src/gpu/StagedUniformBuffer.h @@ -0,0 +1,62 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
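
The mangling rule in GetMangledName() above, written out:

// GetMangledName("Color", 2)  -> "Color_Stage2"
// GetMangledName("Color", -1) -> "Color"   (stageIndex starts at -1, i.e. unmangled)
// getUniformKey() applies the buffer's current stageIndex, so two processors can both declare a
// "Color" uniform without colliding once the stage is advanced between them.
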
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/UniformBuffer.h" +#include "tgfx/gpu/Context.h" + +namespace tgfx { +/** + * StagedUniformBuffer is a UniformBuffer that supports multiple stages. + */ +class StagedUniformBuffer : public UniformBuffer { + public: + /** + * Returns the mangled name for the specified uniform name and stage index. + */ + static std::string GetMangledName(const std::string& name, int stageIndex); + + explicit StagedUniformBuffer(std::vector uniforms); + + /** + * advanceStage is called by Program between each processor's set data. It increments the stage + * offset for uniform name mangling. If advanceStage is not called, the uniform names will not be + * mangled. + */ + void advanceStage() { + stageIndex++; + } + + /** + * Resets the stage offset and uploads the uniform data to the GPU. + */ + void resetStage() { + stageIndex = -1; + } + + protected: + /** + * Generates a uniform key based on the specified name and the current stage index. + */ + std::string getUniformKey(const std::string& name) const override; + + private: + int stageIndex = -1; +}; +} // namespace tgfx diff --git a/src/gpu/Surface.cpp b/src/gpu/Surface.cpp new file mode 100644 index 00000000..c5df48c6 --- /dev/null +++ b/src/gpu/Surface.cpp @@ -0,0 +1,263 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/gpu/Surface.h" +#include "DrawingManager.h" +#include "core/PixelBuffer.h" +#include "gpu/RenderTarget.h" +#include "utils/Log.h" +#include "utils/PixelFormatUtil.h" + +namespace tgfx { +std::shared_ptr Surface::Make(Context* context, int width, int height, bool alphaOnly, + int sampleCount, bool mipMapped, + const SurfaceOptions* options) { + return Make(context, width, height, alphaOnly ? 
ColorType::ALPHA_8 : ColorType::RGBA_8888,
+              sampleCount, mipMapped, options);
+}
+
+std::shared_ptr<Surface> Surface::Make(Context* context, int width, int height,
+                                       ColorType colorType, int sampleCount, bool mipMapped,
+                                       const SurfaceOptions* options) {
+  auto caps = context->caps();
+  auto pixelFormat = ColorTypeToPixelFormat(colorType);
+  if (!caps->isFormatRenderable(pixelFormat)) {
+    return nullptr;
+  }
+  auto texture =
+      Texture::MakeFormat(context, width, height, pixelFormat, ImageOrigin::TopLeft, mipMapped);
+  if (texture == nullptr) {
+    return nullptr;
+  }
+  sampleCount = caps->getSampleCount(sampleCount, pixelFormat);
+  auto renderTarget = RenderTarget::MakeFrom(texture.get(), sampleCount);
+  if (renderTarget == nullptr) {
+    return nullptr;
+  }
+  auto surface = new Surface(renderTarget, texture, options, false);
+  // Render targets created internally are cleared by default.
+  surface->getCanvas()->clear();
+  return std::shared_ptr<Surface>(surface);
+}
+
+std::shared_ptr<Surface> Surface::MakeFrom(Context* context,
+                                           const BackendRenderTarget& renderTarget,
+                                           ImageOrigin origin, const SurfaceOptions* options) {
+  auto rt = RenderTarget::MakeFrom(context, renderTarget, origin);
+  return MakeFrom(std::move(rt), options);
+}
+
+std::shared_ptr<Surface> Surface::MakeFrom(Context* context, const BackendTexture& backendTexture,
+                                           ImageOrigin origin, int sampleCount,
+                                           const SurfaceOptions* options) {
+  auto texture = Texture::MakeFrom(context, backendTexture, origin);
+  return MakeFrom(std::move(texture), sampleCount, options);
+}
+
+std::shared_ptr<Surface> Surface::MakeFrom(Context* context, HardwareBufferRef hardwareBuffer,
+                                           int sampleCount, const SurfaceOptions* options) {
+  auto texture = Texture::MakeFrom(context, hardwareBuffer);
+  return MakeFrom(std::move(texture), sampleCount, options);
+}
+
+std::shared_ptr<Surface> Surface::MakeFrom(std::shared_ptr<RenderTarget> renderTarget,
+                                           const SurfaceOptions* options) {
+  if (renderTarget == nullptr) {
+    return nullptr;
+  }
+  return std::shared_ptr<Surface>(new Surface(std::move(renderTarget), nullptr, options));
+}
+
+std::shared_ptr<Surface> Surface::MakeFrom(std::shared_ptr<Texture> texture, int sampleCount,
+                                           const SurfaceOptions* options) {
+  if (texture == nullptr || texture->isYUV()) {
+    return nullptr;
+  }
+  auto renderTarget = RenderTarget::MakeFrom(texture.get(), sampleCount);
+  if (renderTarget == nullptr) {
+    return nullptr;
+  }
+  return std::shared_ptr<Surface>(new Surface(renderTarget, std::move(texture), options));
+}
+
+Surface::Surface(std::shared_ptr<RenderTarget> renderTarget, std::shared_ptr<Texture> texture,
+                 const SurfaceOptions* options, bool externalTexture)
+    : renderTarget(std::move(renderTarget)), texture(std::move(texture)),
+      externalTexture(externalTexture) {
+  DEBUG_ASSERT(this->renderTarget != nullptr);
+  if (options != nullptr) {
+    surfaceOptions = *options;
+  }
+}
+
+Surface::~Surface() {
+  delete canvas;
+}
+
+Context* Surface::getContext() const {
+  return renderTarget->getContext();
+}
+
+int Surface::width() const {
+  return renderTarget->width();
+}
+
+int Surface::height() const {
+  return renderTarget->height();
+}
+
+ImageOrigin Surface::origin() const {
+  return renderTarget->origin();
+}
+
+std::shared_ptr<Texture> Surface::getTexture() {
+  if (texture == nullptr) {
+    return nullptr;
+  }
+  flush();
+  return texture;
+}
+
+BackendRenderTarget Surface::getBackendRenderTarget() {
+  flush();
+  return renderTarget->getBackendRenderTarget();
+}
+
+BackendTexture Surface::getBackendTexture() {
+  if (texture == nullptr) {
+    return {};
+  }
+  flush();
+  return texture->getBackendTexture();
+}
+
+HardwareBufferRef Surface::getHardwareBuffer() {
+  auto
hardwareBuffer = texture ? texture->getHardwareBuffer() : nullptr; + if (hardwareBuffer == nullptr) { + return nullptr; + } + flushAndSubmit(true); + return hardwareBuffer; +} + +bool Surface::wait(const BackendSemaphore& waitSemaphore) { + return renderTarget->getContext()->wait(waitSemaphore); +} + +Canvas* Surface::getCanvas() { + if (canvas == nullptr) { + canvas = new Canvas(this); + } + return canvas; +} + +bool Surface::flush(BackendSemaphore* signalSemaphore) { + auto semaphore = Semaphore::Wrap(signalSemaphore); + renderTarget->getContext()->drawingManager()->newTextureResolveRenderTask(this); + auto result = renderTarget->getContext()->drawingManager()->flush(semaphore.get()); + if (signalSemaphore != nullptr) { + *signalSemaphore = semaphore->getBackendSemaphore(); + } + return result; +} + +void Surface::flushAndSubmit(bool syncCpu) { + flush(); + renderTarget->getContext()->submit(syncCpu); +} + +static std::shared_ptr MakeTextureFromRenderTarget(const RenderTarget* renderTarget, + bool discardContent = false) { + auto context = renderTarget->getContext(); + auto width = renderTarget->width(); + auto height = renderTarget->height(); + if (discardContent) { + return Texture::MakeFormat(context, width, height, renderTarget->format(), + renderTarget->origin()); + } + auto texture = + Texture::MakeFormat(context, width, height, renderTarget->format(), renderTarget->origin()); + if (texture == nullptr) { + return nullptr; + } + context->gpu()->copyRenderTargetToTexture(renderTarget, texture.get(), + Rect::MakeWH(width, height), Point::Zero()); + return texture; +} + +std::shared_ptr Surface::makeImageSnapshot() { + flush(); + if (cachedImage != nullptr) { + return cachedImage; + } + if (texture != nullptr && !externalTexture) { + cachedImage = Image::MakeFrom(texture); + } else { + auto textureCopy = MakeTextureFromRenderTarget(renderTarget.get()); + cachedImage = Image::MakeFrom(textureCopy); + } + return cachedImage; +} + +void Surface::aboutToDraw(bool discardContent) { + if (cachedImage == nullptr) { + return; + } + auto isUnique = cachedImage.unique(); + cachedImage = nullptr; + if (isUnique) { + return; + } + if (texture == nullptr || externalTexture) { + return; + } + auto newTexture = MakeTextureFromRenderTarget(renderTarget.get(), discardContent); + auto success = renderTarget->replaceTexture(newTexture.get()); + if (!success) { + LOGE("Surface::aboutToDraw(): Failed to replace the backing texture of the renderTarget!"); + } + texture = newTexture; +} + +Color Surface::getColor(int x, int y) { + uint8_t color[4]; + auto info = ImageInfo::Make(1, 1, ColorType::RGBA_8888, AlphaType::Premultiplied); + if (!readPixels(info, color, x, y)) { + return Color::Transparent(); + } + return Color::FromRGBA(color[0], color[1], color[2], color[3]); +} + +bool Surface::readPixels(const ImageInfo& dstInfo, void* dstPixels, int srcX, int srcY) { + if (dstInfo.isEmpty() || dstPixels == nullptr) { + return false; + } + auto hardwareBuffer = texture ? 
texture->getHardwareBuffer() : nullptr; + flushAndSubmit(hardwareBuffer != nullptr); + if (hardwareBuffer != nullptr) { + auto pixels = HardwareBufferLock(hardwareBuffer); + if (pixels != nullptr) { + auto info = HardwareBufferGetInfo(hardwareBuffer); + auto success = Pixmap(info, pixels).readPixels(dstInfo, dstPixels, srcX, srcY); + HardwareBufferUnlock(hardwareBuffer); + return success; + } + } + return renderTarget->readPixels(dstInfo, dstPixels, srcX, srcY); +} +} // namespace tgfx diff --git a/src/gpu/SurfaceDrawContext.cpp b/src/gpu/SurfaceDrawContext.cpp new file mode 100644 index 00000000..c5ca4882 --- /dev/null +++ b/src/gpu/SurfaceDrawContext.cpp @@ -0,0 +1,50 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "gpu/SurfaceDrawContext.h" +#include "DrawingManager.h" +#include "gpu/ops/FillRectOp.h" + +namespace tgfx { +void SurfaceDrawContext::addOp(std::unique_ptr op) { + getOpsTask()->addOp(std::move(op)); +} + +void SurfaceDrawContext::fillRectWithFP(const Rect& dstRect, const Matrix& localMatrix, + std::unique_ptr fp) { + if (fp == nullptr) { + return; + } + auto op = FillRectOp::Make({}, dstRect, Matrix::I(), localMatrix); + std::vector> colors; + colors.emplace_back(std::move(fp)); + op->setColors(std::move(colors)); + addOp(std::move(op)); +} + +OpsTask* SurfaceDrawContext::getOpsTask() { + if (opsTask == nullptr || opsTask->isClosed()) { + replaceOpsTask(); + } + return opsTask.get(); +} + +void SurfaceDrawContext::replaceOpsTask() { + opsTask = surface->getContext()->drawingManager()->newOpsTask(surface); +} +} // namespace tgfx diff --git a/src/gpu/SurfaceDrawContext.h b/src/gpu/SurfaceDrawContext.h new file mode 100644 index 00000000..f3efc595 --- /dev/null +++ b/src/gpu/SurfaceDrawContext.h @@ -0,0 +1,46 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/ops/OpsTask.h" +#include "gpu/processors/FragmentProcessor.h" +#include "tgfx/core/Matrix.h" +#include "tgfx/core/Rect.h" +#include "tgfx/gpu/Surface.h" + +namespace tgfx { +class SurfaceDrawContext { + public: + explicit SurfaceDrawContext(Surface* surface) : surface(surface) { + } + + void fillRectWithFP(const Rect& dstRect, const Matrix& localMatrix, + std::unique_ptr fp); + + void addOp(std::unique_ptr op); + + protected: + OpsTask* getOpsTask(); + + void replaceOpsTask(); + + Surface* surface = nullptr; + std::shared_ptr opsTask; +}; +} // namespace tgfx diff --git a/src/gpu/Swizzle.h b/src/gpu/Swizzle.h new file mode 100644 index 00000000..b6d9e8ea --- /dev/null +++ b/src/gpu/Swizzle.h @@ -0,0 +1,132 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/Color.h" + +namespace tgfx { +class Swizzle { + public: + constexpr Swizzle() : Swizzle("rgba") { + } + + constexpr bool operator==(const Swizzle& that) const { + return key == that.key; + } + constexpr bool operator!=(const Swizzle& that) const { + return !(*this == that); + } + + /** + * Compact representation of the swizzle suitable for a key. + */ + constexpr uint16_t asKey() const { + return key; + } + + /** + * 4 char null terminated string consisting only of chars 'r', 'g', 'b', 'a'. + */ + const char* c_str() const { + return swiz; + } + + char operator[](int i) const { + return swiz[i]; + } + + static constexpr Swizzle RGBA() { + return Swizzle("rgba"); + } + static constexpr Swizzle AAAA() { + return Swizzle("aaaa"); + } + static constexpr Swizzle RRRR() { + return Swizzle("rrrr"); + } + static constexpr Swizzle RRRA() { + return Swizzle("rrra"); + } + static constexpr Swizzle RGRG() { + return Swizzle("rgrg"); + } + static constexpr Swizzle RARA() { + return Swizzle("rara"); + } + + Color applyTo(const Color& color) const { + int idx; + uint32_t k = key; + // Index of the input color that should be mapped to output r. 
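+    // The key packs one 4-bit selector per output channel, low nibble first (see the
+    // constructor: CToI(c[0]) << 0 | CToI(c[1]) << 4 | ...), so the nibbles are decoded in
+    // r, g, b, a order; a selector equal to k1KeyValue maps to the constant 1.0f.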
+ idx = static_cast(k & 15); + float outR = ComponentIdxToFloat(color, idx); + k >>= 4; + idx = static_cast(k & 15); + float outG = ComponentIdxToFloat(color, idx); + k >>= 4; + idx = static_cast(k & 15); + float outB = ComponentIdxToFloat(color, idx); + k >>= 4; + idx = static_cast(k & 15); + float outA = ComponentIdxToFloat(color, idx); + return {outR, outG, outB, outA}; + } + + private: + char swiz[5]; + uint16_t key = 0; + + static constexpr int CToI(char c) { + switch (c) { + case 'r': + return 0; + case 'g': + return 1; + case 'b': + return 2; + case 'a': + return 3; + case '1': + return 4; + default: + return -1; + } + } + + constexpr explicit Swizzle(const char c[4]) + : swiz{c[0], c[1], c[2], c[3], '\0'}, + key((CToI(c[0]) << 0) | (CToI(c[1]) << 4) | (CToI(c[2]) << 8) | (CToI(c[3]) << 12)) { + } + + // The normal component swizzles map to key values 0-3. We set the key for constant 1 to the + // next int. + static const int k1KeyValue = 4; + + static float ComponentIdxToFloat(const Color& color, int idx) { + if (idx <= 3) { + return color[idx]; + } + if (idx == k1KeyValue) { + return 1.0f; + } + return -1.0f; + } +}; +} // namespace tgfx diff --git a/src/gpu/Texture.cpp b/src/gpu/Texture.cpp new file mode 100644 index 00000000..d8c09fa3 --- /dev/null +++ b/src/gpu/Texture.cpp @@ -0,0 +1,50 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "gpu/Texture.h" +#include "gpu/TextureSampler.h" + +namespace tgfx { +std::shared_ptr Texture::MakeFrom(Context* context, + std::shared_ptr imageBuffer, + bool mipMapped) { + if (context == nullptr || imageBuffer == nullptr) { + return nullptr; + } + return imageBuffer->onMakeTexture(context, mipMapped); +} + +Texture::Texture(int width, int height, ImageOrigin origin) + : _width(width), _height(height), _origin(origin) { +} + +Point Texture::getTextureCoord(float x, float y) const { + if (getSampler()->type() == TextureType::Rectangle) { + return {x, y}; + } + return {x / static_cast(width()), y / static_cast(height())}; +} + +BackendTexture Texture::getBackendTexture() const { + return getSampler()->getBackendTexture(width(), height()); +} + +HardwareBufferRef Texture::getHardwareBuffer() const { + return nullptr; +} +} // namespace tgfx diff --git a/src/gpu/Texture.h b/src/gpu/Texture.h new file mode 100644 index 00000000..172ca6aa --- /dev/null +++ b/src/gpu/Texture.h @@ -0,0 +1,211 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/TextureSampler.h" +#include "tgfx/core/ImageBuffer.h" +#include "tgfx/core/ImageInfo.h" +#include "tgfx/core/Point.h" +#include "tgfx/core/YUVColorSpace.h" +#include "tgfx/core/YUVData.h" +#include "tgfx/gpu/ImageOrigin.h" +#include "tgfx/gpu/Resource.h" +#include "tgfx/platform/HardwareBuffer.h" + +namespace tgfx { +class BackendTexture; + +/** + * Texture describes a two-dimensional array of pixels in the GPU backend for drawing. + */ +class Texture : public Resource { + public: + /** + * Creates a new texture from the specified pixel data with each pixel stored as 32-bit RGBA + * data. Returns nullptr if any of the parameters is invalid. + */ + static std::shared_ptr MakeRGBA(Context* context, int width, int height, + const void* pixels, size_t rowBytes, + ImageOrigin origin = ImageOrigin::TopLeft, + bool mipMapped = false) { + return MakeFormat(context, width, height, pixels, rowBytes, PixelFormat::RGBA_8888, origin, + mipMapped); + } + /** + * Creates an empty texture with each pixel stored as 32-bit RGBA data. Returns nullptr if any of + * the parameters is invalid. + */ + static std::shared_ptr MakeRGBA(Context* context, int width, int height, + ImageOrigin origin = ImageOrigin::TopLeft, + bool mipMapped = false) { + return MakeFormat(context, width, height, nullptr, 0, PixelFormat::RGBA_8888, origin, + mipMapped); + } + + /** + * Creates a new texture from the specified pixel data with each pixel stored as a single + * translucency (alpha) channel. Returns nullptr if any of the parameters is invalid or the + * backend does not support creating alpha only textures. + */ + static std::shared_ptr MakeAlpha(Context* context, int width, int height, + const void* pixels, size_t rowBytes, + ImageOrigin origin = ImageOrigin::TopLeft, + bool mipMapped = false) { + return MakeFormat(context, width, height, pixels, rowBytes, PixelFormat::ALPHA_8, origin, + mipMapped); + } + /** + * Creates an empty texture with each pixel stored as a single translucency (alpha) channel. + * Returns nullptr if any of the parameters is invalid or the backend does not support creating + * alpha only textures. + */ + static std::shared_ptr MakeAlpha(Context* context, int width, int height, + ImageOrigin origin = ImageOrigin::TopLeft, + bool mipMapped = false) { + return MakeFormat(context, width, height, nullptr, 0, PixelFormat::ALPHA_8, origin, mipMapped); + } + + /** + * Creates an empty texture with each pixel store as the pixelFormat describes. Returns nullptr if + * any of the parameters is invalid or the backend does not support creating textures of the + * specified pixelFormat. 
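+   *
+   * A minimal usage sketch (the Context pointer and the 256x256 size here are illustrative
+   * assumptions, not part of this header):
+   *
+   *   auto texture = Texture::MakeFormat(context, 256, 256, PixelFormat::RGBA_8888,
+   *                                      ImageOrigin::TopLeft);
+   *   if (texture == nullptr) {
+   *     // invalid parameters, or the backend cannot create textures of this pixelFormat
+   *   }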
+ */ + static std::shared_ptr MakeFormat(Context* context, int width, int height, + PixelFormat pixelFormat, ImageOrigin origin, + bool mipMapped = false) { + return MakeFormat(context, width, height, nullptr, 0, pixelFormat, origin, mipMapped); + } + + /** + * Creates a new texture from the specified pixel data with each pixel store as the pixelFormat + * describes. Returns nullptr if any of the parameters is invalid or the backend does not support + * creating textures of the specified pixelFormat. + */ + static std::shared_ptr MakeFormat(Context* context, int width, int height, + const void* pixels, size_t rowBytes, + PixelFormat pixelFormat, ImageOrigin origin, + bool mipMapped = false); + + /** + * Creates a new Texture which wraps the specified backend texture. The caller must ensure the + * backend texture is valid for the lifetime of returned Texture. + */ + static std::shared_ptr MakeFrom(Context* context, const BackendTexture& backendTexture, + ImageOrigin origin = ImageOrigin::TopLeft); + + /** + * Creates a new Texture which wraps the specified backend texture. The returned Texture takes + * ownership of the specified backend texture. + */ + static std::shared_ptr MakeAdopted(Context* context, + const BackendTexture& backendTexture, + ImageOrigin origin = ImageOrigin::TopLeft); + + /** + * Creates a Texture from the specified ImageBuffer. Returns nullptr if the context is nullptr or + * the imageBuffer is nullptr. + */ + static std::shared_ptr MakeFrom(Context* context, + std::shared_ptr imageBuffer, + bool mipMapped = false); + + /** + * Creates a Texture from the platform-specific hardware buffer. For example, the hardware + * buffer could be an AHardwareBuffer on the android platform or a CVPixelBufferRef on the + * apple platform. The returned Image takes a reference to the hardwareBuffer. The colorSpace + * is ignored if the hardwareBuffer contains only one plane, which means it is not in the YUV + * format. Returns nullptr if the context is nullptr or the hardwareBuffer is nullptr. + */ + static std::shared_ptr MakeFrom(Context* context, HardwareBufferRef hardwareBuffer, + YUVColorSpace colorSpace = YUVColorSpace::BT601_LIMITED); + + /** + * Creates a new Texture in the I420 format from the specified YUVData and YUVColorSpace. Returns + * nullptr if the context is nullptr or the yuvData is invalid; + */ + static std::shared_ptr MakeI420(Context* context, const YUVData* yuvData, + YUVColorSpace colorSpace = YUVColorSpace::BT601_LIMITED); + + /** + * Creates a new Texture in the NV12 format from the specified YUVData and YUVColorSpace. Returns + * nullptr if the context is nullptr or the yuvData is invalid; + */ + static std::shared_ptr MakeNV12(Context* context, const YUVData* yuvData, + YUVColorSpace colorSpace = YUVColorSpace::BT601_LIMITED); + + /** + * Returns the display width of this texture. + */ + int width() const { + return _width; + } + + /** + * Returns the display height of this texture. + */ + int height() const { + return _height; + } + + /** + * Returns the origin of the texture, either ImageOrigin::TopLeft or ImageOrigin::BottomLeft. + */ + ImageOrigin origin() const { + return _origin; + } + + /** + * Returns true if this is a YUVTexture. + */ + virtual bool isYUV() const { + return false; + } + + /** + * Returns the default texture sampler. + */ + virtual const TextureSampler* getSampler() const = 0; + + /** + * Returns the texture coordinates in backend units corresponding to specified position in pixels. 
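+   *
+   * For example, on a 512x256 two-dimensional texture, getTextureCoord(256, 128) yields
+   * (0.5, 0.5), while a TextureType::Rectangle sampler returns the pixel position unchanged.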
+ */ + virtual Point getTextureCoord(float x, float y) const; + + /** + * Retrieves the backend texture. Returns an invalid BackendTexture if the texture is a + * YUVTexture. + */ + virtual BackendTexture getBackendTexture() const; + + /** + * Retrieves the backing hardware buffer. This method does not acquire any additional reference to + * the returned hardware buffer. Returns nullptr if the texture is not created from a hardware + * buffer. + */ + virtual HardwareBufferRef getHardwareBuffer() const; + + protected: + Texture(int width, int height, ImageOrigin origin); + + private: + int _width = 0; + int _height = 0; + ImageOrigin _origin = ImageOrigin::TopLeft; +}; +} // namespace tgfx diff --git a/src/gpu/TextureProxy.cpp b/src/gpu/TextureProxy.cpp new file mode 100644 index 00000000..c1fe4d82 --- /dev/null +++ b/src/gpu/TextureProxy.cpp @@ -0,0 +1,90 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "TextureProxy.h" +#include "ProxyProvider.h" + +namespace tgfx { +TextureProxy::TextureProxy(ProxyProvider* provider, std::shared_ptr texture) + : provider(provider), texture(std::move(texture)) { +} + +TextureProxy::~TextureProxy() { + if (!uniqueKey.empty()) { + provider->proxyOwnerMap.erase(uniqueKey.uniqueID()); + } +} + +int TextureProxy::width() const { + return texture->width(); +} + +int TextureProxy::height() const { + return texture->height(); +} + +bool TextureProxy::hasMipmaps() const { + return texture->getSampler()->hasMipmaps(); +} + +std::shared_ptr TextureProxy::getTexture() const { + return texture; +} + +bool TextureProxy::isInstantiated() const { + return texture != nullptr; +} + +bool TextureProxy::instantiate() { + if (texture != nullptr) { + return true; + } + texture = onMakeTexture(provider->context); + if (texture != nullptr && setTextureUniqueKey) { + texture->assignUniqueKey(uniqueKey); + } + return texture != nullptr; +} + +void TextureProxy::assignUniqueKey(const UniqueKey& newKey, bool updateTextureKey) { + if (newKey.empty()) { + removeUniqueKey(updateTextureKey); + return; + } + if (newKey != uniqueKey) { + provider->changeUniqueKey(this, newKey); + } + setTextureUniqueKey = updateTextureKey; + if (setTextureUniqueKey && texture != nullptr) { + texture->assignUniqueKey(newKey); + } +} + +void TextureProxy::removeUniqueKey(bool updateTextureKey) { + if (!uniqueKey.empty()) { + provider->removeUniqueKey(this); + } + if (updateTextureKey && texture != nullptr) { + texture->removeUniqueKey(); + } +} + +std::shared_ptr TextureProxy::onMakeTexture(Context*) { + return nullptr; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/gpu/TextureProxy.h 
b/src/gpu/TextureProxy.h new file mode 100644 index 00000000..8e6c9a1b --- /dev/null +++ b/src/gpu/TextureProxy.h @@ -0,0 +1,97 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Texture.h" +#include "gpu/TextureSampler.h" + +namespace tgfx { +class ProxyProvider; + +/** + * This class delays the acquisition of textures until they are actually required. + */ +class TextureProxy { + public: + virtual ~TextureProxy(); + + /** + * Returns the width of the texture proxy. + */ + virtual int width() const; + + /** + * Returns the height of the texture proxy. + */ + virtual int height() const; + + /** + * If we are instantiated and have a texture, return the mipmap state of that texture. Otherwise, + * returns the proxy's mipmap state from creation time. + */ + virtual bool hasMipmaps() const; + + /** + * Returns the Texture of the proxy. Returns nullptr if the proxy is not instantiated yet. + */ + std::shared_ptr getTexture() const; + + /** + * Returns true if the backing texture is instantiated. + */ + bool isInstantiated() const; + + /** + * Instantiates the backing texture, if necessary. Returns true if the backing texture is + * instantiated. + */ + bool instantiate(); + + /** + * Assigns a UniqueKey to the proxy. The proxy will be findable via this UniqueKey using + * ProxyProvider.findProxyByUniqueKey(). If the updateTextureKey is true, it will also assign the + * UniqueKey to the target texture. + */ + void assignUniqueKey(const UniqueKey& uniqueKey, bool updateTextureKey = true); + + /* + * Removes the UniqueKey from the proxy. If the updateTextureKey is true, it will also remove + * the UniqueKey from the target texture. + */ + void removeUniqueKey(bool updateTextureKey = true); + + protected: + ProxyProvider* provider = nullptr; + bool setTextureUniqueKey = true; + std::shared_ptr texture = nullptr; + + explicit TextureProxy(ProxyProvider* provider, std::shared_ptr texture = nullptr); + + /** + * Overrides to create a new Texture associated with specified context. + */ + virtual std::shared_ptr onMakeTexture(Context* context); + + private: + UniqueKey uniqueKey = {}; + std::weak_ptr weakThis; + + friend class ProxyProvider; +}; +} // namespace tgfx diff --git a/src/gpu/TextureResolveRenderTask.cpp b/src/gpu/TextureResolveRenderTask.cpp new file mode 100644 index 00000000..549717b2 --- /dev/null +++ b/src/gpu/TextureResolveRenderTask.cpp @@ -0,0 +1,27 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. 
+// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "TextureResolveRenderTask.h" +#include "Gpu.h" + +namespace tgfx { +bool TextureResolveRenderTask::execute(Gpu* gpu) { + gpu->resolveRenderTarget(renderTarget.get()); + return true; +} +} // namespace tgfx diff --git a/src/gpu/TextureResolveRenderTask.h b/src/gpu/TextureResolveRenderTask.h new file mode 100644 index 00000000..4a83dec5 --- /dev/null +++ b/src/gpu/TextureResolveRenderTask.h @@ -0,0 +1,32 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "RenderTask.h" + +namespace tgfx { +class TextureResolveRenderTask : public RenderTask { + public: + explicit TextureResolveRenderTask(std::shared_ptr renderTarget) + : RenderTask(std::move(renderTarget)) { + } + + bool execute(Gpu* gpu) override; +}; +} // namespace tgfx diff --git a/src/gpu/TextureSampler.h b/src/gpu/TextureSampler.h new file mode 100644 index 00000000..f5d69605 --- /dev/null +++ b/src/gpu/TextureSampler.h @@ -0,0 +1,73 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/gpu/Context.h" +#include "tgfx/gpu/PixelFormat.h" +#include "tgfx/utils/BytesKey.h" + +namespace tgfx { +enum class TextureType { Unknown, TwoD, Rectangle, External }; + +/** + * TextureSampler stores the sampling parameters for a backend texture uint. + */ +class TextureSampler { + public: + /** + * Creates a new TextureSampler which wraps the specified backend texture. The caller is + * responsible for managing the lifetime of the backendTexture. + */ + static std::unique_ptr MakeFrom(Context* context, + const BackendTexture& backendTexture); + + virtual ~TextureSampler() = default; + + /** + * The pixel format of the sampler. + */ + PixelFormat format = PixelFormat::RGBA_8888; + int maxMipMapLevel = 0; + + /** + * Returns the TextureType of TextureSampler. + */ + virtual TextureType type() const { + return TextureType::TwoD; + } + + /** + * Returns true if the TextureSampler has mipmap levels. + */ + bool hasMipmaps() const { + return maxMipMapLevel > 0; + } + + /** + * Retrieves the backend texture with the specified size. + */ + virtual BackendTexture getBackendTexture(int width, int height) const = 0; + + protected: + virtual void computeKey(Context* context, BytesKey* bytesKey) const = 0; + + friend class FragmentProcessor; + friend class Pipeline; +}; +} // namespace tgfx diff --git a/src/gpu/UniformBuffer.cpp b/src/gpu/UniformBuffer.cpp new file mode 100644 index 00000000..3ddeb568 --- /dev/null +++ b/src/gpu/UniformBuffer.cpp @@ -0,0 +1,79 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "UniformBuffer.h" +#include "utils/Log.h" + +namespace tgfx { +size_t Uniform::size() const { + switch (type) { + case Uniform::Type::Float: + case Uniform::Type::Int: + return 4; + case Uniform::Type::Float2: + case Uniform::Type::Int2: + return 8; + case Uniform::Type::Float3: + case Uniform::Type::Int3: + return 12; + case Uniform::Type::Float4: + case Uniform::Type::Int4: + case Uniform::Type::Float2x2: + return 16; + case Uniform::Type::Float3x3: + return 36; + case Uniform::Type::Float4x4: + return 64; + } + return 0; +} + +UniformBuffer::UniformBuffer(std::vector uniformList) : uniforms(std::move(uniformList)) { + int index = 0; + size_t offset = 0; + for (auto& uniform : uniforms) { + uniformMap[uniform.name] = index++; + offsets.push_back(offset); + offset += uniform.size(); + } +} + +void UniformBuffer::setData(const std::string& name, const tgfx::Matrix& matrix) { + float values[6]; + matrix.get6(values); + float data[] = {values[0], values[3], 0, values[1], values[4], 0, values[2], values[5], 1}; + onSetData(name, data, sizeof(data)); +} + +void UniformBuffer::onSetData(const std::string& name, const void* data, size_t size) { + auto key = getUniformKey(name); + auto result = uniformMap.find(key); + if (result == uniformMap.end()) { + LOGE("UniformBuffer::onSetData() uniform '%s' not found!", name.c_str()); + return; + } + auto index = result->second; + auto uniformSize = uniforms[index].size(); + if (uniformSize != size) { + LOGE("UniformBuffer::onSetData() data size mismatch!"); + return; + } + onCopyData(index, offsets[index], size, data); +} + +} // namespace tgfx \ No newline at end of file diff --git a/src/gpu/UniformBuffer.h b/src/gpu/UniformBuffer.h new file mode 100644 index 00000000..9cf08098 --- /dev/null +++ b/src/gpu/UniformBuffer.h @@ -0,0 +1,106 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include +#include "tgfx/core/Matrix.h" + +namespace tgfx { +/** + * Reflected description of a uniform variable in the GPU program. + */ +struct Uniform { + /** + * Possible types of a uniform variable. + */ + enum class Type { + Float, + Float2, + Float3, + Float4, + Float2x2, + Float3x3, + Float4x4, + Int, + Int2, + Int3, + Int4, + }; + + std::string name; + Type type; + + /** + * Returns the size of the uniform in bytes. + */ + size_t size() const; +}; + +/** + * An object representing the collection of uniform variables in a GPU program. + */ +class UniformBuffer { + public: + /** + * Constructs a uniform buffer with the specified uniforms. 
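+   *
+   * The uniforms are tightly packed in declaration order using Uniform::size() to compute the
+   * byte offsets; for example, {Float4, Float2x2, Float} is laid out at offsets 0, 16, and 32.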
+ */ + explicit UniformBuffer(std::vector uniforms); + + virtual ~UniformBuffer() = default; + + /** + * Copies value into the uniform buffer. The data must have the same size as the uniform specified + * by name. + */ + template + std::enable_if_t && !std::is_pointer_v, void> setData( + const std::string& name, const T& value) { + onSetData(name, &value, sizeof(value)); + } + + /** + * Convenience method for copying a Matrix to a 3x3 matrix in column-major order. + */ + void setData(const std::string& name, const Matrix& matrix); + + protected: + std::vector uniforms = {}; + std::vector offsets = {}; + + /** + * Generates a uniform key based on the specified name and the current stage index. + */ + virtual std::string getUniformKey(const std::string& name) const { + return name; + } + + /** + * Copies data into the uniform buffer. The data must have the same size as the uniform specified + * by name. + */ + virtual void onCopyData(int index, size_t offset, size_t size, const void* data) = 0; + + private: + std::unordered_map uniformMap = {}; + + void onSetData(const std::string& name, const void* data, size_t size); +}; +} // namespace tgfx diff --git a/src/gpu/UniformHandler.h b/src/gpu/UniformHandler.h new file mode 100644 index 00000000..65ab8de5 --- /dev/null +++ b/src/gpu/UniformHandler.h @@ -0,0 +1,67 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "SamplerHandle.h" +#include "ShaderVar.h" +#include "Swizzle.h" +#include "gpu/TextureSampler.h" + +namespace tgfx { +static constexpr char NO_MANGLE_PREFIX[] = "tgfx_"; + +class ProgramBuilder; + +class UniformHandler { + public: + virtual ~UniformHandler() = default; + + /** + * Add a uniform variable to the current program, that has visibility in one or more shaders. + * visibility is a bitfield of ShaderFlag values indicating from which shaders the uniform should + * be accessible. At least one bit must be set. The actual uniform name will be mangled. Returns + * the final uniform name. 
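+   *
+   * Names beginning with the "tgfx_" prefix (NO_MANGLE_PREFIX) are exempt from mangling and are
+   * returned unchanged (for example, "tgfx_RTAdjust"); all other names are passed to
+   * internalAddUniform() with mangling enabled.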
+ */ + std::string addUniform(ShaderFlags visibility, SLType type, const std::string& name) { + bool mangle = name.find(NO_MANGLE_PREFIX) != 0; + return internalAddUniform(visibility, type, name, mangle); + } + + protected: + explicit UniformHandler(ProgramBuilder* program) : programBuilder(program) { + } + + // This is not owned by the class + ProgramBuilder* programBuilder; + + private: + virtual const ShaderVar& samplerVariable(SamplerHandle samplerHandle) const = 0; + + virtual const Swizzle& samplerSwizzle(SamplerHandle samplerHandle) const = 0; + + virtual SamplerHandle addSampler(const TextureSampler* sampler, const std::string& name) = 0; + + virtual std::string internalAddUniform(ShaderFlags visibility, SLType type, + const std::string& name, bool mangleName) = 0; + + virtual std::string getUniformDeclarations(ShaderFlags visibility) const = 0; + + friend class ProgramBuilder; +}; +} // namespace tgfx diff --git a/src/gpu/UniqueKey.cpp b/src/gpu/UniqueKey.cpp new file mode 100644 index 00000000..0c4d8e09 --- /dev/null +++ b/src/gpu/UniqueKey.cpp @@ -0,0 +1,29 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include +#include "tgfx/gpu/ResourceKey.h" +#include "utils/UniqueID.h" + +namespace tgfx { +UniqueKey UniqueKey::Next() { + UniqueKey uniqueKey = {}; + uniqueKey.id = std::make_shared(UniqueID::Next()); + return uniqueKey; +} +} // namespace tgfx diff --git a/src/gpu/VaryingHandler.cpp b/src/gpu/VaryingHandler.cpp new file mode 100644 index 00000000..37081e2a --- /dev/null +++ b/src/gpu/VaryingHandler.cpp @@ -0,0 +1,70 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "VaryingHandler.h" +#include "ProgramBuilder.h" + +namespace tgfx { +Varying VaryingHandler::addVarying(const std::string& name, SLType type) { + Varying varying; + varying._type = type; + varying._name = programBuilder->nameVariable('v', name); + varyings.push_back(std::move(varying)); + return varyings[varyings.size() - 1]; +} + +void VaryingHandler::emitAttributes(const GeometryProcessor& processor) { + for (const auto* attr : processor.vertexAttributes()) { + addAttribute(attr->asShaderVar()); + } +} + +void VaryingHandler::addAttribute(const ShaderVar& var) { + for (const auto& attr : vertexInputs) { + // if attribute already added, don't add it again + if (attr.name() == var.name()) { + return; + } + } + vertexInputs.push_back(var); +} + +void VaryingHandler::finalize() { + for (const auto& v : varyings) { + vertexOutputs.emplace_back(v._name, v.type(), ShaderVar::TypeModifier::Varying); + fragInputs.emplace_back(v._name, v.type(), ShaderVar::TypeModifier::Varying); + } +} + +void VaryingHandler::appendDecls(const std::vector& vars, std::string* out, + ShaderFlags flag) const { + for (const auto& var : vars) { + out->append(programBuilder->getShaderVarDeclarations(var, flag)); + out->append(";\n"); + } +} + +void VaryingHandler::getVertexDecls(std::string* inputDecls, std::string* outputDecls) const { + appendDecls(vertexInputs, inputDecls, ShaderFlags::Vertex); + appendDecls(vertexOutputs, outputDecls, ShaderFlags::Vertex); +} + +void VaryingHandler::getFragDecls(std::string* inputDecls) const { + appendDecls(fragInputs, inputDecls, ShaderFlags::Fragment); +} +} // namespace tgfx diff --git a/src/gpu/VaryingHandler.h b/src/gpu/VaryingHandler.h new file mode 100644 index 00000000..845cb1a1 --- /dev/null +++ b/src/gpu/VaryingHandler.h @@ -0,0 +1,84 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "ShaderVar.h" + +namespace tgfx { +class ProgramBuilder; +class GeometryProcessor; + +class Varying { + public: + const std::string& vsOut() const { + return _name; + } + const std::string& fsIn() const { + return _name; + } + const std::string& name() const { + return _name; + } + SLType type() const { + return _type; + } + + private: + SLType _type = SLType::Void; + std::string _name; + + friend class VaryingHandler; +}; + +class VaryingHandler { + public: + explicit VaryingHandler(ProgramBuilder* program) : programBuilder(program) { + } + + virtual ~VaryingHandler() = default; + + Varying addVarying(const std::string& name, SLType type); + + void emitAttributes(const GeometryProcessor& processor); + + /** + * This should be called once all attributes and varyings have been added to the VaryingHandler + * and before getting/adding any of the declarations to the shaders. + */ + void finalize(); + + void getVertexDecls(std::string* inputDecls, std::string* outputDecls) const; + + void getFragDecls(std::string* inputDecls) const; + + private: + void appendDecls(const std::vector& vars, std::string* out, ShaderFlags flag) const; + + void addAttribute(const ShaderVar& var); + + std::vector varyings; + std::vector vertexInputs; + std::vector vertexOutputs; + std::vector fragInputs; + + // This is not owned by the class + ProgramBuilder* programBuilder; +}; +} // namespace tgfx diff --git a/src/gpu/VertexShaderBuilder.cpp b/src/gpu/VertexShaderBuilder.cpp new file mode 100644 index 00000000..a394d035 --- /dev/null +++ b/src/gpu/VertexShaderBuilder.cpp @@ -0,0 +1,27 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "VertexShaderBuilder.h" +#include "ProgramBuilder.h" + +namespace tgfx { +void VertexShaderBuilder::onFinalize() { + programBuilder->varyingHandler()->getVertexDecls(&shaderStrings[Type::Inputs], + &shaderStrings[Type::Outputs]); +} +} // namespace tgfx diff --git a/src/gpu/VertexShaderBuilder.h b/src/gpu/VertexShaderBuilder.h new file mode 100644 index 00000000..069382c7 --- /dev/null +++ b/src/gpu/VertexShaderBuilder.h @@ -0,0 +1,36 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "ShaderBuilder.h" + +namespace tgfx { +static const std::string RTAdjustName = "tgfx_RTAdjust"; + +class VertexShaderBuilder : public ShaderBuilder { + public: + explicit VertexShaderBuilder(ProgramBuilder* program) : ShaderBuilder(program) { + } + + virtual void emitNormalizedPosition(const std::string& devPos) = 0; + + private: + void onFinalize() override; +}; +} // namespace tgfx diff --git a/src/gpu/Window.cpp b/src/gpu/Window.cpp new file mode 100644 index 00000000..19809019 --- /dev/null +++ b/src/gpu/Window.cpp @@ -0,0 +1,52 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/gpu/Window.h" +#include "tgfx/gpu/Device.h" +#include "utils/Log.h" + +namespace tgfx { +Window::Window(std::shared_ptr device) : device(std::move(device)) { +} + +std::shared_ptr Window::createSurface(Context* context) { + if (!checkContext(context)) { + return nullptr; + } + return onCreateSurface(context); +} + +void Window::present(Context* context, int64_t presentationTime) { + if (!checkContext(context)) { + return; + } + context->flush(); + onPresent(context, presentationTime); +} + +bool Window::checkContext(Context* context) { + if (context == nullptr) { + return false; + } + if (context->device() != device.get()) { + LOGE("Window::checkContext() : context is not locked from the same device of this window"); + return false; + } + return true; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/gpu/YUVTexture.cpp b/src/gpu/YUVTexture.cpp new file mode 100644 index 00000000..42e773fa --- /dev/null +++ b/src/gpu/YUVTexture.cpp @@ -0,0 +1,151 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "YUVTexture.h" +#include "gpu/Gpu.h" +#include "utils/UniqueID.h" + +namespace tgfx { +static constexpr int YUV_SIZE_FACTORS[] = {0, 1, 1}; + +static void ComputeScratchKey(BytesKey* scratchKey, int width, int height, YUVPixelFormat format) { + static const uint32_t YUVTextureType = UniqueID::Next(); + scratchKey->write(YUVTextureType); + scratchKey->write(static_cast(width)); + scratchKey->write(static_cast(height)); + scratchKey->write(static_cast(format)); +} + +static std::vector> MakeTexturePlanes(Context* context, + const YUVData* yuvData, + const PixelFormat* formats) { + std::vector> texturePlanes = {}; + auto count = static_cast(yuvData->planeCount()); + for (int index = 0; index < count; index++) { + auto w = yuvData->width() >> YUV_SIZE_FACTORS[index]; + auto h = yuvData->height() >> YUV_SIZE_FACTORS[index]; + auto sampler = context->gpu()->createSampler(w, h, formats[index], 1); + if (sampler == nullptr) { + for (auto& plane : texturePlanes) { + context->gpu()->deleteSampler(plane.get()); + } + return {}; + } + texturePlanes.push_back(std::move(sampler)); + } + return texturePlanes; +} + +static void SubmitYUVTexture(Context* context, const YUVData* yuvData, + const std::vector>* samplers) { + auto count = static_cast(yuvData->planeCount()); + for (int index = 0; index < count; index++) { + auto sampler = (*samplers)[index].get(); + auto w = yuvData->width() >> YUV_SIZE_FACTORS[index]; + auto h = yuvData->height() >> YUV_SIZE_FACTORS[index]; + auto pixels = yuvData->getBaseAddressAt(index); + auto rowBytes = yuvData->getRowBytesAt(index); + context->gpu()->writePixels(sampler, Rect::MakeWH(w, h), pixels, rowBytes); + } +} + +std::shared_ptr Texture::MakeI420(Context* context, const YUVData* yuvData, + YUVColorSpace colorSpace) { + if (context == nullptr || yuvData == nullptr || + yuvData->planeCount() != YUVData::I420_PLANE_COUNT) { + return nullptr; + } + PixelFormat yuvFormats[YUVData::I420_PLANE_COUNT] = {PixelFormat::GRAY_8, PixelFormat::GRAY_8, + PixelFormat::GRAY_8}; + ScratchKey scratchKey = {}; + ComputeScratchKey(&scratchKey, yuvData->width(), yuvData->height(), YUVPixelFormat::I420); + auto texture = std::static_pointer_cast( + context->resourceCache()->findScratchResource(scratchKey)); + if (texture == nullptr) { + auto texturePlanes = MakeTexturePlanes(context, yuvData, yuvFormats); + if (texturePlanes.empty()) { + return nullptr; + } + texture = std::static_pointer_cast(Resource::Wrap( + context, + new YUVTexture(yuvData->width(), yuvData->height(), YUVPixelFormat::I420, colorSpace))); + texture->samplers = std::move(texturePlanes); + } + SubmitYUVTexture(context, yuvData, &texture->samplers); + return texture; +} + +std::shared_ptr Texture::MakeNV12(Context* context, const YUVData* yuvData, + YUVColorSpace colorSpace) { + if (context == nullptr || yuvData == nullptr || + yuvData->planeCount() != YUVData::NV12_PLANE_COUNT) { + return nullptr; + } + PixelFormat yuvFormats[YUVData::NV12_PLANE_COUNT] = {PixelFormat::GRAY_8, PixelFormat::RG_88}; 
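+  // NV12 carries a full-resolution single-channel Y plane followed by a half-resolution
+  // interleaved UV plane, so the two planes are uploaded through GRAY_8 and RG_88 samplers.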
+ ScratchKey scratchKey = {}; + ComputeScratchKey(&scratchKey, yuvData->width(), yuvData->height(), YUVPixelFormat::NV12); + auto texture = std::static_pointer_cast( + context->resourceCache()->findScratchResource(scratchKey)); + if (texture == nullptr) { + auto texturePlanes = MakeTexturePlanes(context, yuvData, yuvFormats); + if (texturePlanes.empty()) { + return nullptr; + } + texture = std::static_pointer_cast(Resource::Wrap( + context, + new YUVTexture(yuvData->width(), yuvData->height(), YUVPixelFormat::NV12, colorSpace))); + texture->samplers = std::move(texturePlanes); + } + SubmitYUVTexture(context, yuvData, &texture->samplers); + return texture; +} + +YUVTexture::YUVTexture(int width, int height, YUVPixelFormat pixelFormat, YUVColorSpace colorSpace) + : Texture(width, height, ImageOrigin::TopLeft), _pixelFormat(pixelFormat), + _colorSpace(colorSpace) { +} + +Point YUVTexture::getTextureCoord(float x, float y) const { + return {x / static_cast(width()), y / static_cast(height())}; +} + +BackendTexture YUVTexture::getBackendTexture() const { + return {}; +} + +const TextureSampler* YUVTexture::getSamplerAt(size_t index) const { + if (index >= samplers.size()) { + return nullptr; + } + return samplers[index].get(); +} + +size_t YUVTexture::memoryUsage() const { + return width() * height() * 3 / 2; +} + +void YUVTexture::computeScratchKey(BytesKey* scratchKey) const { + ComputeScratchKey(scratchKey, width(), height(), _pixelFormat); +} + +void YUVTexture::onReleaseGPU() { + for (auto& sampler : samplers) { + context->gpu()->deleteSampler(sampler.get()); + } +} +} // namespace tgfx diff --git a/src/gpu/YUVTexture.h b/src/gpu/YUVTexture.h new file mode 100644 index 00000000..37ab54a5 --- /dev/null +++ b/src/gpu/YUVTexture.h @@ -0,0 +1,101 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Texture.h" +#include "gpu/TextureSampler.h" +#include "tgfx/core/YUVColorSpace.h" +#include "tgfx/core/YUVData.h" + +namespace tgfx { +/** + * Defines pixel formats for YUV textures. + */ +enum class YUVPixelFormat { + /** + * 8-bit Y plane followed by 8 bit 2x2 subsampled U and V planes. + */ + I420, + /** + * 8-bit Y plane followed by an interleaved U/V plane with 2x2 subsampling. + */ + NV12 +}; + +/** + * YUVTexture wraps separate texture samplers in the GPU backend for Y, U, and V planes. + */ +class YUVTexture : public Texture { + public: + /** + * The pixel format of this yuv texture. + */ + YUVPixelFormat pixelFormat() const { + return _pixelFormat; + } + + /** + * The color space of the yuv texture. 
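+   * Defaults to YUVColorSpace::BT601_LIMITED, matching the default colorSpace of
+   * Texture::MakeI420() and Texture::MakeNV12().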
+ */ + YUVColorSpace colorSpace() const { + return _colorSpace; + } + + /** + * Returns the number of the samplers in the texture. + */ + size_t samplerCount() const { + return samplers.size(); + } + + const TextureSampler* getSampler() const override { + return samplers[0].get(); + } + + /** + * Returns a texture sampler at the specified index. + */ + const TextureSampler* getSamplerAt(size_t index) const; + + bool isYUV() const final { + return true; + } + + size_t memoryUsage() const override; + + Point getTextureCoord(float x, float y) const override; + + BackendTexture getBackendTexture() const override; + + protected: + std::vector> samplers = {}; + + YUVTexture(int width, int height, YUVPixelFormat pixelFormat, YUVColorSpace colorSpace); + + void computeScratchKey(BytesKey* scratchKey) const override; + + private: + YUVPixelFormat _pixelFormat = YUVPixelFormat::I420; + YUVColorSpace _colorSpace = YUVColorSpace::BT601_LIMITED; + + void onReleaseGPU() override; + + friend class Texture; +}; +} // namespace tgfx diff --git a/src/gpu/ops/ClearOp.cpp b/src/gpu/ops/ClearOp.cpp new file mode 100644 index 00000000..c9b44aa5 --- /dev/null +++ b/src/gpu/ops/ClearOp.cpp @@ -0,0 +1,46 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ClearOp.h" +#include "gpu/RenderPass.h" + +namespace tgfx { +std::unique_ptr ClearOp::Make(Color color, const Rect& scissor) { + return std::unique_ptr(new ClearOp(color, scissor)); +} + +bool ContainsScissor(const Rect& a, const Rect& b) { + return a.isEmpty() || (!b.isEmpty() && a.contains(b)); +} + +bool ClearOp::onCombineIfPossible(Op* op) { + auto that = static_cast(op); + if (ContainsScissor(that->scissor, scissor)) { + scissor = that->scissor; + color = that->color; + return true; + } else if (color == that->color && ContainsScissor(scissor, that->scissor)) { + return true; + } + return false; +} + +void ClearOp::execute(RenderPass* renderPass) { + renderPass->clear(scissor, color); +} +} // namespace tgfx diff --git a/src/gpu/ops/ClearOp.h b/src/gpu/ops/ClearOp.h new file mode 100644 index 00000000..49b11cb1 --- /dev/null +++ b/src/gpu/ops/ClearOp.h @@ -0,0 +1,42 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
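// Worked example for ClearOp::onCombineIfPossible above: two clears merge in either of two cases.
// If the later clear's scissor contains the earlier one's (an empty scissor counts as covering the
// whole target, per ContainsScissor), the later clear wins and its color and scissor replace the
// earlier op's. If both clears use the same color and the earlier scissor already contains the
// later one, the later clear is redundant and is dropped. For instance, clearing
// Rect::MakeWH(100, 100) to transparent and then Rect::MakeWH(50, 50) to transparent collapses
// into the first op alone.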
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "Op.h" + +namespace tgfx { +class ClearOp : public Op { + public: + static std::unique_ptr Make(Color color, const Rect& scissor); + + void execute(RenderPass* renderPass) override; + + private: + DEFINE_OP_CLASS_ID + + explicit ClearOp(Color color, const Rect& scissor) + : Op(ClassID()), color(color), scissor(scissor) { + } + + bool onCombineIfPossible(Op* op) override; + + Color color = Color::Transparent(); + Rect scissor = Rect::MakeEmpty(); +}; +} // namespace tgfx diff --git a/src/gpu/ops/DrawOp.cpp b/src/gpu/ops/DrawOp.cpp new file mode 100644 index 00000000..667c1ef6 --- /dev/null +++ b/src/gpu/ops/DrawOp.cpp @@ -0,0 +1,108 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "DrawOp.h" +#include "gpu/Gpu.h" +#include "utils/Log.h" + +namespace tgfx { +void DrawOp::visitProxies(const std::function& func) const { + auto f = [&](const std::unique_ptr& fp) { fp->visitProxies(func); }; + std::for_each(_colors.begin(), _colors.end(), f); + std::for_each(_masks.begin(), _masks.end(), f); +} + +void DrawOp::prepare(Gpu* gpu) { + onPrepare(gpu); +} + +void DrawOp::execute(RenderPass* renderPass) { + onExecute(renderPass); +} + +static DstTextureInfo CreateDstTextureInfo(RenderPass* renderPass, Rect dstRect) { + DstTextureInfo dstTextureInfo = {}; + if (renderPass->context()->caps()->textureBarrierSupport && renderPass->renderTargetTexture()) { + dstTextureInfo.texture = renderPass->renderTargetTexture(); + dstTextureInfo.requiresBarrier = true; + return dstTextureInfo; + } + auto bounds = + Rect::MakeWH(renderPass->renderTarget()->width(), renderPass->renderTarget()->height()); + if (renderPass->renderTarget()->origin() == ImageOrigin::BottomLeft) { + auto height = dstRect.height(); + dstRect.top = static_cast(renderPass->renderTarget()->height()) - dstRect.bottom; + dstRect.bottom = dstRect.top + height; + } + if (!dstRect.intersect(bounds)) { + return {}; + } + dstRect.roundOut(); + dstTextureInfo.offset = {dstRect.x(), dstRect.y()}; + auto dstTexture = + Texture::MakeRGBA(renderPass->context(), static_cast(dstRect.width()), + static_cast(dstRect.height()), renderPass->renderTarget()->origin()); + if (dstTexture == nullptr) { + LOGE("Failed to create dst texture(%f*%f).", dstRect.width(), dstRect.height()); + return {}; + } + dstTextureInfo.texture = dstTexture; + renderPass->context()->gpu()->copyRenderTargetToTexture(renderPass->renderTarget().get(), + dstTexture.get(), dstRect, Point::Zero()); + return dstTextureInfo; +} + +std::unique_ptr DrawOp::createPipeline(RenderPass* renderPass, + std::unique_ptr gp) { + auto numColorProcessors = _colors.size(); + std::vector> fragmentProcessors = {}; + fragmentProcessors.resize(numColorProcessors + _masks.size()); + std::move(_colors.begin(), _colors.end(), fragmentProcessors.begin()); + std::move(_masks.begin(), _masks.end(), + fragmentProcessors.begin() + static_cast(numColorProcessors)); + DstTextureInfo dstTextureInfo = {}; + auto caps = renderPass->context()->caps(); + if (!BlendModeAsCoeff(blendMode) && !caps->frameBufferFetchSupport) { + dstTextureInfo = CreateDstTextureInfo(renderPass, bounds()); + } + const auto& swizzle = renderPass->renderTarget()->writeSwizzle(); + return std::make_unique(std::move(gp), std::move(fragmentProcessors), + numColorProcessors, blendMode, dstTextureInfo, &swizzle); +} + +static bool CompareFragments(const std::vector>& frags1, + const std::vector>& frags2) { + if (frags1.size() != frags2.size()) { + return false; + } + for (size_t i = 0; i < frags1.size(); i++) { + if (!frags1[i]->isEqual(*frags2[i])) { + return false; + } + } + return true; +} + +bool DrawOp::onCombineIfPossible(Op* op) { + auto* that = static_cast(op); + return aa == that->aa && _scissorRect == that->_scissorRect && + CompareFragments(_colors, that->_colors) && CompareFragments(_masks, that->_masks) && + blendMode == that->blendMode; +} + +} // namespace tgfx diff --git a/src/gpu/ops/DrawOp.h b/src/gpu/ops/DrawOp.h new file mode 100644 index 00000000..b15e1aa5 --- /dev/null +++ b/src/gpu/ops/DrawOp.h @@ -0,0 +1,81 @@ 
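// Summary (illustrative) of DrawOp::createPipeline above: color fragment processors are packed
// first, followed by mask (coverage) processors, and numColorProcessors records where that boundary
// sits for the Pipeline. A destination texture is only captured when the blend mode cannot be
// expressed as hardware blend coefficients and framebuffer fetch is unavailable; with
// texture-barrier support the render target's own texture is reused (requiresBarrier = true),
// otherwise the destination rectangle is flipped for bottom-left origins, clipped to the target
// bounds, and copied into a temporary RGBA texture via copyRenderTargetToTexture.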
+///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "Op.h" +#include "gpu/AAType.h" +#include "gpu/Pipeline.h" +#include "gpu/RenderPass.h" + +namespace tgfx { +class DrawOp : public Op { + public: + explicit DrawOp(uint8_t classID) : Op(classID) { + } + + void visitProxies(const std::function& func) const override; + + void prepare(Gpu* gpu) final; + + void execute(RenderPass* renderPass) final; + + std::unique_ptr createPipeline(RenderPass* renderPass, + std::unique_ptr gp); + + const Rect& scissorRect() const { + return _scissorRect; + } + + void setScissorRect(Rect scissorRect) { + _scissorRect = scissorRect; + } + + void setBlendMode(BlendMode mode) { + blendMode = mode; + } + + void setAA(AAType type) { + aa = type; + } + + void setColors(std::vector> colors) { + _colors = std::move(colors); + } + + void setMasks(std::vector> masks) { + _masks = std::move(masks); + } + + protected: + bool onCombineIfPossible(Op* op) override; + + virtual void onPrepare(Gpu* gpu) = 0; + + virtual void onExecute(RenderPass* renderPass) = 0; + + AAType aa = AAType::None; + + private: + Rect _scissorRect = Rect::MakeEmpty(); + std::vector> _colors; + std::vector> _masks; + BlendMode blendMode = BlendMode::SrcOver; +}; +} // namespace tgfx diff --git a/src/gpu/ops/FillRectOp.cpp b/src/gpu/ops/FillRectOp.cpp new file mode 100644 index 00000000..66632473 --- /dev/null +++ b/src/gpu/ops/FillRectOp.cpp @@ -0,0 +1,184 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
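// Hypothetical usage sketch (the variable names below are assumed, not taken from the sources): a
// caller typically configures a DrawOp before handing it to an OpsTask, e.g.
//   op->setScissorRect(clipBounds);
//   op->setBlendMode(BlendMode::SrcOver);
//   op->setAA(AAType::Coverage);
//   op->setColors(std::move(colorProcessors));
//   op->setMasks(std::move(coverageProcessors));
//   opsTask->addOp(std::move(op));
// A subclass then only implements onPrepare() to build GPU buffers and onExecute() to bind the
// pipeline returned by createPipeline() and issue its draw call.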
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "FillRectOp.h" +#include "gpu/Gpu.h" +#include "gpu/Quad.h" +#include "gpu/ResourceProvider.h" +#include "gpu/processors/QuadPerEdgeAAGeometryProcessor.h" +#include "utils/UniqueID.h" + +namespace tgfx { +std::vector FillRectOp::coverageVertices() const { + std::vector vertices; + for (size_t i = 0; i < rects.size(); ++i) { + auto scale = sqrtf(viewMatrices[i].getScaleX() * viewMatrices[i].getScaleX() + + viewMatrices[i].getSkewY() * viewMatrices[i].getSkewY()); + // we want the new edge to be .5px away from the old line. + auto padding = 0.5f / scale; + auto insetBounds = rects[i].makeInset(padding, padding); + auto insetQuad = Quad::MakeFromRect(insetBounds, viewMatrices[i]); + auto outsetBounds = rects[i].makeOutset(padding, padding); + auto outsetQuad = Quad::MakeFromRect(outsetBounds, viewMatrices[i]); + + auto normalInsetQuad = Quad::MakeFromRect(insetBounds, localMatrices[i]); + auto normalOutsetQuad = Quad::MakeFromRect(outsetBounds, localMatrices[i]); + + for (int j = 0; j < 2; ++j) { + const auto& quad = j == 0 ? insetQuad : outsetQuad; + const auto& normalQuad = j == 0 ? normalInsetQuad : normalOutsetQuad; + auto coverage = j == 0 ? 1.0f : 0.0f; + for (int k = 0; k < 4; ++k) { + vertices.push_back(quad.point(k).x); + vertices.push_back(quad.point(k).y); + vertices.push_back(coverage); + vertices.push_back(normalQuad.point(k).x); + vertices.push_back(normalQuad.point(k).y); + if (!colors.empty()) { + vertices.push_back(colors[i].red); + vertices.push_back(colors[i].green); + vertices.push_back(colors[i].blue); + vertices.push_back(colors[i].alpha); + } + } + } + } + return vertices; +} + +std::vector FillRectOp::noCoverageVertices() const { + std::vector vertices; + for (size_t i = 0; i < rects.size(); ++i) { + auto quad = Quad::MakeFromRect(rects[i], viewMatrices[i]); + auto localQuad = Quad::MakeFromRect(rects[i], localMatrices[i]); + for (int j = 3; j >= 0; --j) { + vertices.push_back(quad.point(j).x); + vertices.push_back(quad.point(j).y); + vertices.push_back(localQuad.point(j).x); + vertices.push_back(localQuad.point(j).y); + if (!colors.empty()) { + vertices.push_back(colors[i].red); + vertices.push_back(colors[i].green); + vertices.push_back(colors[i].blue); + vertices.push_back(colors[i].alpha); + } + } + } + return vertices; +} + +std::vector FillRectOp::vertices() { + if (aa != AAType::Coverage) { + return noCoverageVertices(); + } else { + return coverageVertices(); + } +} + +std::unique_ptr FillRectOp::Make(std::optional color, const Rect& rect, + const Matrix& viewMatrix, const Matrix& localMatrix) { + return std::unique_ptr(new FillRectOp(color, rect, viewMatrix, localMatrix)); +} + +FillRectOp::FillRectOp(std::optional color, const Rect& rect, const Matrix& viewMatrix, + const Matrix& localMatrix) + : DrawOp(ClassID()), colors(color ? 
std::vector{*color} : std::vector()), + rects({rect}), viewMatrices({viewMatrix}), localMatrices({localMatrix}) { + auto bounds = Rect::MakeEmpty(); + bounds.join(viewMatrix.mapRect(rect)); + setBounds(bounds); +} + +bool FillRectOp::add(std::optional color, const Rect& rect, const Matrix& viewMatrix, + const Matrix& localMatrix) { + if ((!color && !colors.empty()) || (color && colors.empty()) || !canAdd(1)) { + return false; + } + auto b = bounds(); + b.join(viewMatrix.mapRect(rect)); + setBounds(b); + rects.emplace_back(rect); + viewMatrices.emplace_back(viewMatrix); + localMatrices.emplace_back(localMatrix); + if (color) { + colors.push_back(*color); + } + return true; +} + +bool FillRectOp::canAdd(size_t count) const { + return rects.size() + count <= static_cast(aa == AAType::Coverage + ? ResourceProvider::MaxNumAAQuads() + : ResourceProvider::MaxNumNonAAQuads()); +} + +bool FillRectOp::onCombineIfPossible(Op* op) { + auto* that = static_cast(op); + if (colors.empty() != that->colors.empty() || !canAdd(that->rects.size()) || + !DrawOp::onCombineIfPossible(op)) { + return false; + } + rects.insert(rects.end(), that->rects.begin(), that->rects.end()); + viewMatrices.insert(viewMatrices.end(), that->viewMatrices.begin(), that->viewMatrices.end()); + localMatrices.insert(localMatrices.end(), that->localMatrices.begin(), that->localMatrices.end()); + colors.insert(colors.end(), that->colors.begin(), that->colors.end()); + return true; +} + +bool FillRectOp::needsIndexBuffer() const { + return rects.size() > 1 || aa == AAType::Coverage; +} + +void FillRectOp::onPrepare(Gpu* gpu) { + auto data = vertices(); + vertexBuffer = + GpuBuffer::Make(gpu->context(), BufferType::Vertex, data.data(), data.size() * sizeof(float)); + if (vertexBuffer == nullptr) { + return; + } + if (aa == AAType::Coverage) { + indexBuffer = gpu->context()->resourceProvider()->aaQuadIndexBuffer(); + } else { + indexBuffer = gpu->context()->resourceProvider()->nonAAQuadIndexBuffer(); + } +} + +void FillRectOp::onExecute(RenderPass* renderPass) { + if (vertexBuffer == nullptr || (needsIndexBuffer() && indexBuffer == nullptr)) { + return; + } + auto pipeline = + createPipeline(renderPass, QuadPerEdgeAAGeometryProcessor::Make( + renderPass->renderTarget()->width(), + renderPass->renderTarget()->height(), aa, !colors.empty())); + renderPass->bindProgramAndScissorClip(pipeline.get(), scissorRect()); + renderPass->bindBuffers(indexBuffer, vertexBuffer); + if (needsIndexBuffer()) { + uint16_t numIndicesPerQuad; + if (aa == AAType::Coverage) { + numIndicesPerQuad = ResourceProvider::NumIndicesPerAAQuad(); + } else { + numIndicesPerQuad = ResourceProvider::NumIndicesPerNonAAQuad(); + } + renderPass->drawIndexed(PrimitiveType::Triangles, 0, + static_cast(rects.size()) * numIndicesPerQuad); + } else { + renderPass->draw(PrimitiveType::TriangleStrip, 0, 4); + } +} +} // namespace tgfx diff --git a/src/gpu/ops/FillRectOp.h b/src/gpu/ops/FillRectOp.h new file mode 100644 index 00000000..a9349f02 --- /dev/null +++ b/src/gpu/ops/FillRectOp.h @@ -0,0 +1,63 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
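// Note on FillRectOp's vertex layout (illustrative; the exact stride depends on whether per-rect
// colors are present): with coverage AA each rect emits two quads, an inset quad with coverage 1.0
// and an outset quad with coverage 0.0, each offset by half a device pixel (0.5 / view-matrix
// scale), i.e. 8 vertices of [x, y, coverage, localX, localY(, r, g, b, a)] indexed through the
// shared aaQuadIndexBuffer(). Without coverage AA each rect is a plain 4-vertex quad of
// [x, y, localX, localY(, r, g, b, a)], drawn as a triangle strip for a single rect or through
// nonAAQuadIndexBuffer() when batched.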
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/ops/DrawOp.h" + +namespace tgfx { +class FillRectOp : public DrawOp { + public: + static std::unique_ptr Make(std::optional color, const Rect& rect, + const Matrix& viewMatrix, + const Matrix& localMatrix = Matrix::I()); + + bool add(std::optional color, const Rect& rect, const Matrix& viewMatrix, + const Matrix& localMatrix); + + private: + DEFINE_OP_CLASS_ID + + FillRectOp(std::optional color, const Rect& rect, const Matrix& viewMatrix, + const Matrix& localMatrix); + + bool onCombineIfPossible(Op* op) override; + + void onPrepare(Gpu* gpu) override; + + void onExecute(RenderPass* renderPass) override; + + bool canAdd(size_t count) const; + + std::vector vertices(); + + std::vector coverageVertices() const; + + std::vector noCoverageVertices() const; + + bool needsIndexBuffer() const; + + std::vector colors; + std::vector rects; + std::vector viewMatrices; + std::vector localMatrices; + std::shared_ptr vertexBuffer; + std::shared_ptr indexBuffer; +}; +} // namespace tgfx diff --git a/src/gpu/ops/Op.cpp b/src/gpu/ops/Op.cpp new file mode 100644 index 00000000..fe06cc34 --- /dev/null +++ b/src/gpu/ops/Op.cpp @@ -0,0 +1,39 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "Op.h" +#include + +namespace tgfx { +static std::atomic_uint8_t currentOpClassID = {1}; + +uint8_t Op::GenOpClassID() { + return currentOpClassID++; +} + +bool Op::combineIfPossible(Op* op) { + if (_classID != op->_classID) { + return false; + } + auto result = onCombineIfPossible(op); + if (result) { + _bounds.join(op->_bounds); + } + return result; +} +} // namespace tgfx diff --git a/src/gpu/ops/Op.h b/src/gpu/ops/Op.h new file mode 100644 index 00000000..b717c11e --- /dev/null +++ b/src/gpu/ops/Op.h @@ -0,0 +1,76 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/RenderTarget.h" +#include "gpu/TextureProxy.h" +#include "tgfx/core/Rect.h" +#include "tgfx/gpu/Context.h" + +namespace tgfx { +class RenderPass; + +#define DEFINE_OP_CLASS_ID \ + static uint8_t ClassID() { \ + static uint8_t ClassID = GenOpClassID(); \ + return ClassID; \ + } + +class Op { + public: + explicit Op(uint8_t classID) : _classID(classID) { + } + + virtual ~Op() = default; + + virtual void visitProxies(const std::function&) const { + } + + virtual void prepare(Gpu*) { + } + + virtual void execute(RenderPass* renderPass) = 0; + + bool combineIfPossible(Op* op); + + const Rect& bounds() const { + return _bounds; + } + + protected: + static uint8_t GenOpClassID(); + + void setBounds(Rect bounds) { + _bounds = bounds; + } + + void setTransformedBounds(const Rect& srcBounds, const Matrix& matrix) { + _bounds = matrix.mapRect(srcBounds); + } + + virtual bool onCombineIfPossible(Op*) { + return false; + } + + private: + uint8_t _classID = 0; + Rect _bounds = Rect::MakeEmpty(); +}; +} // namespace tgfx diff --git a/src/gpu/ops/OpsTask.cpp b/src/gpu/ops/OpsTask.cpp new file mode 100644 index 00000000..792393fc --- /dev/null +++ b/src/gpu/ops/OpsTask.cpp @@ -0,0 +1,60 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
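// Illustrative note: Op::combineIfPossible above only consults onCombineIfPossible once both ops
// share the same class ID, so a subclass can cast the incoming op to its own type unconditionally;
// on success the base class also joins the two bounds. DEFINE_OP_CLASS_ID assigns each subclass a
// unique ID on first use. A minimal hypothetical subclass would look like:
//   class MyOp : public Op {
//    public:
//     MyOp() : Op(ClassID()) {
//     }
//     void execute(RenderPass* renderPass) override;
//    private:
//     DEFINE_OP_CLASS_ID
//   };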
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "OpsTask.h" +#include "gpu/Gpu.h" +#include "gpu/RenderPass.h" + +namespace tgfx { +void OpsTask::addOp(std::unique_ptr op) { + if (!ops.empty() && ops.back()->combineIfPossible(op.get())) { + return; + } + ops.emplace_back(std::move(op)); +} + +bool OpsTask::execute(Gpu* gpu) { + if (ops.empty()) { + return false; + } + auto renderPass = gpu->getRenderPass(renderTarget, renderTargetTexture); + if (renderPass == nullptr) { + return false; + } + std::for_each(ops.begin(), ops.end(), [gpu](auto& op) { op->prepare(gpu); }); + renderPass->begin(); + auto tempOps = std::move(ops); + for (auto& op : tempOps) { + op->execute(renderPass); + } + if (renderTargetTexture) { + gpu->regenerateMipMapLevels(renderTargetTexture->getSampler()); + } + renderPass->end(); + gpu->submit(renderPass); + return true; +} + +void OpsTask::gatherProxies(std::vector* proxies) const { + if (ops.empty()) { + return; + } + auto func = [proxies](TextureProxy* proxy) { proxies->emplace_back(proxy); }; + std::for_each(ops.begin(), ops.end(), [&func](auto& op) { op->visitProxies(func); }); +} +} // namespace tgfx diff --git a/src/gpu/ops/OpsTask.h b/src/gpu/ops/OpsTask.h new file mode 100644 index 00000000..2b9dc73f --- /dev/null +++ b/src/gpu/ops/OpsTask.h @@ -0,0 +1,42 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/RenderTask.h" +#include "gpu/ops/Op.h" +#include "tgfx/gpu/Surface.h" + +namespace tgfx { +class OpsTask : public RenderTask { + public: + OpsTask(std::shared_ptr renderTarget, std::shared_ptr texture) + : RenderTask(std::move(renderTarget)), renderTargetTexture(std::move(texture)) { + } + + void addOp(std::unique_ptr op); + + bool execute(Gpu* gpu) override; + + void gatherProxies(std::vector* proxies) const override; + + private: + std::shared_ptr renderTargetTexture = nullptr; + std::vector> ops; +}; +} // namespace tgfx diff --git a/src/gpu/ops/RRectOp.cpp b/src/gpu/ops/RRectOp.cpp new file mode 100644 index 00000000..98a795f6 --- /dev/null +++ b/src/gpu/ops/RRectOp.cpp @@ -0,0 +1,280 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
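// Note (illustrative) on OpsTask::execute above: every op first allocates its GPU resources in
// prepare(), then a render pass is begun and the ops run in recording order. addOp() has already
// merged adjacent compatible ops via combineIfPossible(), so a run of mergeable FillRectOps, for
// example, reaches execution as a single batched op. After the last op, mipmap levels are
// regenerated when the target is texture-backed, and the pass is ended and submitted to the Gpu.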
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "RRectOp.h" +#include "gpu/Gpu.h" +#include "gpu/GpuBuffer.h" +#include "gpu/processors/EllipseGeometryProcessor.h" +#include "utils/MathExtra.h" + +namespace tgfx { +// We have three possible cases for geometry for a round rect. +// +// In the case of a normal fill or a stroke, we draw the round rect as a 9-patch: +// ____________ +// |_|________|_| +// | | | | +// | | | | +// | | | | +// |_|________|_| +// |_|________|_| +// +// For strokes, we don't draw the center quad. +// +// For circular round rects, in the case where the stroke width is greater than twice +// the corner radius (over stroke), we add additional geometry to mark out the rectangle +// in the center. The shared vertices are duplicated, so we can set a different outer radius +// for the fill calculation. +// ____________ +// |_|________|_| +// | |\ ____ /| | +// | | | | | | +// | | |____| | | +// |_|/______\|_| +// |_|________|_| +// +// We don't draw the center quad from the fill rect in this case. +// +// For filled rrects that need to provide a distance vector we reuse the overstroke +// geometry but make the inner rect degenerate (either a point or a horizontal or +// vertical line). + +// clang-format off +static const uint16_t gOverstrokeRRectIndices[] = { + // overstroke quads + // we place this at the beginning so that we can skip these indices when rendering normally + 16, 17, 19, 16, 19, 18, + 19, 17, 23, 19, 23, 21, + 21, 23, 22, 21, 22, 20, + 22, 16, 18, 22, 18, 20, + + // corners + 0, 1, 5, 0, 5, 4, + 2, 3, 7, 2, 7, 6, + 8, 9, 13, 8, 13, 12, + 10, 11, 15, 10, 15, 14, + + // edges + 1, 2, 6, 1, 6, 5, + 4, 5, 9, 4, 9, 8, + 6, 7, 11, 6, 11, 10, + 9, 10, 14, 9, 14, 13, + + // center + // we place this at the end so that we can ignore these indices when not rendering as filled + 5, 6, 10, 5, 10, 9, +}; +// clang-format on + +static constexpr int kOverstrokeIndicesCount = 6 * 4; +static constexpr int kCornerIndicesCount = 6 * 4; +static constexpr int kEdgeIndicesCount = 6 * 4; +static constexpr int kCenterIndicesCount = 6; + +// fill and standard stroke indices skip the overstroke "ring" +static const uint16_t* gStandardRRectIndices = gOverstrokeRRectIndices + kOverstrokeIndicesCount; + +// overstroke count is arraysize minus the center indices +// static constexpr int kIndicesPerOverstrokeRRect = +// kOverstrokeIndicesCount + kCornerIndicesCount + kEdgeIndicesCount; +// fill count skips overstroke indices and includes center +static constexpr size_t kIndicesPerFillRRect = + kCornerIndicesCount + kEdgeIndicesCount + kCenterIndicesCount; +// stroke count is fill count minus center indices +// static constexpr int kIndicesPerStrokeRRect = kCornerIndicesCount + kEdgeIndicesCount; + +std::unique_ptr RRectOp::Make(Color color, const RRect& rRect, const Matrix& viewMatrix) { + Matrix matrix = Matrix::I(); + if (!viewMatrix.invert(&matrix)) { + return nullptr; + } + if (/*!isStrokeOnly && */ 0.5f <= rRect.radii.x && 0.5f <= rRect.radii.y) { + return std::unique_ptr(new RRectOp(color, rRect, 
viewMatrix, matrix)); + } + return nullptr; +} + +RRectOp::RRectOp(Color color, const RRect& rRect, const Matrix& viewMatrix, + const Matrix& localMatrix) + : DrawOp(ClassID()), localMatrix(localMatrix) { + setTransformedBounds(rRect.rect, viewMatrix); + rRects.emplace_back(RRectWrap{color, 0.f, 0.f, rRect, viewMatrix}); +} + +bool RRectOp::onCombineIfPossible(Op* op) { + if (!DrawOp::onCombineIfPossible(op)) { + return false; + } + auto* that = static_cast(op); + if (localMatrix != that->localMatrix) { + return false; + } + rRects.insert(rRects.end(), that->rRects.begin(), that->rRects.end()); + return true; +} + +static bool UseScale(Context* context) { + return !context->caps()->floatIs32Bits; +} + +void WriteColor(std::vector& vertices, const Color& color) { + vertices.push_back(color.red); + vertices.push_back(color.green); + vertices.push_back(color.blue); + vertices.push_back(color.alpha); +} + +void RRectOp::RRectWrap::writeToVertices(std::vector& vertices, bool useScale, + AAType aa) const { + float reciprocalRadii[4] = {1e6f, 1e6f, 1e6f, 1e6f}; + if (rRect.radii.x > 0) { + reciprocalRadii[0] = 1.f / rRect.radii.x; + } + if (rRect.radii.y > 0) { + reciprocalRadii[1] = 1.f / rRect.radii.y; + } + if (innerXRadius > 0) { + reciprocalRadii[2] = 1.f / innerXRadius; + } + if (innerYRadius > 0) { + reciprocalRadii[3] = 1.f / innerYRadius; + } + // On MSAA, bloat enough to guarantee any pixel that might be touched by the rRect has + // full sample coverage. + float aaBloat = aa == AAType::MSAA ? FLOAT_SQRT2 : .5f; + // Extend out the radii to antialias. + float xOuterRadius = rRect.radii.x + aaBloat; + float yOuterRadius = rRect.radii.y + aaBloat; + + float xMaxOffset = xOuterRadius; + float yMaxOffset = yOuterRadius; + // if (!stroked) { + // For filled rRects we map a unit circle in the vertex attributes rather than + // computing an ellipse and modifying that distance, so we normalize to 1. 
+ xMaxOffset /= rRect.radii.x; + yMaxOffset /= rRect.radii.y; + // } + auto bounds = rRect.rect.makeOutset(aaBloat, aaBloat); + float yCoords[4] = {bounds.top, bounds.top + yOuterRadius, bounds.bottom - yOuterRadius, + bounds.bottom}; + float yOuterOffsets[4] = { + yMaxOffset, + FLOAT_NEARLY_ZERO, // we're using inversesqrt() in shader, so can't be exactly 0 + FLOAT_NEARLY_ZERO, yMaxOffset}; + auto maxRadius = std::max(rRect.radii.x, rRect.radii.y); + for (int i = 0; i < 4; ++i) { + auto point = Point::Make(bounds.left, yCoords[i]); + viewMatrix.mapPoints(&point, 1); + vertices.push_back(point.x); + vertices.push_back(point.y); + WriteColor(vertices, color); + vertices.push_back(xMaxOffset); + vertices.push_back(yOuterOffsets[i]); + if (useScale) { + vertices.push_back(maxRadius); + } + vertices.push_back(reciprocalRadii[0]); + vertices.push_back(reciprocalRadii[1]); + vertices.push_back(reciprocalRadii[2]); + vertices.push_back(reciprocalRadii[3]); + + point = Point::Make(bounds.left + xOuterRadius, yCoords[i]); + viewMatrix.mapPoints(&point, 1); + vertices.push_back(point.x); + vertices.push_back(point.y); + WriteColor(vertices, color); + vertices.push_back(FLOAT_NEARLY_ZERO); + vertices.push_back(yOuterOffsets[i]); + if (useScale) { + vertices.push_back(maxRadius); + } + vertices.push_back(reciprocalRadii[0]); + vertices.push_back(reciprocalRadii[1]); + vertices.push_back(reciprocalRadii[2]); + vertices.push_back(reciprocalRadii[3]); + + point = Point::Make(bounds.right - xOuterRadius, yCoords[i]); + viewMatrix.mapPoints(&point, 1); + vertices.push_back(point.x); + vertices.push_back(point.y); + WriteColor(vertices, color); + vertices.push_back(FLOAT_NEARLY_ZERO); + vertices.push_back(yOuterOffsets[i]); + if (useScale) { + vertices.push_back(maxRadius); + } + vertices.push_back(reciprocalRadii[0]); + vertices.push_back(reciprocalRadii[1]); + vertices.push_back(reciprocalRadii[2]); + vertices.push_back(reciprocalRadii[3]); + + point = Point::Make(bounds.right, yCoords[i]); + viewMatrix.mapPoints(&point, 1); + vertices.push_back(point.x); + vertices.push_back(point.y); + WriteColor(vertices, color); + vertices.push_back(xMaxOffset); + vertices.push_back(yOuterOffsets[i]); + if (useScale) { + vertices.push_back(maxRadius); + } + vertices.push_back(reciprocalRadii[0]); + vertices.push_back(reciprocalRadii[1]); + vertices.push_back(reciprocalRadii[2]); + vertices.push_back(reciprocalRadii[3]); + } +} + +void RRectOp::onPrepare(Gpu* gpu) { + auto* context = gpu->context(); + auto useScale = UseScale(context); + std::vector vertices; + for (const auto& rRectWrap : rRects) { + rRectWrap.writeToVertices(vertices, useScale, aa); + } + vertexBuffer = GpuBuffer::Make(context, BufferType::Vertex, vertices.data(), + vertices.size() * sizeof(float)); + if (vertexBuffer == nullptr) { + return; + } + + std::vector indices; + for (size_t i = 0; i < rRects.size(); ++i) { + auto offset = i * 16; + for (size_t j = 0; j < kIndicesPerFillRRect; ++j) { + indices.emplace_back(gStandardRRectIndices[j] + offset); + } + } + indexBuffer = + GpuBuffer::Make(context, BufferType::Index, &(indices[0]), indices.size() * sizeof(uint16_t)); +} + +void RRectOp::onExecute(RenderPass* renderPass) { + if (indexBuffer == nullptr || vertexBuffer == nullptr) { + return; + } + auto pipeline = createPipeline( + renderPass, EllipseGeometryProcessor::Make(renderPass->renderTarget()->width(), + renderPass->renderTarget()->height(), false, + UseScale(renderPass->context()), localMatrix)); + 
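  // Illustrative arithmetic: writeToVertices emits a 4x4 grid of positions per round rect, i.e. 16
  // vertices, which is why onPrepare offsets the shared index pattern by i * 16. A filled rrect
  // consumes kIndicesPerFillRRect = kCornerIndicesCount + kEdgeIndicesCount + kCenterIndicesCount
  // = 24 + 24 + 6 = 54 indices, so a batch of N rects issues N * 54 indices in the drawIndexed
  // call below.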
renderPass->bindProgramAndScissorClip(pipeline.get(), scissorRect()); + renderPass->bindBuffers(indexBuffer, vertexBuffer); + renderPass->drawIndexed(PrimitiveType::Triangles, 0, + static_cast(rRects.size() * kIndicesPerFillRRect)); +} +} // namespace tgfx diff --git a/src/gpu/ops/RRectOp.h b/src/gpu/ops/RRectOp.h new file mode 100644 index 00000000..5612402d --- /dev/null +++ b/src/gpu/ops/RRectOp.h @@ -0,0 +1,58 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "DrawOp.h" +#include "tgfx/core/Path.h" + +namespace tgfx { +class RRectOp : public DrawOp { + public: + static std::unique_ptr Make(Color color, const RRect& rRect, const Matrix& viewMatrix); + + private: + DEFINE_OP_CLASS_ID + + RRectOp(Color color, const RRect& rRect, const Matrix& viewMatrix, const Matrix& localMatrix); + + bool onCombineIfPossible(Op* op) override; + + void onPrepare(Gpu* gpu) override; + + void onExecute(RenderPass* renderPass) override; + + struct RRectWrap { + Color color = Color::Transparent(); + float innerXRadius = 0; + float innerYRadius = 0; + RRect rRect; + Matrix viewMatrix = Matrix::I(); + + void writeToVertices(std::vector& vertices, bool useScale, AAType aa) const; + }; + + std::vector rRects; + Matrix localMatrix = Matrix::I(); + std::shared_ptr vertexBuffer; + std::shared_ptr indexBuffer; + + // bool stroked = false; + // Point strokeWidths = Point::Zero(); +}; +} // namespace tgfx diff --git a/src/gpu/ops/TriangulatingPathOp.cpp b/src/gpu/ops/TriangulatingPathOp.cpp new file mode 100644 index 00000000..8532a8a5 --- /dev/null +++ b/src/gpu/ops/TriangulatingPathOp.cpp @@ -0,0 +1,90 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "TriangulatingPathOp.h" +#include "core/PathRef.h" +#include "gpu/Gpu.h" +#include "gpu/processors/DefaultGeometryProcessor.h" + +namespace tgfx { +std::unique_ptr TriangulatingPathOp::Make(Color color, const Path& path, + const Rect& clipBounds, + const Matrix& localMatrix) { + if (path.countVerbs() > AA_TESSELLATOR_MAX_VERB_COUNT) { + return nullptr; + } + std::vector vertices = {}; + int count = PathRef::ToAATriangles(path, clipBounds, &vertices); + if (count == 0) { + return nullptr; + } + return std::make_unique(color, vertices, count, path.getBounds(), + Matrix::I(), localMatrix); +} + +TriangulatingPathOp::TriangulatingPathOp(Color color, std::shared_ptr buffer, + int vertexCount, const Rect& bounds, + const Matrix& viewMatrix, const Matrix& localMatrix) + : DrawOp(ClassID()), color(color), buffer(std::move(buffer)), vertexCount(vertexCount), + viewMatrix(viewMatrix), localMatrix(localMatrix) { + setBounds(bounds); +} + +TriangulatingPathOp::TriangulatingPathOp(Color color, std::vector vertices, int vertexCount, + const Rect& bounds, const Matrix& viewMatrix, + const Matrix& localMatrix) + : DrawOp(ClassID()), color(color), vertices(std::move(vertices)), vertexCount(vertexCount), + viewMatrix(viewMatrix), localMatrix(localMatrix) { + setBounds(bounds); +} + +bool TriangulatingPathOp::onCombineIfPossible(Op* op) { + if (!DrawOp::onCombineIfPossible(op)) { + return false; + } + auto* that = static_cast(op); + if (viewMatrix != that->viewMatrix || localMatrix != that->localMatrix || color != that->color || + buffer != nullptr || that->buffer != nullptr) { + return false; + } + vertices.insert(vertices.end(), that->vertices.begin(), that->vertices.end()); + vertexCount += that->vertexCount; + return true; +} + +void TriangulatingPathOp::onPrepare(Gpu* gpu) { + if (buffer == nullptr) { + buffer = GpuBuffer::Make(gpu->context(), BufferType::Vertex, vertices.data(), + vertices.size() * sizeof(float)); + vertices = {}; + } +} + +void TriangulatingPathOp::onExecute(RenderPass* renderPass) { + if (buffer == nullptr) { + return; + } + auto pipeline = createPipeline( + renderPass, DefaultGeometryProcessor::Make(color, renderPass->renderTarget()->width(), + renderPass->renderTarget()->height(), viewMatrix, + localMatrix)); + renderPass->bindProgramAndScissorClip(pipeline.get(), scissorRect()); + renderPass->bindBuffers(nullptr, buffer); + renderPass->draw(PrimitiveType::Triangles, 0, vertexCount); +} +} // namespace tgfx diff --git a/src/gpu/ops/TriangulatingPathOp.h b/src/gpu/ops/TriangulatingPathOp.h new file mode 100644 index 00000000..36219414 --- /dev/null +++ b/src/gpu/ops/TriangulatingPathOp.h @@ -0,0 +1,56 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
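// Note (illustrative): TriangulatingPathOp::Make above rejects paths with more verbs than
// AA_TESSELLATOR_MAX_VERB_COUNT and otherwise triangulates the path on the CPU through
// PathRef::ToAATriangles, keeping the float vertices until onPrepare uploads them into a
// vertex-only GpuBuffer; onExecute then draws vertexCount triangles with no index buffer. Two of
// these ops can merge only while both still hold CPU-side vertices and share the same color and
// matrices; once either op already owns a GPU buffer, merging is skipped.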
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "DrawOp.h" +#include "gpu/GpuBuffer.h" +#include "tgfx/core/Path.h" + +namespace tgfx { +class TriangulatingPathOp : public DrawOp { + public: + static std::unique_ptr Make(Color color, const Path& path, + const Rect& clipBounds, + const Matrix& localMatrix); + + TriangulatingPathOp(Color color, std::shared_ptr buffer, int vertexCount, + const Rect& bounds, const Matrix& viewMatrix = Matrix::I(), + const Matrix& localMatrix = Matrix::I()); + + TriangulatingPathOp(Color color, std::vector vertices, int vertexCount, const Rect& bounds, + const Matrix& viewMatrix = Matrix::I(), + const Matrix& localMatrix = Matrix::I()); + + private: + DEFINE_OP_CLASS_ID + + bool onCombineIfPossible(Op* op) override; + + void onPrepare(Gpu* gpu) override; + + void onExecute(RenderPass* renderPass) override; + + Color color = Color::Transparent(); + std::shared_ptr buffer = nullptr; + std::vector vertices = {}; + int vertexCount = 0; + Matrix viewMatrix = Matrix::I(); + Matrix localMatrix = Matrix::I(); +}; +} // namespace tgfx diff --git a/src/gpu/processors/AARectEffect.cpp b/src/gpu/processors/AARectEffect.cpp new file mode 100644 index 00000000..32abf10a --- /dev/null +++ b/src/gpu/processors/AARectEffect.cpp @@ -0,0 +1,25 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "AARectEffect.h" + +namespace tgfx { +bool AARectEffect::onIsEqual(const FragmentProcessor& processor) const { + return rect == static_cast(processor).rect; +} +} // namespace tgfx diff --git a/src/gpu/processors/AARectEffect.h b/src/gpu/processors/AARectEffect.h new file mode 100644 index 00000000..cff0ca6a --- /dev/null +++ b/src/gpu/processors/AARectEffect.h @@ -0,0 +1,42 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
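// Note (inferred from the unconditional cast above, not stated in this diff): onIsEqual appears to
// be invoked only after the caller has already matched processor class IDs, so
// AARectEffect::onIsEqual can static_cast the other processor and simply compare the stored rects.
// A processor therefore only needs to compare the fields that influence its generated shader or
// uniform values.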
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class AARectEffect : public FragmentProcessor { + public: + static std::unique_ptr Make(const Rect& rect); + + std::string name() const override { + return "AARectEffect"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + explicit AARectEffect(const Rect& rect) : FragmentProcessor(ClassID()), rect(rect) { + } + + bool onIsEqual(const FragmentProcessor& processor) const override; + + Rect rect = Rect::MakeEmpty(); +}; +} // namespace tgfx diff --git a/src/gpu/processors/ClampedGradientEffect.cpp b/src/gpu/processors/ClampedGradientEffect.cpp new file mode 100644 index 00000000..89760abe --- /dev/null +++ b/src/gpu/processors/ClampedGradientEffect.cpp @@ -0,0 +1,42 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ClampedGradientEffect.h" + +namespace tgfx { +void ClampedGradientEffect::onComputeProcessorKey(BytesKey* bytesKey) const { + uint32_t flag = makePremultiply ? 1 : 0; + bytesKey->write(flag); +} + +bool ClampedGradientEffect::onIsEqual(const FragmentProcessor& processor) const { + const auto& that = static_cast(processor); + return makePremultiply == that.makePremultiply && leftBorderColor == that.leftBorderColor && + rightBorderColor == that.rightBorderColor; +} + +ClampedGradientEffect::ClampedGradientEffect(std::unique_ptr colorizer, + std::unique_ptr gradLayout, + Color leftBorderColor, Color rightBorderColor, + bool makePremultiplied) + : FragmentProcessor(ClassID()), leftBorderColor(leftBorderColor), + rightBorderColor(rightBorderColor), makePremultiply(makePremultiplied) { + colorizerIndex = registerChildProcessor(std::move(colorizer)); + gradLayoutIndex = registerChildProcessor(std::move(gradLayout)); +} +} // namespace tgfx diff --git a/src/gpu/processors/ClampedGradientEffect.h b/src/gpu/processors/ClampedGradientEffect.h new file mode 100644 index 00000000..e780b5db --- /dev/null +++ b/src/gpu/processors/ClampedGradientEffect.h @@ -0,0 +1,54 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
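// Note (partly inferred; the shader side is not part of this diff): ClampedGradientEffect composes
// two child processors registered through registerChildProcessor, one producing the gradient layout
// coordinate and one mapping it to a color, and remembers their indices for later emission.
// leftBorderColor and rightBorderColor are presumably used when the layout coordinate falls outside
// [0, 1], and the makePremultiply flag is written into the processor key so premultiplying and
// non-premultiplying variants compile to distinct shader programs.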
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/FragmentProcessor.h" +#include "tgfx/core/Color.h" + +namespace tgfx { +class ClampedGradientEffect : public FragmentProcessor { + public: + static std::unique_ptr Make(std::unique_ptr colorizer, + std::unique_ptr gradLayout, + Color leftBorderColor, Color rightBorderColor, + bool makePremultiply); + + std::string name() const override { + return "ClampedGradientEffect"; + } + + void onComputeProcessorKey(BytesKey* bytesKey) const override; + + protected: + DEFINE_PROCESSOR_CLASS_ID + + ClampedGradientEffect(std::unique_ptr colorizer, + std::unique_ptr gradLayout, Color leftBorderColor, + Color rightBorderColor, bool makePremultiplied); + + bool onIsEqual(const FragmentProcessor& processor) const override; + + size_t colorizerIndex = ULONG_MAX; + size_t gradLayoutIndex = ULONG_MAX; + Color leftBorderColor; + Color rightBorderColor; + bool makePremultiply; +}; +} // namespace tgfx diff --git a/src/gpu/processors/ColorMatrixFragmentProcessor.cpp b/src/gpu/processors/ColorMatrixFragmentProcessor.cpp new file mode 100644 index 00000000..3046a201 --- /dev/null +++ b/src/gpu/processors/ColorMatrixFragmentProcessor.cpp @@ -0,0 +1,25 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "gpu/processors/ColorMatrixFragmentProcessor.h" + +namespace tgfx { +bool ColorMatrixFragmentProcessor::onIsEqual(const FragmentProcessor& processor) const { + return matrix == static_cast(processor).matrix; +} +} // namespace tgfx diff --git a/src/gpu/processors/ColorMatrixFragmentProcessor.h b/src/gpu/processors/ColorMatrixFragmentProcessor.h new file mode 100644 index 00000000..4c127c29 --- /dev/null +++ b/src/gpu/processors/ColorMatrixFragmentProcessor.h @@ -0,0 +1,44 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class ColorMatrixFragmentProcessor : public FragmentProcessor { + public: + static std::unique_ptr Make(const std::array& matrix); + + std::string name() const override { + return "ColorMatrixFragmentProcessor"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + explicit ColorMatrixFragmentProcessor(const std::array& matrix) + : FragmentProcessor(ClassID()), matrix(matrix) { + } + + bool onIsEqual(const FragmentProcessor& processor) const override; + + std::array matrix; +}; +} // namespace tgfx diff --git a/src/gpu/processors/ConstColorProcessor.cpp b/src/gpu/processors/ConstColorProcessor.cpp new file mode 100644 index 00000000..907b13e9 --- /dev/null +++ b/src/gpu/processors/ConstColorProcessor.cpp @@ -0,0 +1,30 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "gpu/processors/ConstColorProcessor.h" + +namespace tgfx { +void ConstColorProcessor::onComputeProcessorKey(BytesKey* bytesKey) const { + bytesKey->write(static_cast(inputMode)); +} + +bool ConstColorProcessor::onIsEqual(const FragmentProcessor& processor) const { + const auto& that = static_cast(processor); + return inputMode == that.inputMode && color == that.color; +} +} // namespace tgfx diff --git a/src/gpu/processors/ConstColorProcessor.h b/src/gpu/processors/ConstColorProcessor.h new file mode 100644 index 00000000..ff6c881f --- /dev/null +++ b/src/gpu/processors/ConstColorProcessor.h @@ -0,0 +1,49 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" +#include "tgfx/core/Color.h" + +namespace tgfx { +enum class InputMode { Ignore = 0, ModulateRGBA = 1, ModulateA = 2 }; + +class ConstColorProcessor : public FragmentProcessor { + public: + static std::unique_ptr Make(Color color, InputMode inputMode); + + std::string name() const override { + return "ConstColorProcessor"; + } + + void onComputeProcessorKey(BytesKey* bytesKey) const override; + + protected: + DEFINE_PROCESSOR_CLASS_ID + + ConstColorProcessor(Color color, InputMode mode) + : FragmentProcessor(ClassID()), color(color), inputMode(mode) { + } + + bool onIsEqual(const FragmentProcessor& processor) const override; + + Color color; + InputMode inputMode; +}; +} // namespace tgfx diff --git a/src/gpu/processors/DefaultGeometryProcessor.cpp b/src/gpu/processors/DefaultGeometryProcessor.cpp new file mode 100644 index 00000000..c96b1456 --- /dev/null +++ b/src/gpu/processors/DefaultGeometryProcessor.cpp @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "DefaultGeometryProcessor.h" + +namespace tgfx { +DefaultGeometryProcessor::DefaultGeometryProcessor(Color color, int width, int height, + const Matrix& viewMatrix, + const Matrix& localMatrix) + : GeometryProcessor(ClassID()), color(color), width(width), height(height), + viewMatrix(viewMatrix), localMatrix(localMatrix) { + position = {"aPosition", SLType::Float2}; + coverage = {"inCoverage", SLType::Float}; + setVertexAttributes(&position, 2); +} +} // namespace tgfx diff --git a/src/gpu/processors/DefaultGeometryProcessor.h b/src/gpu/processors/DefaultGeometryProcessor.h new file mode 100644 index 00000000..6d9e579d --- /dev/null +++ b/src/gpu/processors/DefaultGeometryProcessor.h @@ -0,0 +1,49 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
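// Illustrative sketch (not part of the patch): a hypothetical call site for
// ConstColorProcessor::Make(), matching the declaration in ConstColorProcessor.h above.
// The color value, the InputMode choice, and the helper name are placeholder assumptions,
// not code from this repository.
#include "gpu/processors/ConstColorProcessor.h"

std::unique_ptr<tgfx::FragmentProcessor> MakeOpaqueRedStage() {
  // InputMode::Ignore: the constant color replaces the incoming color instead of modulating it.
  return tgfx::ConstColorProcessor::Make(tgfx::Color{1.0f, 0.0f, 0.0f, 1.0f},
                                         tgfx::InputMode::Ignore);
}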
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "GeometryProcessor.h" + +namespace tgfx { +class DefaultGeometryProcessor : public GeometryProcessor { + public: + static std::unique_ptr Make(Color color, int width, int height, + const Matrix& viewMatrix, + const Matrix& localMatrix); + + std::string name() const override { + return "DefaultGeometryProcessor"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + DefaultGeometryProcessor(Color color, int width, int height, const Matrix& viewMatrix, + const Matrix& localMatrix); + + Attribute position; + Attribute coverage; + + Color color; + int width = 1; + int height = 1; + Matrix viewMatrix = Matrix::I(); + Matrix localMatrix = Matrix::I(); +}; +} // namespace tgfx diff --git a/src/gpu/processors/DeviceSpaceTextureEffect.cpp b/src/gpu/processors/DeviceSpaceTextureEffect.cpp new file mode 100644 index 00000000..8479dc69 --- /dev/null +++ b/src/gpu/processors/DeviceSpaceTextureEffect.cpp @@ -0,0 +1,38 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "DeviceSpaceTextureEffect.h" + +namespace tgfx { +DeviceSpaceTextureEffect::DeviceSpaceTextureEffect(std::shared_ptr texture, + ImageOrigin deviceOrigin) + : FragmentProcessor(ClassID()), texture(std::move(texture)) { + if (deviceOrigin == ImageOrigin::BottomLeft) { + deviceCoordMatrix.postScale(1, -1); + deviceCoordMatrix.postTranslate(0, 1); + } + auto scale = this->texture->getTextureCoord(static_cast(this->texture->width()), + static_cast(this->texture->height())); + deviceCoordMatrix.postScale(scale.x, scale.y); +} + +bool DeviceSpaceTextureEffect::onIsEqual(const FragmentProcessor& processor) const { + const auto& that = static_cast(processor); + return texture == that.texture && deviceCoordMatrix == that.deviceCoordMatrix; +} +} // namespace tgfx diff --git a/src/gpu/processors/DeviceSpaceTextureEffect.h b/src/gpu/processors/DeviceSpaceTextureEffect.h new file mode 100644 index 00000000..66006e3d --- /dev/null +++ b/src/gpu/processors/DeviceSpaceTextureEffect.h @@ -0,0 +1,51 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class DeviceSpaceTextureEffect : public FragmentProcessor { + public: + static std::unique_ptr Make(std::shared_ptr texture, + ImageOrigin deviceOrigin); + + std::string name() const override { + return "DeviceSpaceTextureEffect"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + DeviceSpaceTextureEffect(std::shared_ptr texture, ImageOrigin deviceOrigin); + + bool onIsEqual(const FragmentProcessor& processor) const override; + + size_t onCountTextureSamplers() const override { + return 1; + } + + const TextureSampler* onTextureSampler(size_t) const override { + return texture->getSampler(); + } + + std::shared_ptr texture; + Matrix deviceCoordMatrix = Matrix::I(); +}; +} // namespace tgfx diff --git a/src/gpu/processors/DualBlurFragmentProcessor.cpp b/src/gpu/processors/DualBlurFragmentProcessor.cpp new file mode 100644 index 00000000..3fde1224 --- /dev/null +++ b/src/gpu/processors/DualBlurFragmentProcessor.cpp @@ -0,0 +1,38 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
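// Side note (hedged sketch, not part of the patch): the deviceCoordMatrix adjustment in
// DeviceSpaceTextureEffect above composes postScale(1, -1) with postTranslate(0, 1), which
// maps a normalized top-left-origin y coordinate to its bottom-left-origin mirror:
//   y -> (y * -1) + 1 == 1 - y
// The standalone helper below is only a transcription of that arithmetic.
float FlipNormalizedY(float y) {
  return y * -1.0f + 1.0f;  // identical to 1 - y
}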
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "gpu/processors/DualBlurFragmentProcessor.h" + +namespace tgfx { +DualBlurFragmentProcessor::DualBlurFragmentProcessor(DualBlurPassMode passMode, + std::unique_ptr processor, + Point blurOffset, Point texelSize) + : FragmentProcessor(ClassID()), passMode(passMode), blurOffset(blurOffset), + texelSize(texelSize) { + registerChildProcessor(std::move(processor)); +} + +void DualBlurFragmentProcessor::onComputeProcessorKey(BytesKey* bytesKey) const { + bytesKey->write(static_cast(passMode)); +} + +bool DualBlurFragmentProcessor::onIsEqual(const FragmentProcessor& processor) const { + const auto& that = static_cast(processor); + return passMode == that.passMode && blurOffset == that.blurOffset && texelSize == that.texelSize; +} +} // namespace tgfx diff --git a/src/gpu/processors/DualBlurFragmentProcessor.h b/src/gpu/processors/DualBlurFragmentProcessor.h new file mode 100644 index 00000000..9690c55f --- /dev/null +++ b/src/gpu/processors/DualBlurFragmentProcessor.h @@ -0,0 +1,50 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +enum class DualBlurPassMode { Up = 0, Down = 1 }; + +class DualBlurFragmentProcessor : public FragmentProcessor { + public: + static std::unique_ptr Make( + DualBlurPassMode passMode, std::unique_ptr processor, Point blurOffset, + Point texelSize); + + std::string name() const override { + return "DualBlurFragmentProcessor"; + } + + void onComputeProcessorKey(BytesKey* bytesKey) const override; + + protected: + DEFINE_PROCESSOR_CLASS_ID + + DualBlurFragmentProcessor(DualBlurPassMode passMode, std::unique_ptr processor, + Point blurOffset, Point texelSize); + + bool onIsEqual(const FragmentProcessor& processor) const override; + + DualBlurPassMode passMode; + Point blurOffset; + Point texelSize; +}; +} // namespace tgfx diff --git a/src/gpu/processors/DualIntervalGradientColorizer.cpp b/src/gpu/processors/DualIntervalGradientColorizer.cpp new file mode 100644 index 00000000..27b1e857 --- /dev/null +++ b/src/gpu/processors/DualIntervalGradientColorizer.cpp @@ -0,0 +1,27 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "DualIntervalGradientColorizer.h" + +namespace tgfx { +bool DualIntervalGradientColorizer::onIsEqual(const FragmentProcessor& processor) const { + const auto& that = static_cast(processor); + return scale01 == that.scale01 && bias01 == that.bias01 && scale23 == that.scale23 && + bias23 == that.bias23 && threshold == that.threshold; +} +} // namespace tgfx diff --git a/src/gpu/processors/DualIntervalGradientColorizer.h b/src/gpu/processors/DualIntervalGradientColorizer.h new file mode 100644 index 00000000..c5b9c4e6 --- /dev/null +++ b/src/gpu/processors/DualIntervalGradientColorizer.h @@ -0,0 +1,51 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
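// Illustrative sketch (not part of the patch): constructing one down-sampling pass with
// DualBlurFragmentProcessor::Make() as declared above. 'source' is an assumed, pre-built
// FragmentProcessor for the layer being blurred; the offset and texel-size values are
// placeholders for a 512x512 target, and the helper name is hypothetical.
#include <utility>
#include "gpu/processors/DualBlurFragmentProcessor.h"

std::unique_ptr<tgfx::FragmentProcessor> MakeDownPass(
    std::unique_ptr<tgfx::FragmentProcessor> source) {
  return tgfx::DualBlurFragmentProcessor::Make(
      tgfx::DualBlurPassMode::Down, std::move(source), tgfx::Point::Make(1.0f, 1.0f),
      tgfx::Point::Make(1.0f / 512.0f, 1.0f / 512.0f));
}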
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" +#include "tgfx/core/Color.h" + +namespace tgfx { +class DualIntervalGradientColorizer : public FragmentProcessor { + public: + static std::unique_ptr Make(Color c0, Color c1, Color c2, Color c3, + float threshold); + + std::string name() const override { + return "DualIntervalGradientColorizer"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + DualIntervalGradientColorizer(Color scale01, Color bias01, Color scale23, Color bias23, + float threshold) + : FragmentProcessor(ClassID()), scale01(scale01), bias01(bias01), scale23(scale23), + bias23(bias23), threshold(threshold) { + } + + bool onIsEqual(const FragmentProcessor& processor) const override; + + Color scale01; + Color bias01; + Color scale23; + Color bias23; + float threshold; +}; +} // namespace tgfx diff --git a/src/gpu/processors/EllipseGeometryProcessor.cpp b/src/gpu/processors/EllipseGeometryProcessor.cpp new file mode 100644 index 00000000..8595a4c7 --- /dev/null +++ b/src/gpu/processors/EllipseGeometryProcessor.cpp @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "EllipseGeometryProcessor.h" + +namespace tgfx { +EllipseGeometryProcessor::EllipseGeometryProcessor(int width, int height, bool stroke, + bool useScale, const Matrix& localMatrix) + : GeometryProcessor(ClassID()), width(width), height(height), localMatrix(localMatrix), + stroke(stroke), useScale(useScale) { + inPosition = {"inPosition", SLType::Float2}; + inColor = {"inColor", SLType::Float4}; + if (useScale) { + inEllipseOffset = {"inEllipseOffset", SLType::Float3}; + } else { + inEllipseOffset = {"inEllipseOffset", SLType::Float2}; + } + inEllipseRadii = {"inEllipseRadii", SLType::Float4}; + this->setVertexAttributes(&inPosition, 4); +} + +void EllipseGeometryProcessor::onComputeProcessorKey(BytesKey* bytesKey) const { + uint32_t flags = stroke ? 1 : 0; + bytesKey->write(flags); +} +} // namespace tgfx diff --git a/src/gpu/processors/EllipseGeometryProcessor.h b/src/gpu/processors/EllipseGeometryProcessor.h new file mode 100644 index 00000000..feb5e96e --- /dev/null +++ b/src/gpu/processors/EllipseGeometryProcessor.h @@ -0,0 +1,61 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "GeometryProcessor.h" + +namespace tgfx { +/** + * A presentation shared by Skia: + * https://www.essentialmath.com/GDC2015/VanVerth_Jim_DrawingAntialiasedEllipse.pdf + * + * There is a formula that approximates the distance from a point to the ellipse; + * its proof can be found in the link below. + * https://www.researchgate.net/publication/222440289_Sampson_PD_Fitting_conic_sections_to_very_scattered_data_An_iterative_refinement_of_the_Bookstein_algorithm_Comput_Graphics_Image_Process_18_97-108 + */ +class EllipseGeometryProcessor : public GeometryProcessor { + public: + static std::unique_ptr<EllipseGeometryProcessor> Make(int width, int height, bool stroke, + bool useScale, const Matrix& localMatrix); + + std::string name() const override { + return "EllipseGeometryProcessor"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + EllipseGeometryProcessor(int width, int height, bool stroke, bool useScale, + const Matrix& localMatrix); + + void onComputeProcessorKey(BytesKey* bytesKey) const override; + + Attribute inPosition; + Attribute inColor; + Attribute inEllipseOffset; + Attribute inEllipseRadii; + + int width = 1; + int height = 1; + // The UVs also use the inPosition coordinates; localMatrix can convert the inPosition coordinates into UV coordinates. + Matrix localMatrix; + bool stroke; + bool useScale; +}; +} // namespace tgfx diff --git a/src/gpu/processors/EmptyXferProcessor.cpp b/src/gpu/processors/EmptyXferProcessor.cpp new file mode 100644 index 00000000..c8362065 --- /dev/null +++ b/src/gpu/processors/EmptyXferProcessor.cpp @@ -0,0 +1,25 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license.
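// Hedged side note on the references cited in EllipseGeometryProcessor.h above: they describe
// the first-order (Sampson) distance to the implicit ellipse f(x, y) = (x/a)^2 + (y/b)^2 - 1,
// namely d ~= f / |grad f|. The helper below is only a plain transcription of that formula for
// readers of this patch, not code from this repository.
#include <cmath>

float ApproxEllipseDistance(float x, float y, float a, float b) {
  float f = (x * x) / (a * a) + (y * y) / (b * b) - 1.0f;  // implicit ellipse value
  float gx = 2.0f * x / (a * a);                           // df/dx
  float gy = 2.0f * y / (b * b);                           // df/dy
  return f / std::sqrt(gx * gx + gy * gy);                 // negative inside, positive outside
}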
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "EmptyXferProcessor.h" + +namespace tgfx { +void EmptyXferProcessor::computeProcessorKey(Context*, BytesKey* bytesKey) const { + bytesKey->write(classID()); +} +} // namespace tgfx diff --git a/src/gpu/processors/EmptyXferProcessor.h b/src/gpu/processors/EmptyXferProcessor.h new file mode 100644 index 00000000..ef761499 --- /dev/null +++ b/src/gpu/processors/EmptyXferProcessor.h @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "XferProcessor.h" + +namespace tgfx { +class EmptyXferProcessor : public XferProcessor { + public: + static const EmptyXferProcessor* GetInstance(); + + std::string name() const override { + return "EmptyXferProcessor"; + } + + void computeProcessorKey(Context* context, BytesKey* bytesKey) const override; + + protected: + EmptyXferProcessor() : XferProcessor(ClassID()) { + } + + private: + DEFINE_PROCESSOR_CLASS_ID +}; +} // namespace tgfx diff --git a/src/gpu/processors/FragmentProcessor.cpp b/src/gpu/processors/FragmentProcessor.cpp new file mode 100644 index 00000000..7f231800 --- /dev/null +++ b/src/gpu/processors/FragmentProcessor.cpp @@ -0,0 +1,225 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "gpu/processors/FragmentProcessor.h" +#include "SeriesFragmentProcessor.h" +#include "gpu/Pipeline.h" +#include "gpu/processors/XfermodeFragmentProcessor.h" + +namespace tgfx { +bool ComputeTotalInverse(const FPArgs& args, Matrix* totalInverse) { + if (totalInverse == nullptr) { + return false; + } + totalInverse->reset(); + if (!args.preLocalMatrix.isIdentity()) { + totalInverse->preConcat(args.preLocalMatrix); + } + if (!args.postLocalMatrix.isIdentity()) { + totalInverse->postConcat(args.postLocalMatrix); + } + return totalInverse->invert(totalInverse); +} + +std::unique_ptr FragmentProcessor::MulChildByInputAlpha( + std::unique_ptr child) { + if (child == nullptr) { + return nullptr; + } + return XfermodeFragmentProcessor::MakeFromDstProcessor(std::move(child), BlendMode::DstIn); +} + +std::unique_ptr FragmentProcessor::MulInputByChildAlpha( + std::unique_ptr child, bool inverted) { + if (child == nullptr) { + return nullptr; + } + return XfermodeFragmentProcessor::MakeFromDstProcessor( + std::move(child), inverted ? BlendMode::SrcOut : BlendMode::SrcIn); +} + +std::unique_ptr FragmentProcessor::RunInSeries( + std::unique_ptr* series, int count) { + return SeriesFragmentProcessor::Make(series, count); +} + +void FragmentProcessor::computeProcessorKey(Context* context, BytesKey* bytesKey) const { + bytesKey->write(classID()); + onComputeProcessorKey(bytesKey); + auto textureSamplerCount = onCountTextureSamplers(); + for (size_t i = 0; i < textureSamplerCount; ++i) { + textureSampler(i)->computeKey(context, bytesKey); + } + for (const auto& childProcessor : childProcessors) { + childProcessor->computeProcessorKey(context, bytesKey); + } +} + +size_t FragmentProcessor::registerChildProcessor(std::unique_ptr child) { + auto index = childProcessors.size(); + childProcessors.push_back(std::move(child)); + return index; +} + +FragmentProcessor::Iter::Iter(const Pipeline* pipeline) { + for (int i = static_cast(pipeline->numFragmentProcessors()) - 1; i >= 0; --i) { + fpStack.push_back(pipeline->getFragmentProcessor(i)); + } +} + +const FragmentProcessor* FragmentProcessor::Iter::next() { + if (fpStack.empty()) { + return nullptr; + } + const FragmentProcessor* back = fpStack.back(); + fpStack.pop_back(); + for (size_t i = 0; i < back->numChildProcessors(); ++i) { + fpStack.push_back(back->childProcessor(back->numChildProcessors() - i - 1)); + } + return back; +} + +FragmentProcessor::CoordTransformIter::CoordTransformIter(const Pipeline* pipeline) + : fpIter(pipeline) { + currFP = fpIter.next(); +} + +const CoordTransform* FragmentProcessor::CoordTransformIter::next() { + if (!currFP) { + return nullptr; + } + while (currentIndex == currFP->numCoordTransforms()) { + currentIndex = 0; + currFP = fpIter.next(); + if (!currFP) { + return nullptr; + } + } + return currFP->coordTransform(currentIndex++); +} + +bool FragmentProcessor::isEqual(const FragmentProcessor& that) const { + if (classID() != that.classID()) { + return false; + } + if (this->numTextureSamplers() != that.numTextureSamplers()) { + return false; + } + for (size_t i = 0; i < numTextureSamplers(); ++i) { + if (textureSampler(i) != that.textureSampler(i)) { + return false; + } + } + if (numCoordTransforms() != that.numCoordTransforms()) { + return false; + } + for (size_t i = 0; i < numCoordTransforms(); ++i) { + if (coordTransform(i)->matrix != that.coordTransform(i)->matrix) { + return false; + } + } + if 
(!onIsEqual(that)) { + return false; + } + if (numChildProcessors() != that.numChildProcessors()) { + return false; + } + for (size_t i = 0; i < numChildProcessors(); ++i) { + if (!childProcessor(i)->isEqual(*that.childProcessor(i))) { + return false; + } + } + return true; +} + +void FragmentProcessor::visitProxies(const std::function& func) const { + onVisitProxies(func); + for (const auto& fp : childProcessors) { + fp->visitProxies(func); + } +} + +void FragmentProcessor::setData(UniformBuffer* uniformBuffer) const { + onSetData(uniformBuffer); +} + +void FragmentProcessor::emitChild(size_t childIndex, const std::string& inputColor, + std::string* outputColor, EmitArgs& args, + std::function coordFunc) const { + auto* fragBuilder = args.fragBuilder; + outputColor->append(fragBuilder->mangleString()); + fragBuilder->codeAppendf("vec4 %s;", outputColor->c_str()); + internalEmitChild(childIndex, inputColor, *outputColor, args, std::move(coordFunc)); +} + +void FragmentProcessor::emitChild(size_t childIndex, const std::string& inputColor, + EmitArgs& parentArgs) const { + internalEmitChild(childIndex, inputColor, parentArgs.outputColor, parentArgs); +} + +void FragmentProcessor::internalEmitChild( + size_t childIndex, const std::string& inputColor, const std::string& outputColor, + EmitArgs& args, std::function coordFunc) const { + auto* fragBuilder = args.fragBuilder; + fragBuilder->onBeforeChildProcEmitCode(); // call first so mangleString is updated + // Prepare a mangled input color variable if the default is not used, + // inputName remains the empty string if no variable is needed. + std::string inputName; + if (!inputColor.empty() && inputColor != "vec4(1.0)" && inputColor != "vec4(1)") { + // The input name is based off of the current mangle string, and + // since this is called after onBeforeChildProcEmitCode(), it will be + // unique to the child processor (exactly what we want for its input). + inputName += "_childInput"; + inputName += fragBuilder->mangleString(); + fragBuilder->codeAppendf("vec4 %s = %s;", inputName.c_str(), inputColor.c_str()); + } + + const auto* childProc = childProcessor(childIndex); + + // emit the code for the child in its own scope + fragBuilder->codeAppend("{\n"); + fragBuilder->codeAppendf("// Child Index %d (mangle: %s): %s\n", childIndex, + fragBuilder->mangleString().c_str(), childProc->name().c_str()); + TransformedCoordVars coordVars = args.transformedCoords->childInputs(childIndex); + TextureSamplers textureSamplers = args.textureSamplers->childInputs(childIndex); + + EmitArgs childArgs(fragBuilder, args.uniformHandler, outputColor, + inputName.empty() ? 
"vec4(1.0)" : inputName, &coordVars, &textureSamplers, + std::move(coordFunc)); + childProcessor(childIndex)->emitCode(childArgs); + fragBuilder->codeAppend("}\n"); + + fragBuilder->onAfterChildProcEmitCode(); +} + +template +FragmentProcessor::BuilderInputProvider +FragmentProcessor::BuilderInputProvider::childInputs(size_t childIndex) const { + const FragmentProcessor* child = fragmentProcessor->childProcessor(childIndex); + FragmentProcessor::Iter iter(fragmentProcessor); + int numToSkip = 0; + while (true) { + const FragmentProcessor* processor = iter.next(); + if (processor == child) { + return BuilderInputProvider(child, t + numToSkip); + } + numToSkip += (processor->*COUNT)(); + } +} + +} // namespace tgfx diff --git a/src/gpu/processors/FragmentProcessor.h b/src/gpu/processors/FragmentProcessor.h new file mode 100644 index 00000000..71ae5ddf --- /dev/null +++ b/src/gpu/processors/FragmentProcessor.h @@ -0,0 +1,312 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/CoordTransform.h" +#include "gpu/FragmentShaderBuilder.h" +#include "gpu/SamplerState.h" +#include "gpu/Texture.h" +#include "gpu/TextureProxy.h" +#include "gpu/UniformBuffer.h" +#include "gpu/UniformHandler.h" +#include "gpu/processors/Processor.h" + +namespace tgfx { +struct FPArgs { + FPArgs(Context* context, uint32_t surfaceFlags) : context(context), surfaceFlags(surfaceFlags) { + } + + Context* context = nullptr; + uint32_t surfaceFlags = 0; + Matrix preLocalMatrix = Matrix::I(); + Matrix postLocalMatrix = Matrix::I(); +}; + +bool ComputeTotalInverse(const FPArgs& args, Matrix* totalInverse); + +class Pipeline; + +class FragmentProcessor : public Processor { + public: + /** + * In many instances (e.g. Shader::asFragmentProcessor() implementations) it is desirable to + * only consider the input color's alpha. However, there is a competing desire to have reusable + * FragmentProcessor subclasses that can be used in other scenarios where the entire input + * color is considered. This function exists to filter the input color and pass it to a FP. It + * does so by returning a parent FP that multiplies the passed in FPs output by the parent's + * input alpha. The passed in FP will not receive an input color. + */ + static std::unique_ptr MulChildByInputAlpha( + std::unique_ptr child); + + /** + * Returns the input modulated by the child's alpha. The passed in FP will not receive an input + * color. 
+ * + * @param inverted false: output = input * child.a; true: output = input * (1 - child.a) + */ + static std::unique_ptr MulInputByChildAlpha( + std::unique_ptr child, bool inverted = false); + + /** + * Returns a fragment processor that runs the passed in array of fragment processors in a + * series. The original input is passed to the first, the first's output is passed to the + * second, etc. The output of the returned processor is the output of the last processor of the + * series. + * + * The array elements with be moved. + */ + static std::unique_ptr RunInSeries(std::unique_ptr* series, + int count); + + size_t numTextureSamplers() const { + return onCountTextureSamplers(); + } + + const TextureSampler* textureSampler(size_t i) const { + return onTextureSampler(i); + } + + SamplerState samplerState(size_t i) const { + return onSamplerState(i); + } + + void computeProcessorKey(Context* context, BytesKey* bytesKey) const override; + + size_t numChildProcessors() const { + return childProcessors.size(); + } + + const FragmentProcessor* childProcessor(size_t index) const { + return childProcessors[index].get(); + } + + size_t numCoordTransforms() const { + return coordTransforms.size(); + } + + void visitProxies(const std::function& func) const; + + /** + * Returns the coordinate transformation at index. index must be valid according to + * numCoordTransforms(). + */ + const CoordTransform* coordTransform(size_t index) const { + return coordTransforms[index]; + } + + bool isEqual(const FragmentProcessor& that) const; + + /** + * Pre-order traversal of a FP hierarchy, or of the forest of FPs in a Pipeline. In the latter + * case the tree rooted at each FP in the Pipeline is visited successively. + */ + class Iter { + public: + explicit Iter(const FragmentProcessor* fp) { + fpStack.push_back(fp); + } + + explicit Iter(const Pipeline* pipeline); + + const FragmentProcessor* next(); + + private: + std::vector fpStack; + }; + + /** + * Iterates over all the CoordTransforms owned by the forest of FragmentProcessors in a Pipeline. + * FPs are visited in the same order as Iter and each of an FP's CoordTransforms are visited in + * order. + */ + class CoordTransformIter { + public: + explicit CoordTransformIter(const Pipeline* pipeline); + + const CoordTransform* next(); + + private: + const FragmentProcessor* currFP = nullptr; + size_t currentIndex = 0; + FragmentProcessor::Iter fpIter; + }; + + template + class BuilderInputProvider { + public: + BuilderInputProvider(const FragmentProcessor* fp, const T* t) : fragmentProcessor(fp), t(t) { + } + + const T& operator[](size_t i) const { + return t[i]; + } + + size_t count() const { + return (fragmentProcessor->*COUNT)(); + } + + BuilderInputProvider childInputs(size_t childIndex) const; + + private: + const FragmentProcessor* fragmentProcessor; + const T* t; + }; + + using TransformedCoordVars = + BuilderInputProvider; + using TextureSamplers = + BuilderInputProvider; + + struct EmitArgs { + EmitArgs(FragmentShaderBuilder* fragBuilder, UniformHandler* uniformHandler, + std::string outputColor, std::string inputColor, + const TransformedCoordVars* transformedCoords, const TextureSamplers* textureSamplers, + std::function coordFunc = {}) + : fragBuilder(fragBuilder), uniformHandler(uniformHandler), + outputColor(std::move(outputColor)), inputColor(std::move(inputColor)), + transformedCoords(transformedCoords), textureSamplers(textureSamplers), + coordFunc(std::move(coordFunc)) { + } + /** + * Interface used to emit code in the shaders. 
+ */ + FragmentShaderBuilder* fragBuilder; + UniformHandler* uniformHandler; + /** + * A predefined vec4 in the FS in which the stage should place its output color (or coverage). + */ + const std::string outputColor; + /** + * A vec4 that holds the input color to the stage in the FS. + */ + const std::string inputColor; + /** + * Fragment shader variables containing the coords computed using each of the + * FragmentProcessor's CoordTransforms. + */ + const TransformedCoordVars* transformedCoords; + /** + * Contains one entry for each TextureSampler of the Processor. These can be passed to the + * builder to emit texture reads in the generated code. + */ + const TextureSamplers* textureSamplers; + const std::function coordFunc; + }; + + /** + * Called when the program stage should insert its code into the shaders. The code in each shader + * will be in its own block ({}) and so locally scoped names will not collide across stages. + */ + virtual void emitCode(EmitArgs& args) const = 0; + + void setData(UniformBuffer* uniformBuffer) const; + + /** + * Emit the child with the default input color (solid white) + */ + void emitChild(size_t childIndex, std::string* outputColor, EmitArgs& parentArgs, + std::function coordFunc = {}) const { + emitChild(childIndex, "", outputColor, parentArgs, std::move(coordFunc)); + } + + /** + * Will emit the code of a child proc in its own scope. Pass in the parent's EmitArgs and + * emitChild will automatically extract the coords and samplers of that child and pass them + * on to the child's emitCode(). Also, any uniforms or functions emitted by the child will + * have their names mangled to prevent redefinitions. The output color name is also mangled + * therefore in an in/out param. It will be declared in mangled form by emitChild(). It is + * legal to pass empty string as inputColor, since all fragment processors are required to work + * without an input color. + */ + void emitChild(size_t childIndex, const std::string& inputColor, std::string* outputColor, + EmitArgs& parentArgs, + std::function coordFunc = {}) const; + + /** + * Variation that uses the parent's output color variable to hold the child's output. + */ + void emitChild(size_t childIndex, const std::string& inputColor, EmitArgs& parentArgs) const; + + protected: + explicit FragmentProcessor(uint32_t classID) : Processor(classID) { + } + + /** + * FragmentProcessor subclasses call this from their constructor to register any child + * FragmentProcessors they have. This must be called AFTER all texture accesses and coord + * transforms have been added. + * This is for processors whose shader code will be composed of nested processors whose output + * colors will be combined somehow to produce its output color. Registering these child + * processors will allow the ProgramBuilder to automatically handle their transformed coords and + * texture accesses and mangle their uniform and output color names. + */ + size_t registerChildProcessor(std::unique_ptr child); + + /** + * Fragment Processor subclasses call this from their constructor to register coordinate + * transformations. Coord transforms provide a mechanism for a processor to receive coordinates + * in their FS code. The matrix expresses a transformation from local space. For a given + * fragment the matrix will be applied to the local coordinate that maps to the fragment. + * + * This must only be called from the constructor because Processors are immutable. 
The + * processor subclass manages the lifetime of the transformations (this function only stores a + * pointer). The CoordTransform is typically a member field of the Processor subclass. + * + * A processor subclass that has multiple methods of construction should always add its coord + * transforms in a consistent order. + */ + void addCoordTransform(const CoordTransform* transform) { + coordTransforms.push_back(transform); + } + + virtual void onSetData(UniformBuffer*) const { + } + + private: + virtual void onComputeProcessorKey(BytesKey*) const { + } + + virtual size_t onCountTextureSamplers() const { + return 0; + } + + virtual const TextureSampler* onTextureSampler(size_t) const { + return nullptr; + } + + virtual SamplerState onSamplerState(size_t) const { + return {}; + } + + virtual bool onIsEqual(const FragmentProcessor&) const { + return true; + } + + virtual void onVisitProxies(const std::function&) const { + } + + void internalEmitChild(size_t, const std::string&, const std::string&, EmitArgs&, + std::function = {}) const; + + std::vector coordTransforms; + std::vector> childProcessors; +}; +} // namespace tgfx diff --git a/src/gpu/processors/GeometryProcessor.cpp b/src/gpu/processors/GeometryProcessor.cpp new file mode 100644 index 00000000..17695f5e --- /dev/null +++ b/src/gpu/processors/GeometryProcessor.cpp @@ -0,0 +1,115 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GeometryProcessor.h" + +namespace tgfx { +static constexpr char TRANSFORM_UNIFORM_PREFIX[] = "CoordTransformMatrix_"; + +/** + * Returns the size of the attrib type in bytes. 
+ */ +static constexpr size_t VertexAttribTypeSize(SLType type) { + switch (type) { + case SLType::Float: + return sizeof(float); + case SLType::Float2: + return 2 * sizeof(float); + case SLType::Float3: + return 3 * sizeof(float); + case SLType::Float4: + return 4 * sizeof(float); + case SLType::Int: + return sizeof(int32_t); + case SLType::Int2: + return 2 * sizeof(int32_t); + case SLType::Int3: + return 3 * sizeof(int32_t); + case SLType::Int4: + return 4 * sizeof(int32_t); + default: + return 0; + } +} + +template +static constexpr T Align4(T x) { + return (x + 3) >> 2 << 2; +} + +size_t GeometryProcessor::Attribute::sizeAlign4() const { + return Align4(VertexAttribTypeSize(_gpuType)); +} + +void GeometryProcessor::computeProcessorKey(Context*, BytesKey* bytesKey) const { + bytesKey->write(classID()); + onComputeProcessorKey(bytesKey); + for (const auto* attribute : attributes) { + attribute->computeKey(bytesKey); + } +} + +void GeometryProcessor::setVertexAttributes(const Attribute* attrs, int attrCount) { + for (int i = 0; i < attrCount; ++i) { + if (attrs[i].isInitialized()) { + attributes.push_back(attrs + i); + } + } +} + +void GeometryProcessor::setTransformDataHelper(const Matrix& localMatrix, + UniformBuffer* uniformBuffer, + FPCoordTransformIter* transformIter) const { + int i = 0; + while (const CoordTransform* coordTransform = transformIter->next()) { + Matrix combined = Matrix::I(); + combined.setConcat(coordTransform->getTotalMatrix(), localMatrix); + std::string uniformName = TRANSFORM_UNIFORM_PREFIX; + uniformName += std::to_string(i); + uniformBuffer->setData(uniformName, combined); + ++i; + } +} + +void GeometryProcessor::emitTransforms(VertexShaderBuilder* vertexBuilder, + VaryingHandler* varyingHandler, + UniformHandler* uniformHandler, + const ShaderVar& localCoordsVar, + FPCoordTransformHandler* transformHandler) const { + std::string localCoords = "vec3("; + localCoords += localCoordsVar.name(); + localCoords += ", 1)"; + int i = 0; + while (transformHandler->nextCoordTransform() != nullptr) { + std::string strUniName = TRANSFORM_UNIFORM_PREFIX; + strUniName += std::to_string(i); + auto uniName = uniformHandler->addUniform(ShaderFlags::Vertex, SLType::Float3x3, strUniName); + std::string strVaryingName = "TransformedCoords_"; + strVaryingName += std::to_string(i); + SLType varyingType = SLType::Float2; + auto varying = varyingHandler->addVarying(strVaryingName, varyingType); + + transformHandler->specifyCoordsForCurrCoordTransform(varying.name(), varyingType); + + vertexBuilder->codeAppendf("%s = (%s * %s).xy;", varying.vsOut().c_str(), uniName.c_str(), + localCoords.c_str()); + ++i; + } +} + +} // namespace tgfx diff --git a/src/gpu/processors/GeometryProcessor.h b/src/gpu/processors/GeometryProcessor.h new file mode 100644 index 00000000..92603690 --- /dev/null +++ b/src/gpu/processors/GeometryProcessor.h @@ -0,0 +1,150 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/FragmentShaderBuilder.h" +#include "gpu/ShaderVar.h" +#include "gpu/TextureSampler.h" +#include "gpu/UniformBuffer.h" +#include "gpu/UniformHandler.h" +#include "gpu/VaryingHandler.h" +#include "gpu/VertexShaderBuilder.h" +#include "gpu/processors/FragmentProcessor.h" +#include "gpu/processors/Processor.h" + +namespace tgfx { +class GeometryProcessor : public Processor { + public: + // Use only for easy-to-use aliases. + using FPCoordTransformIter = FragmentProcessor::CoordTransformIter; + + /** + * Describes a vertex attribute. + */ + class Attribute { + public: + Attribute() = default; + Attribute(std::string name, SLType gpuType) : _name(std::move(name)), _gpuType(gpuType) { + } + + bool isInitialized() const { + return !_name.empty(); + } + + const std::string& name() const { + return _name; + } + SLType gpuType() const { + return _gpuType; + } + + size_t sizeAlign4() const; + + ShaderVar asShaderVar() const { + return {_name, _gpuType, ShaderVar::TypeModifier::Attribute}; + } + + void computeKey(BytesKey* bytesKey) const { + bytesKey->write(isInitialized() ? static_cast(_gpuType) : ~0); + } + + private: + std::string _name; + SLType _gpuType = SLType::Float; + }; + + const std::vector& vertexAttributes() const { + return attributes; + } + + void computeProcessorKey(Context* context, BytesKey* bytesKey) const override; + + class FPCoordTransformHandler { + public: + FPCoordTransformHandler(const Pipeline* pipeline, std::vector* transformedCoordVars) + : iter(pipeline), transformedCoordVars(transformedCoordVars) { + } + + const CoordTransform* nextCoordTransform() { + return iter.next(); + } + + // 'args' are constructor params to ShaderVar. + template + void specifyCoordsForCurrCoordTransform(Args&&... 
args) { + transformedCoordVars->emplace_back(std::forward(args)...); + } + + private: + FragmentProcessor::CoordTransformIter iter; + std::vector* transformedCoordVars; + }; + + struct EmitArgs { + EmitArgs(VertexShaderBuilder* vertBuilder, FragmentShaderBuilder* fragBuilder, + VaryingHandler* varyingHandler, UniformHandler* uniformHandler, const Caps* caps, + std::string outputColor, std::string outputCoverage, + FPCoordTransformHandler* transformHandler) + : vertBuilder(vertBuilder), fragBuilder(fragBuilder), varyingHandler(varyingHandler), + uniformHandler(uniformHandler), caps(caps), outputColor(std::move(outputColor)), + outputCoverage(std::move(outputCoverage)), fpCoordTransformHandler(transformHandler) { + } + VertexShaderBuilder* vertBuilder; + FragmentShaderBuilder* fragBuilder; + VaryingHandler* varyingHandler; + UniformHandler* uniformHandler; + const Caps* caps; + const std::string outputColor; + const std::string outputCoverage; + FPCoordTransformHandler* fpCoordTransformHandler; + }; + + virtual void emitCode(EmitArgs&) const = 0; + + virtual void setData(UniformBuffer* uniformBuffer, + FPCoordTransformIter* coordTransformIter) const = 0; + + protected: + explicit GeometryProcessor(uint32_t classID) : Processor(classID) { + } + + void setVertexAttributes(const Attribute* attrs, int attrCount); + + /** + * A helper to upload coord transform matrices in setData(). + */ + void setTransformDataHelper(const Matrix& localMatrix, UniformBuffer* uniformBuffer, + FPCoordTransformIter* transformIter) const; + + /** + * Emit transformed local coords from the vertex shader as a uniform matrix and varying per + * coord-transform. localCoordsVar must be a 2-component vector. + */ + void emitTransforms(VertexShaderBuilder* vertexBuilder, VaryingHandler* varyingHandler, + UniformHandler* uniformHandler, const ShaderVar& localCoordsVar, + FPCoordTransformHandler* transformHandler) const; + + private: + virtual void onComputeProcessorKey(BytesKey*) const { + } + + std::vector attributes = {}; +}; +} // namespace tgfx diff --git a/src/gpu/processors/LinearGradientLayout.cpp b/src/gpu/processors/LinearGradientLayout.cpp new file mode 100644 index 00000000..c983b30b --- /dev/null +++ b/src/gpu/processors/LinearGradientLayout.cpp @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "LinearGradientLayout.h" + +namespace tgfx { +LinearGradientLayout::LinearGradientLayout(Matrix matrix) + : FragmentProcessor(ClassID()), coordTransform(matrix) { + addCoordTransform(&coordTransform); +} + +bool LinearGradientLayout::onIsEqual(const FragmentProcessor& processor) const { + return coordTransform.matrix == + static_cast(processor).coordTransform.matrix; +} +} // namespace tgfx diff --git a/src/gpu/processors/LinearGradientLayout.h b/src/gpu/processors/LinearGradientLayout.h new file mode 100644 index 00000000..1ae95ccd --- /dev/null +++ b/src/gpu/processors/LinearGradientLayout.h @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class LinearGradientLayout : public FragmentProcessor { + public: + static std::unique_ptr Make(Matrix matrix); + + std::string name() const override { + return "LinearGradientLayout"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + explicit LinearGradientLayout(Matrix matrix); + + bool onIsEqual(const FragmentProcessor& processor) const override; + + CoordTransform coordTransform; +}; +} // namespace tgfx diff --git a/src/gpu/processors/LumaColorFilterEffect.h b/src/gpu/processors/LumaColorFilterEffect.h new file mode 100644 index 00000000..65a05a8b --- /dev/null +++ b/src/gpu/processors/LumaColorFilterEffect.h @@ -0,0 +1,39 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class LumaColorFilterEffect : public FragmentProcessor { + public: + static std::unique_ptr Make(); + + std::string name() const override { + return "LumaColorFilterEffect"; + } + + protected: + LumaColorFilterEffect() : FragmentProcessor(ClassID()) { + } + + private: + DEFINE_PROCESSOR_CLASS_ID +}; +} // namespace tgfx diff --git a/src/gpu/processors/PorterDuffXferProcessor.cpp b/src/gpu/processors/PorterDuffXferProcessor.cpp new file mode 100644 index 00000000..30ebd04b --- /dev/null +++ b/src/gpu/processors/PorterDuffXferProcessor.cpp @@ -0,0 +1,26 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "PorterDuffXferProcessor.h" + +namespace tgfx { +void PorterDuffXferProcessor::computeProcessorKey(Context*, BytesKey* bytesKey) const { + bytesKey->write(classID()); + bytesKey->write(static_cast(blend)); +} +} // namespace tgfx diff --git a/src/gpu/processors/PorterDuffXferProcessor.h b/src/gpu/processors/PorterDuffXferProcessor.h new file mode 100644 index 00000000..d0034beb --- /dev/null +++ b/src/gpu/processors/PorterDuffXferProcessor.h @@ -0,0 +1,47 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/XferProcessor.h" +#include "tgfx/core/BlendMode.h" + +namespace tgfx { +class PorterDuffXferProcessor : public XferProcessor { + public: + static std::unique_ptr Make(BlendMode blend); + + BlendMode getBlend() const { + return blend; + } + + std::string name() const override { + return "PorterDuffXferProcessor"; + } + + void computeProcessorKey(Context* context, BytesKey* bytesKey) const override; + + protected: + DEFINE_PROCESSOR_CLASS_ID + + explicit PorterDuffXferProcessor(BlendMode blend) : XferProcessor(ClassID()), blend(blend) { + } + + BlendMode blend; +}; +} // namespace tgfx diff --git a/src/gpu/processors/Processor.h b/src/gpu/processors/Processor.h new file mode 100644 index 00000000..a5059864 --- /dev/null +++ b/src/gpu/processors/Processor.h @@ -0,0 +1,53 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/gpu/Context.h" +#include "tgfx/utils/BytesKey.h" +#include "utils/UniqueID.h" + +namespace tgfx { +#define DEFINE_PROCESSOR_CLASS_ID \ + static uint32_t ClassID() { \ + static uint32_t ClassID = UniqueID::Next(); \ + return ClassID; \ + } + +class Processor { + public: + explicit Processor(uint32_t classID) : _classID(classID) { + } + + virtual ~Processor() = default; + + /** + * Human-meaningful string to identify this processor. + */ + virtual std::string name() const = 0; + + uint32_t classID() const { + return _classID; + } + + virtual void computeProcessorKey(Context* context, BytesKey* bytesKey) const = 0; + + private: + uint32_t _classID = 0; +}; +} // namespace tgfx diff --git a/src/gpu/processors/QuadPerEdgeAAGeometryProcessor.cpp b/src/gpu/processors/QuadPerEdgeAAGeometryProcessor.cpp new file mode 100644 index 00000000..4fbeafac --- /dev/null +++ b/src/gpu/processors/QuadPerEdgeAAGeometryProcessor.cpp @@ -0,0 +1,43 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
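A hypothetical subclass sketch (not part of the patch) showing how DEFINE_PROCESSOR_CLASS_ID in Processor.h above is meant to be used: the macro's function-local static hands out one UniqueID per concrete class, each instance forwards it to the Processor constructor, and the key starts with that ID so processors of different types can never collide in the program cache.

#include "gpu/processors/Processor.h"

class SolidColorProcessor : public tgfx::Processor {
 public:
  SolidColorProcessor() : Processor(ClassID()) {
  }

  std::string name() const override {
    return "SolidColorProcessor";
  }

  void computeProcessorKey(tgfx::Context*, tgfx::BytesKey* bytesKey) const override {
    bytesKey->write(classID());  // a stateless processor only needs its class ID in the key
  }

 private:
  DEFINE_PROCESSOR_CLASS_ID
};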
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "QuadPerEdgeAAGeometryProcessor.h" +#include "gpu/YUVTexture.h" + +namespace tgfx { +QuadPerEdgeAAGeometryProcessor::QuadPerEdgeAAGeometryProcessor(int width, int height, AAType aa, + bool hasColor) + : GeometryProcessor(ClassID()), width(width), height(height), aa(aa) { + if (aa == AAType::Coverage) { + position = {"aPositionWithCoverage", SLType::Float3}; + } else { + position = {"aPosition", SLType::Float2}; + } + localCoord = {"localCoord", SLType::Float2}; + if (hasColor) { + color = {"inColor", SLType::Float4}; + } + setVertexAttributes(&position, 3); +} + +void QuadPerEdgeAAGeometryProcessor::onComputeProcessorKey(BytesKey* bytesKey) const { + uint32_t flags = aa == AAType::Coverage ? 1 : 0; + flags |= color.isInitialized() ? 2 : 0; + bytesKey->write(flags); +} +} // namespace tgfx diff --git a/src/gpu/processors/QuadPerEdgeAAGeometryProcessor.h b/src/gpu/processors/QuadPerEdgeAAGeometryProcessor.h new file mode 100644 index 00000000..b72ebb6c --- /dev/null +++ b/src/gpu/processors/QuadPerEdgeAAGeometryProcessor.h @@ -0,0 +1,50 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
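A hypothetical usage sketch for the geometry processor above (width/height are assumed to be the render-target dimensions; Make() is declared in the header that follows and its definition is generated per backend). The key written in onComputeProcessorKey() packs two bits: bit 0 for coverage AA, where position becomes the Float3 "aPositionWithCoverage", and bit 1 for the optional per-vertex "inColor" attribute.

#include "gpu/processors/QuadPerEdgeAAGeometryProcessor.h"

std::unique_ptr<tgfx::GeometryProcessor> makeAAQuadProcessor(int width, int height) {
  // Coverage AA with per-vertex colors -> vertex layout:
  //   float3 aPositionWithCoverage, float2 localCoord, float4 inColor
  return tgfx::QuadPerEdgeAAGeometryProcessor::Make(width, height, tgfx::AAType::Coverage,
                                                    /*hasColor=*/true);
}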
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "GeometryProcessor.h" +#include "gpu/AAType.h" +#include "tgfx/core/Paint.h" + +namespace tgfx { +class QuadPerEdgeAAGeometryProcessor : public GeometryProcessor { + public: + static std::unique_ptr Make(int width, int height, AAType aa, + bool hasColor); + + std::string name() const override { + return "QuadPerEdgeAAGeometryProcessor"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + QuadPerEdgeAAGeometryProcessor(int width, int height, AAType aa, bool hasColor); + + void onComputeProcessorKey(BytesKey* bytesKey) const override; + + Attribute position; // May contain coverage as last channel + Attribute localCoord; + Attribute color; + + int width = 1; + int height = 1; + AAType aa = AAType::None; +}; +} // namespace tgfx diff --git a/src/gpu/processors/RadialGradientLayout.cpp b/src/gpu/processors/RadialGradientLayout.cpp new file mode 100644 index 00000000..19f49ad2 --- /dev/null +++ b/src/gpu/processors/RadialGradientLayout.cpp @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "RadialGradientLayout.h" + +namespace tgfx { +RadialGradientLayout::RadialGradientLayout(Matrix matrix) + : FragmentProcessor(ClassID()), coordTransform(matrix) { + addCoordTransform(&coordTransform); +} + +bool RadialGradientLayout::onIsEqual(const FragmentProcessor& processor) const { + return coordTransform.matrix == + static_cast(processor).coordTransform.matrix; +} +} // namespace tgfx diff --git a/src/gpu/processors/RadialGradientLayout.h b/src/gpu/processors/RadialGradientLayout.h new file mode 100644 index 00000000..15078430 --- /dev/null +++ b/src/gpu/processors/RadialGradientLayout.h @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class RadialGradientLayout : public FragmentProcessor { + public: + static std::unique_ptr Make(Matrix matrix); + + std::string name() const override { + return "RadialGradientLayout"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + explicit RadialGradientLayout(Matrix matrix); + + bool onIsEqual(const FragmentProcessor& processor) const override; + + CoordTransform coordTransform; +}; +} // namespace tgfx diff --git a/src/gpu/processors/SeriesFragmentProcessor.cpp b/src/gpu/processors/SeriesFragmentProcessor.cpp new file mode 100644 index 00000000..30da3691 --- /dev/null +++ b/src/gpu/processors/SeriesFragmentProcessor.cpp @@ -0,0 +1,29 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "SeriesFragmentProcessor.h" + +namespace tgfx { +SeriesFragmentProcessor::SeriesFragmentProcessor(std::unique_ptr* children, + int count) + : FragmentProcessor(ClassID()) { + for (int i = 0; i < count; ++i) { + registerChildProcessor(std::move(children[i])); + } +} +} // namespace tgfx diff --git a/src/gpu/processors/SeriesFragmentProcessor.h b/src/gpu/processors/SeriesFragmentProcessor.h new file mode 100644 index 00000000..ff72593c --- /dev/null +++ b/src/gpu/processors/SeriesFragmentProcessor.h @@ -0,0 +1,38 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
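A hypothetical composition sketch for the series processor above (Make() is declared just below; proxy is assumed to be a valid TextureProxy): the children simply run in registration order, each stage consuming the previous stage's output color.

#include "gpu/processors/LumaColorFilterEffect.h"
#include "gpu/processors/SeriesFragmentProcessor.h"
#include "gpu/processors/TextureEffect.h"

std::unique_ptr<tgfx::FragmentProcessor> makeLumaTextureStage(
    std::shared_ptr<tgfx::TextureProxy> proxy) {
  std::unique_ptr<tgfx::FragmentProcessor> stages[] = {
      tgfx::TextureEffect::Make(std::move(proxy), tgfx::SamplingOptions()),
      tgfx::LumaColorFilterEffect::Make()};
  return tgfx::SeriesFragmentProcessor::Make(stages, 2);
}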
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class SeriesFragmentProcessor : public FragmentProcessor { + public: + static std::unique_ptr Make(std::unique_ptr* children, + int count); + + std::string name() const override { + return "SeriesFragmentProcessor"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + SeriesFragmentProcessor(std::unique_ptr* children, int count); +}; +} // namespace tgfx diff --git a/src/gpu/processors/SingleIntervalGradientColorizer.cpp b/src/gpu/processors/SingleIntervalGradientColorizer.cpp new file mode 100644 index 00000000..9cbab632 --- /dev/null +++ b/src/gpu/processors/SingleIntervalGradientColorizer.cpp @@ -0,0 +1,26 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "SingleIntervalGradientColorizer.h" + +namespace tgfx { +bool SingleIntervalGradientColorizer::onIsEqual(const FragmentProcessor& processor) const { + const auto& that = static_cast(processor); + return start == that.start && end == that.end; +} +} // namespace tgfx diff --git a/src/gpu/processors/SingleIntervalGradientColorizer.h b/src/gpu/processors/SingleIntervalGradientColorizer.h new file mode 100644 index 00000000..e1a69546 --- /dev/null +++ b/src/gpu/processors/SingleIntervalGradientColorizer.h @@ -0,0 +1,45 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" +#include "tgfx/core/Color.h" + +namespace tgfx { +class SingleIntervalGradientColorizer : public FragmentProcessor { + public: + static std::unique_ptr Make(Color start, Color end); + + std::string name() const override { + return "SingleIntervalGradientColorizer"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + SingleIntervalGradientColorizer(Color start, Color end) + : FragmentProcessor(ClassID()), start(start), end(end) { + } + + bool onIsEqual(const FragmentProcessor& processor) const override; + + Color start; + Color end; +}; +} // namespace tgfx diff --git a/src/gpu/processors/SweepGradientLayout.cpp b/src/gpu/processors/SweepGradientLayout.cpp new file mode 100644 index 00000000..d3040af4 --- /dev/null +++ b/src/gpu/processors/SweepGradientLayout.cpp @@ -0,0 +1,32 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "SweepGradientLayout.h" + +namespace tgfx { +SweepGradientLayout::SweepGradientLayout(Matrix matrix, float bias, float scale) + : FragmentProcessor(ClassID()), coordTransform(matrix), bias(bias), scale(scale) { + addCoordTransform(&coordTransform); +} + +bool SweepGradientLayout::onIsEqual(const FragmentProcessor& processor) const { + const auto& that = static_cast(processor); + return coordTransform.matrix == that.coordTransform.matrix && bias == that.bias && + scale == that.scale; +} +} // namespace tgfx diff --git a/src/gpu/processors/SweepGradientLayout.h b/src/gpu/processors/SweepGradientLayout.h new file mode 100644 index 00000000..9180283e --- /dev/null +++ b/src/gpu/processors/SweepGradientLayout.h @@ -0,0 +1,43 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
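A hedged CPU-side restatement of what the single-interval colorizer above is expected to compute (an assumption based on its start/end members; the actual shader is generated per backend): a layout stage such as SweepGradientLayout produces a t value, and this colorizer lerps between exactly two stops.

#include "tgfx/core/Color.h"

tgfx::Color lerpColor(const tgfx::Color& start, const tgfx::Color& end, float t) {
  return {start.red + (end.red - start.red) * t, start.green + (end.green - start.green) * t,
          start.blue + (end.blue - start.blue) * t,
          start.alpha + (end.alpha - start.alpha) * t};
}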
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class SweepGradientLayout : public FragmentProcessor { + public: + static std::unique_ptr Make(Matrix matrix, float bias, float scale); + + std::string name() const override { + return "SweepGradientLayout"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + SweepGradientLayout(Matrix matrix, float bias, float scale); + + bool onIsEqual(const FragmentProcessor& processor) const override; + + CoordTransform coordTransform; + float bias; + float scale; +}; +} // namespace tgfx diff --git a/src/gpu/processors/TextureEffect.cpp b/src/gpu/processors/TextureEffect.cpp new file mode 100644 index 00000000..039dde10 --- /dev/null +++ b/src/gpu/processors/TextureEffect.cpp @@ -0,0 +1,118 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "TextureEffect.h" +#include "gpu/ProxyProvider.h" + +namespace tgfx { +std::unique_ptr TextureEffect::Make(std::shared_ptr proxy, + const SamplingOptions& sampling, + const Matrix* localMatrix) { + return MakeRGBAAA(std::move(proxy), sampling, Point::Zero(), localMatrix); +} + +std::unique_ptr TextureEffect::Make(std::shared_ptr texture, + const SamplingOptions& sampling, + const Matrix* localMatrix) { + if (texture == nullptr) { + return nullptr; + } + auto context = texture->getContext(); + if (context == nullptr) { + return nullptr; + } + auto proxy = context->proxyProvider()->wrapTexture(std::move(texture)); + return MakeRGBAAA(std::move(proxy), sampling, Point::Zero(), localMatrix); +} + +std::unique_ptr TextureEffect::MakeRGBAAA(std::shared_ptr texture, + const SamplingOptions& sampling, + const Point& alphaStart, + const Matrix* localMatrix) { + if (texture == nullptr) { + return nullptr; + } + auto context = texture->getContext(); + if (context == nullptr) { + return nullptr; + } + auto proxy = context->proxyProvider()->wrapTexture(std::move(texture)); + return MakeRGBAAA(std::move(proxy), sampling, alphaStart, localMatrix); +} + +TextureEffect::TextureEffect(std::shared_ptr proxy, SamplingOptions sampling, + const Point& alphaStart, const Matrix& localMatrix) + : FragmentProcessor(ClassID()), textureProxy(std::move(proxy)), samplerState(sampling), + alphaStart(alphaStart), coordTransform(localMatrix, textureProxy.get(), alphaStart) { + addCoordTransform(&coordTransform); +} + +bool TextureEffect::onIsEqual(const FragmentProcessor& processor) const { + const auto& that = static_cast(processor); + return textureProxy == that.textureProxy && alphaStart == that.alphaStart && + coordTransform.matrix 
== that.coordTransform.matrix && samplerState == that.samplerState; +} + +void TextureEffect::onComputeProcessorKey(BytesKey* bytesKey) const { + auto texture = getTexture(); + if (texture == nullptr) { + return; + } + uint32_t flags = alphaStart == Point::Zero() ? 1 : 0; + auto yuvTexture = getYUVTexture(); + if (yuvTexture) { + flags |= yuvTexture->pixelFormat() == YUVPixelFormat::I420 ? 0 : 2; + flags |= IsLimitedYUVColorRange(yuvTexture->colorSpace()) ? 0 : 4; + } + bytesKey->write(flags); +} + +size_t TextureEffect::onCountTextureSamplers() const { + auto texture = getTexture(); + if (texture == nullptr) { + return 0; + } + if (texture->isYUV()) { + return reinterpret_cast(texture)->samplerCount(); + } + return 1; +} + +const TextureSampler* TextureEffect::onTextureSampler(size_t index) const { + auto texture = getTexture(); + if (texture == nullptr) { + return nullptr; + } + if (texture->isYUV()) { + return reinterpret_cast(texture)->getSamplerAt(index); + } + return texture->getSampler(); +} + +Texture* TextureEffect::getTexture() const { + return textureProxy->getTexture().get(); +} + +YUVTexture* TextureEffect::getYUVTexture() const { + auto texture = textureProxy->getTexture().get(); + if (texture && texture->isYUV()) { + return reinterpret_cast(texture); + } + return nullptr; +} +} // namespace tgfx diff --git a/src/gpu/processors/TextureEffect.h b/src/gpu/processors/TextureEffect.h new file mode 100644 index 00000000..191667fc --- /dev/null +++ b/src/gpu/processors/TextureEffect.h @@ -0,0 +1,82 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
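A hypothetical RGBAAA sketch for the effect above: a texture whose right half stores the alpha plane for the pixels in its left half, with alphaStart giving the pixel offset of that alpha plane inside the same texture. This is also what bit 0 of the key written in onComputeProcessorKey() distinguishes (plain RGBA versus RGBAAA sampling). The texture argument is assumed to be non-null.

#include "gpu/processors/TextureEffect.h"

std::unique_ptr<tgfx::FragmentProcessor> makeRGBAAAEffect(std::shared_ptr<tgfx::Texture> texture) {
  tgfx::Point alphaStart = {texture->width() * 0.5f, 0.0f};
  return tgfx::TextureEffect::MakeRGBAAA(std::move(texture), tgfx::SamplingOptions(), alphaStart);
}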
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/SamplerState.h" +#include "gpu/TextureProxy.h" +#include "gpu/YUVTexture.h" +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class TextureEffect : public FragmentProcessor { + public: + static std::unique_ptr Make(std::shared_ptr proxy, + const SamplingOptions& sampling, + const Matrix* localMatrix = nullptr); + + static std::unique_ptr MakeRGBAAA(std::shared_ptr proxy, + const SamplingOptions& sampling, + const Point& alphaStart, + const Matrix* localMatrix = nullptr); + + static std::unique_ptr Make(std::shared_ptr texture, + const SamplingOptions& sampling, + const Matrix* localMatrix = nullptr); + + static std::unique_ptr MakeRGBAAA(std::shared_ptr texture, + const SamplingOptions& sampling, + const Point& alphaStart, + const Matrix* localMatrix = nullptr); + + std::string name() const override { + return "TextureEffect"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + TextureEffect(std::shared_ptr proxy, SamplingOptions sampling, + const Point& alphaStart, const Matrix& localMatrix); + + bool onIsEqual(const FragmentProcessor& processor) const override; + + void onComputeProcessorKey(BytesKey* bytesKey) const override; + + size_t onCountTextureSamplers() const override; + + const TextureSampler* onTextureSampler(size_t index) const override; + + SamplerState onSamplerState(size_t) const override { + return samplerState; + } + + Texture* getTexture() const; + + YUVTexture* getYUVTexture() const; + + void onVisitProxies(const std::function& func) const override { + func(textureProxy.get()); + } + + std::shared_ptr textureProxy; + SamplerState samplerState; + Point alphaStart = Point::Zero(); + CoordTransform coordTransform; +}; +} // namespace tgfx diff --git a/src/gpu/processors/TextureGradientColorizer.cpp b/src/gpu/processors/TextureGradientColorizer.cpp new file mode 100644 index 00000000..6702e87e --- /dev/null +++ b/src/gpu/processors/TextureGradientColorizer.cpp @@ -0,0 +1,25 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "TextureGradientColorizer.h" + +namespace tgfx { +bool TextureGradientColorizer::onIsEqual(const FragmentProcessor& processor) const { + return gradient == static_cast(processor).gradient; +} +} // namespace tgfx diff --git a/src/gpu/processors/TextureGradientColorizer.h b/src/gpu/processors/TextureGradientColorizer.h new file mode 100644 index 00000000..6febe7a6 --- /dev/null +++ b/src/gpu/processors/TextureGradientColorizer.h @@ -0,0 +1,52 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class TextureGradientColorizer : public FragmentProcessor { + public: + static std::unique_ptr Make(std::shared_ptr gradient); + + std::string name() const override { + return "TextureGradientColorizer"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + explicit TextureGradientColorizer(std::shared_ptr gradient) + : FragmentProcessor(ClassID()), gradient(std::move(gradient)) { + } + + size_t onCountTextureSamplers() const override { + return 1; + } + + const TextureSampler* onTextureSampler(size_t) const override { + return gradient->getSampler(); + } + + bool onIsEqual(const FragmentProcessor& processor) const override; + + std::shared_ptr gradient; +}; +} // namespace tgfx diff --git a/src/gpu/processors/TiledTextureEffect.cpp b/src/gpu/processors/TiledTextureEffect.cpp new file mode 100644 index 00000000..1c43a12d --- /dev/null +++ b/src/gpu/processors/TiledTextureEffect.cpp @@ -0,0 +1,191 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
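A hedged sketch for the colorizer above, which serves as the generic fallback when the analytic colorizers (single-interval, unrolled-binary) do not apply: the gradient stops are assumed to be baked into a small strip texture (for example 256x1) and the shader does one clamped lookup at t.

#include "gpu/processors/TextureGradientColorizer.h"

std::unique_ptr<tgfx::FragmentProcessor> makeLUTColorizer(
    std::shared_ptr<tgfx::Texture> gradientStrip) {
  // gradientStrip is assumed to hold the resampled color stops along its x axis.
  return tgfx::TextureGradientColorizer::Make(std::move(gradientStrip));
}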
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "TiledTextureEffect.h" +#include "ConstColorProcessor.h" +#include "TextureEffect.h" +#include "gpu/ProxyProvider.h" +#include "utils/MathExtra.h" + +namespace tgfx { +using Wrap = SamplerState::WrapMode; + +TiledTextureEffect::ShaderMode TiledTextureEffect::GetShaderMode(Wrap wrap, FilterMode filter, + MipMapMode mm) { + switch (wrap) { + case Wrap::MirrorRepeat: + return ShaderMode::MirrorRepeat; + case Wrap::Clamp: + return ShaderMode::Clamp; + case Wrap::Repeat: + switch (mm) { + case MipMapMode::None: + switch (filter) { + case FilterMode::Nearest: + return ShaderMode::RepeatNearestNone; + case FilterMode::Linear: + return ShaderMode::RepeatLinearNone; + } + case MipMapMode::Nearest: + case MipMapMode::Linear: + switch (filter) { + case FilterMode::Nearest: + return ShaderMode::RepeatNearestMipmap; + case FilterMode::Linear: + return ShaderMode::RepeatLinearMipmap; + } + } + case Wrap::ClampToBorder: + switch (filter) { + case FilterMode::Nearest: + return ShaderMode::ClampToBorderNearest; + case FilterMode::Linear: + return ShaderMode::ClampToBorderLinear; + } + } +} + +std::unique_ptr TiledTextureEffect::Make(std::shared_ptr texture, + TileMode tileModeX, TileMode tileModeY, + const SamplingOptions& sampling, + const Matrix* localMatrix) { + if (texture == nullptr) { + return nullptr; + } + auto context = texture->getContext(); + if (context == nullptr) { + return nullptr; + } + auto proxy = context->proxyProvider()->wrapTexture(std::move(texture)); + return Make(std::move(proxy), tileModeX, tileModeY, sampling, localMatrix); +} + +TiledTextureEffect::Sampling::Sampling(const Texture* texture, SamplerState sampler, + const Rect& subset) { + struct Span { + float a = 0.f; + float b = 0.f; + + Span makeInset(float o) const { + Span r = {a + o, b - o}; + if (r.a > r.b) { + r.a = r.b = (r.a + r.b) / 2; + } + return r; + } + }; + struct Result1D { + ShaderMode shaderMode = ShaderMode::None; + Span shaderSubset; + Span shaderClamp; + Wrap hwWrap = Wrap::Clamp; + }; + auto caps = texture->getContext()->caps(); + auto canDoWrapInHW = [&](int size, Wrap wrap) { + if (wrap == Wrap::ClampToBorder && !caps->clampToBorderSupport) { + return false; + } + if (wrap != Wrap::Clamp && !caps->npotTextureTileSupport && !IsPow2(size)) { + return false; + } + if (texture->getSampler()->type() != TextureType::TwoD && + !(wrap == Wrap::Clamp || wrap == Wrap::ClampToBorder)) { + return false; + } + return true; + }; + auto resolve = [&](int size, Wrap wrap, Span subset) { + Result1D r; + bool canDoModeInHW = canDoWrapInHW(size, wrap); + if (canDoModeInHW && subset.a <= 0 && subset.b >= static_cast(size)) { + r.hwWrap = wrap; + return r; + } + r.shaderSubset = subset; + r.shaderClamp = subset.makeInset(0.5f); + r.shaderMode = GetShaderMode(wrap, sampler.filterMode, sampler.mipMapMode); + return r; + }; + + Span subsetX{subset.left, subset.right}; + auto x = resolve(texture->width(), sampler.wrapModeX, subsetX); + Span subsetY{subset.top, subset.bottom}; + auto y = resolve(texture->height(), sampler.wrapModeY, subsetY); + hwSampler = SamplerState(x.hwWrap, y.hwWrap, sampler.filterMode, sampler.mipMapMode); + shaderModeX = x.shaderMode; + shaderModeY = y.shaderMode; + shaderSubset = {x.shaderSubset.a, y.shaderSubset.a, x.shaderSubset.b, y.shaderSubset.b}; + shaderClamp = {x.shaderClamp.a, y.shaderClamp.a, x.shaderClamp.b, y.shaderClamp.b}; +} + 
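The Sampling constructor above decides, per axis, whether tiling can stay on the hardware sampler (the subset covers the whole texture and the wrap mode is supported) or has to move into the shader, in which case the subset is inset by half a texel to build shaderClamp. As a readable reference, GetShaderMode(Wrap::Repeat, FilterMode::Linear, MipMapMode::None) maps to ShaderMode::RepeatLinearNone. A standalone sketch (not tgfx API) of the coordinate fold a shader-side Repeat implies:

#include <cmath>

float shaderRepeat(float coord, float subsetLo, float subsetHi) {
  float size = subsetHi - subsetLo;
  float t = std::fmod(coord - subsetLo, size);
  return subsetLo + (t < 0 ? t + size : t);  // fold back into [subsetLo, subsetHi)
}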
+TiledTextureEffect::TiledTextureEffect(std::shared_ptr proxy, + const SamplerState& samplerState, const Matrix& localMatrix) + : FragmentProcessor(ClassID()), textureProxy(std::move(proxy)), samplerState(samplerState), + coordTransform(localMatrix, textureProxy.get()) { + addCoordTransform(&coordTransform); +} + +void TiledTextureEffect::onComputeProcessorKey(BytesKey* bytesKey) const { + auto texture = getTexture(); + if (texture == nullptr) { + return; + } + auto subset = Rect::MakeWH(texture->width(), texture->height()); + Sampling sampling(texture, samplerState, subset); + auto flags = static_cast(sampling.shaderModeX); + flags |= static_cast(sampling.shaderModeY) << 4; + bytesKey->write(flags); +} + +bool TiledTextureEffect::onIsEqual(const FragmentProcessor& processor) const { + const auto& that = static_cast(processor); + return textureProxy == that.textureProxy && samplerState == that.samplerState && + coordTransform.matrix == that.coordTransform.matrix; +} + +size_t TiledTextureEffect::onCountTextureSamplers() const { + auto texture = getTexture(); + return texture ? 1 : 0; +} + +const TextureSampler* TiledTextureEffect::onTextureSampler(size_t) const { + auto texture = getTexture(); + if (texture == nullptr) { + return nullptr; + } + return texture->getSampler(); +} + +SamplerState TiledTextureEffect::onSamplerState(size_t) const { + auto texture = getTexture(); + if (texture == nullptr) { + return {}; + } + auto subset = Rect::MakeWH(texture->width(), texture->height()); + Sampling sampling(texture, samplerState, subset); + return sampling.hwSampler; +} + +const Texture* TiledTextureEffect::getTexture() const { + auto texture = textureProxy->getTexture().get(); + if (texture && !texture->isYUV()) { + return texture; + } + return nullptr; +} +} // namespace tgfx diff --git a/src/gpu/processors/TiledTextureEffect.h b/src/gpu/processors/TiledTextureEffect.h new file mode 100644 index 00000000..f2d61fff --- /dev/null +++ b/src/gpu/processors/TiledTextureEffect.h @@ -0,0 +1,92 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/SamplerState.h" +#include "gpu/TextureProxy.h" +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +class TiledTextureEffect : public FragmentProcessor { + public: + static std::unique_ptr Make(std::shared_ptr textureProxy, + TileMode tileModeX, TileMode tileModeY, + const SamplingOptions& sampling, + const Matrix* localMatrix = nullptr); + + static std::unique_ptr Make(std::shared_ptr texture, + TileMode tileModeX, TileMode tileModeY, + const SamplingOptions& sampling, + const Matrix* localMatrix = nullptr); + + std::string name() const override { + return "TiledTextureEffect"; + } + + protected: + DEFINE_PROCESSOR_CLASS_ID + + enum class ShaderMode { + None, // Using HW mode + Clamp, // Shader based clamp, no filter specialization + RepeatNearestNone, // Simple repeat for nearest sampling, no mipmapping. + RepeatLinearNone, // Filter the subset boundary for kRepeat mode, no mip mapping + RepeatLinearMipmap, // Logic for linear filtering and LOD selection with kRepeat mode. + RepeatNearestMipmap, // Logic for nearest filtering and LOD selection with kRepeat mode. + MirrorRepeat, // Mirror repeat (doesn't depend on filter)) + ClampToBorderNearest, + ClampToBorderLinear + }; + + struct Sampling { + Sampling(const Texture* texture, SamplerState sampler, const Rect& subset); + + SamplerState hwSampler; + ShaderMode shaderModeX = ShaderMode::None; + ShaderMode shaderModeY = ShaderMode::None; + Rect shaderSubset = Rect::MakeEmpty(); + Rect shaderClamp = Rect::MakeEmpty(); + }; + + TiledTextureEffect(std::shared_ptr proxy, const SamplerState& samplerState, + const Matrix& localMatrix); + + void onComputeProcessorKey(BytesKey* bytesKey) const override; + + bool onIsEqual(const FragmentProcessor& processor) const override; + + size_t onCountTextureSamplers() const override; + + const TextureSampler* onTextureSampler(size_t) const override; + + SamplerState onSamplerState(size_t) const override; + + const Texture* getTexture() const; + + static ShaderMode GetShaderMode(SamplerState::WrapMode mode, FilterMode filter, MipMapMode mm); + + void onVisitProxies(const std::function& func) const override { + func(textureProxy.get()); + } + + std::shared_ptr textureProxy; + SamplerState samplerState; + CoordTransform coordTransform; +}; +} // namespace tgfx diff --git a/src/gpu/processors/UnrolledBinaryGradientColorizer.cpp b/src/gpu/processors/UnrolledBinaryGradientColorizer.cpp new file mode 100644 index 00000000..afcc4165 --- /dev/null +++ b/src/gpu/processors/UnrolledBinaryGradientColorizer.cpp @@ -0,0 +1,37 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "UnrolledBinaryGradientColorizer.h" + +namespace tgfx { +void UnrolledBinaryGradientColorizer::onComputeProcessorKey(BytesKey* bytesKey) const { + bytesKey->write(static_cast<uint32_t>(intervalCount)); +} + +bool UnrolledBinaryGradientColorizer::onIsEqual(const FragmentProcessor& processor) const { + const auto& that = static_cast<const UnrolledBinaryGradientColorizer&>(processor); + return intervalCount == that.intervalCount && scale0_1 == that.scale0_1 && + scale2_3 == that.scale2_3 && scale4_5 == that.scale4_5 && scale6_7 == that.scale6_7 && + scale8_9 == that.scale8_9 && scale10_11 == that.scale10_11 && + scale12_13 == that.scale12_13 && scale14_15 == that.scale14_15 && + bias0_1 == that.bias0_1 && bias2_3 == that.bias2_3 && bias4_5 == that.bias4_5 && + bias6_7 == that.bias6_7 && bias8_9 == that.bias8_9 && bias10_11 == that.bias10_11 && + bias12_13 == that.bias12_13 && bias14_15 == that.bias14_15 && + thresholds1_7 == that.thresholds1_7 && thresholds9_13 == that.thresholds9_13; +} +} // namespace tgfx diff --git a/src/gpu/processors/UnrolledBinaryGradientColorizer.h b/src/gpu/processors/UnrolledBinaryGradientColorizer.h new file mode 100644 index 00000000..30347542 --- /dev/null +++ b/src/gpu/processors/UnrolledBinaryGradientColorizer.h @@ -0,0 +1,72 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license.
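A hedged reading of the members compared in onIsEqual() above (an assumption drawn from the scale/bias naming; the generated shader lives elsewhere): each of up to 16 intervals stores a line color(t) = scale_i * t + bias_i, and the thresholds packed into the two Rects drive a small binary search that picks the interval. A scalar version of one interval evaluation:

#include "tgfx/core/Color.h"

tgfx::Color evalInterval(const tgfx::Color& scale, const tgfx::Color& bias, float t) {
  return {scale.red * t + bias.red, scale.green * t + bias.green, scale.blue * t + bias.blue,
          scale.alpha * t + bias.alpha};
}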
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" +#include "tgfx/core/Color.h" + +namespace tgfx { +class UnrolledBinaryGradientColorizer : public FragmentProcessor { + public: + static constexpr int kMaxColorCount = 16; + static std::unique_ptr Make(const Color* colors, + const float* positions, int count); + + std::string name() const override { + return "UnrolledBinaryGradientColorizer"; + } + + void onComputeProcessorKey(BytesKey* bytesKey) const override; + + protected: + DEFINE_PROCESSOR_CLASS_ID + + UnrolledBinaryGradientColorizer(int intervalCount, Color* scales, Color* biases, + Rect thresholds1_7, Rect thresholds9_13) + : FragmentProcessor(ClassID()), intervalCount(intervalCount), scale0_1(scales[0]), + scale2_3(scales[1]), scale4_5(scales[2]), scale6_7(scales[3]), scale8_9(scales[4]), + scale10_11(scales[5]), scale12_13(scales[6]), scale14_15(scales[7]), bias0_1(biases[0]), + bias2_3(biases[1]), bias4_5(biases[2]), bias6_7(biases[3]), bias8_9(biases[4]), + bias10_11(biases[5]), bias12_13(biases[6]), bias14_15(biases[7]), + thresholds1_7(thresholds1_7), thresholds9_13(thresholds9_13) { + } + + bool onIsEqual(const FragmentProcessor& processor) const override; + + int intervalCount; + Color scale0_1; + Color scale2_3; + Color scale4_5; + Color scale6_7; + Color scale8_9; + Color scale10_11; + Color scale12_13; + Color scale14_15; + Color bias0_1; + Color bias2_3; + Color bias4_5; + Color bias6_7; + Color bias8_9; + Color bias10_11; + Color bias12_13; + Color bias14_15; + Rect thresholds1_7; + Rect thresholds9_13; +}; +} // namespace tgfx diff --git a/src/gpu/processors/XferProcessor.h b/src/gpu/processors/XferProcessor.h new file mode 100644 index 00000000..fabba98f --- /dev/null +++ b/src/gpu/processors/XferProcessor.h @@ -0,0 +1,57 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/FragmentShaderBuilder.h" +#include "gpu/Texture.h" +#include "gpu/UniformBuffer.h" +#include "gpu/UniformHandler.h" +#include "gpu/processors/Processor.h" +#include "tgfx/utils/BytesKey.h" + +namespace tgfx { +class XferProcessor : public Processor { + public: + struct EmitArgs { + EmitArgs(FragmentShaderBuilder* fragBuilder, UniformHandler* uniformHandler, + std::string inputColor, std::string inputCoverage, std::string outputColor, + const SamplerHandle dstTextureSamplerHandle) + : fragBuilder(fragBuilder), uniformHandler(uniformHandler), + inputColor(std::move(inputColor)), inputCoverage(std::move(inputCoverage)), + outputColor(std::move(outputColor)), dstTextureSamplerHandle(dstTextureSamplerHandle) { + } + FragmentShaderBuilder* fragBuilder; + UniformHandler* uniformHandler; + const std::string inputColor; + const std::string inputCoverage; + const std::string outputColor; + const SamplerHandle dstTextureSamplerHandle; + }; + + virtual void emitCode(const EmitArgs& args) const = 0; + + virtual void setData(UniformBuffer*, const Texture*, const Point&) const { + } + + protected: + explicit XferProcessor(uint32_t classID) : Processor(classID) { + } +}; +} // namespace tgfx diff --git a/src/gpu/processors/XfermodeFragmentProcessor.cpp b/src/gpu/processors/XfermodeFragmentProcessor.cpp new file mode 100644 index 00000000..2ed1c474 --- /dev/null +++ b/src/gpu/processors/XfermodeFragmentProcessor.cpp @@ -0,0 +1,69 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "XfermodeFragmentProcessor.h" +#include "gpu/processors/ConstColorProcessor.h" + +namespace tgfx { +std::unique_ptr XfermodeFragmentProcessor::MakeFromSrcProcessor( + std::unique_ptr src, BlendMode mode) { + return XfermodeFragmentProcessor::MakeFromTwoProcessors(std::move(src), nullptr, mode); +} + +std::unique_ptr XfermodeFragmentProcessor::MakeFromDstProcessor( + std::unique_ptr dst, BlendMode mode) { + return XfermodeFragmentProcessor::MakeFromTwoProcessors(nullptr, std::move(dst), mode); +} + +std::string XfermodeFragmentProcessor::name() const { + switch (child) { + case Child::TwoChild: + return "XfermodeFragmentProcessor - two"; + case Child::DstChild: + return "XfermodeFragmentProcessor - dst"; + case Child::SrcChild: + return "XfermodeFragmentProcessor - src"; + } +} + +XfermodeFragmentProcessor::XfermodeFragmentProcessor(std::unique_ptr src, + std::unique_ptr dst, + BlendMode mode) + : FragmentProcessor(ClassID()), mode(mode) { + if (src && dst) { + child = Child::TwoChild; + registerChildProcessor(std::move(src)); + registerChildProcessor(std::move(dst)); + } else if (src) { + child = Child::SrcChild; + registerChildProcessor(std::move(src)); + } else { + child = Child::DstChild; + registerChildProcessor(std::move(dst)); + } +} + +void XfermodeFragmentProcessor::onComputeProcessorKey(BytesKey* bytesKey) const { + bytesKey->write(static_cast(mode) | (static_cast(child) << 16)); +} + +bool XfermodeFragmentProcessor::onIsEqual(const FragmentProcessor& processor) const { + const auto& that = static_cast(processor); + return mode == that.mode && child == that.child; +} +} // namespace tgfx diff --git a/src/gpu/processors/XfermodeFragmentProcessor.h b/src/gpu/processors/XfermodeFragmentProcessor.h new file mode 100644 index 00000000..a6564384 --- /dev/null +++ b/src/gpu/processors/XfermodeFragmentProcessor.h @@ -0,0 +1,67 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" +#include "tgfx/core/BlendMode.h" + +namespace tgfx { +class XfermodeFragmentProcessor : public FragmentProcessor { + public: + /** + * The color input to the returned processor is treated as the src and the passed in processor is + * the dst. + */ + static std::unique_ptr MakeFromDstProcessor( + std::unique_ptr dst, BlendMode mode); + + /** + * The color input to the returned processor is treated as the dst and the passed in processor is + * the src. 
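A hypothetical sketch for the processor above: blending a texture stage (the src child) against the incoming paint color with Multiply. Only one child gets registered, so child becomes Child::SrcChild and the key packs (mode | child << 16) as written in onComputeProcessorKey(). textureProxy is assumed to exist.

#include "gpu/processors/TextureEffect.h"
#include "gpu/processors/XfermodeFragmentProcessor.h"

std::unique_ptr<tgfx::FragmentProcessor> makeMultiplyStage(
    std::shared_ptr<tgfx::TextureProxy> textureProxy) {
  auto textureStage = tgfx::TextureEffect::Make(std::move(textureProxy), tgfx::SamplingOptions());
  return tgfx::XfermodeFragmentProcessor::MakeFromSrcProcessor(std::move(textureStage),
                                                               tgfx::BlendMode::Multiply);
}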
+ */ + static std::unique_ptr MakeFromSrcProcessor( + std::unique_ptr src, BlendMode mode); + + /** + * Takes the input color, which is assumed to be unpremultiplied, passes it as an opaque color + * to both src and dst. The outputs of a src and dst are blended using mode and the original + * input's alpha is applied to the blended color to produce a premul output. + */ + static std::unique_ptr MakeFromTwoProcessors( + std::unique_ptr src, std::unique_ptr dst, + BlendMode mode); + + std::string name() const override; + + void onComputeProcessorKey(BytesKey* bytesKey) const override; + + protected: + DEFINE_PROCESSOR_CLASS_ID + + enum class Child { DstChild, SrcChild, TwoChild }; + + XfermodeFragmentProcessor(std::unique_ptr src, + std::unique_ptr dst, BlendMode mode); + + bool onIsEqual(const FragmentProcessor& processor) const override; + + Child child; + BlendMode mode; +}; +} // namespace tgfx diff --git a/src/images/AsyncSource.cpp b/src/images/AsyncSource.cpp new file mode 100644 index 00000000..3e41ce0d --- /dev/null +++ b/src/images/AsyncSource.cpp @@ -0,0 +1,45 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "AsyncSource.h" +#include "BufferSource.h" + +namespace tgfx { +AsyncSource::AsyncSource(UniqueKey uniqueKey, std::shared_ptr imageGenerator, + bool mipMapped) + : EncodedSource(std::move(uniqueKey), std::move(imageGenerator), mipMapped) { + auto tryHardware = !mipMapped; + if (generator->asyncSupport()) { + imageBuffer = generator->makeBuffer(tryHardware); + } else { + imageTask = ImageGeneratorTask::MakeFrom(generator, tryHardware); + } +} + +std::shared_ptr AsyncSource::onMakeDecoded(Context*) const { + return nullptr; +} + +std::shared_ptr AsyncSource::onMakeTextureProxy(Context* context, uint32_t) const { + auto provider = context->proxyProvider(); + if (imageBuffer) { + return provider->createTextureProxy(imageBuffer, mipMapped); + } + return provider->createTextureProxy(imageTask, mipMapped); +} +} // namespace tgfx diff --git a/src/images/AsyncSource.h b/src/images/AsyncSource.h new file mode 100644 index 00000000..4bf2c927 --- /dev/null +++ b/src/images/AsyncSource.h @@ -0,0 +1,48 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "EncodedSource.h" + +namespace tgfx { +/** + * AsyncSource wraps an ImageGenerator and schedules an asynchronous decoding task immediately. + */ +class AsyncSource : public EncodedSource { + public: + bool isLazyGenerated() const override { + return false; + } + + protected: + std::shared_ptr onMakeDecoded(Context* context) const override; + + std::shared_ptr onMakeTextureProxy(Context* context, + uint32_t surfaceFlags) const override; + + private: + std::shared_ptr imageTask = nullptr; + std::shared_ptr imageBuffer = nullptr; + + explicit AsyncSource(UniqueKey uniqueKey, std::shared_ptr generator, + bool mipMapped = false); + + friend class EncodedSource; +}; +} // namespace tgfx diff --git a/src/images/BufferSource.cpp b/src/images/BufferSource.cpp new file mode 100644 index 00000000..ac7b604f --- /dev/null +++ b/src/images/BufferSource.cpp @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "BufferSource.h" + +namespace tgfx { +BufferSource::BufferSource(UniqueKey uniqueKey, std::shared_ptr buffer, bool mipMapped) + : ImageSource(std::move(uniqueKey)), imageBuffer(std::move(buffer)), mipMapped(mipMapped) { +} + +std::shared_ptr BufferSource::onMakeMipMapped() const { + return std::shared_ptr(new BufferSource(UniqueKey::Next(), imageBuffer, true)); +} + +std::shared_ptr BufferSource::onMakeTextureProxy(Context* context, uint32_t) const { + return context->proxyProvider()->createTextureProxy(imageBuffer, mipMapped); +} +} // namespace tgfx diff --git a/src/images/BufferSource.h b/src/images/BufferSource.h new file mode 100644 index 00000000..db28dfc5 --- /dev/null +++ b/src/images/BufferSource.h @@ -0,0 +1,60 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "ImageSource.h" + +namespace tgfx { +/** + * BufferSource wraps a decoded ImageBuffer that can generate textures on demand. + */ +class BufferSource : public ImageSource { + public: + int width() const override { + return imageBuffer->width(); + } + + int height() const override { + return imageBuffer->height(); + } + + bool hasMipmaps() const override { + return mipMapped; + } + + bool isAlphaOnly() const override { + return imageBuffer->isAlphaOnly(); + } + + protected: + std::shared_ptr onMakeMipMapped() const override; + + std::shared_ptr onMakeTextureProxy(Context* context, + uint32_t surfaceFlags) const override; + + private: + std::shared_ptr imageBuffer = nullptr; + bool mipMapped = false; + + BufferSource(UniqueKey uniqueKey, std::shared_ptr buffer, bool mipMapped = false); + + friend class ImageSource; + friend class AsyncSource; +}; +} // namespace tgfx diff --git a/src/images/EncodedSource.cpp b/src/images/EncodedSource.cpp new file mode 100644 index 00000000..4ff05b75 --- /dev/null +++ b/src/images/EncodedSource.cpp @@ -0,0 +1,45 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
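Illustrative note (not part of the patch): BufferSource above defers all GPU work until a texture proxy is actually requested. A minimal usage sketch, assuming "buffer" is an already decoded std::shared_ptr<ImageBuffer> and "context" a valid Context*, and relying on the ImageSource::MakeFrom factory and lockTextureProxy added elsewhere in this patch:

  auto source = ImageSource::MakeFrom(UniqueKey::Next(), buffer);  // wraps it in a BufferSource
  auto proxy = source->lockTextureProxy(context);                  // the texture is created only here
  // A second lockTextureProxy() call with the same context returns the proxy found
  // through the UniqueKey instead of uploading the pixels again.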
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "EncodedSource.h" +#include "AsyncSource.h" + +namespace tgfx { +EncodedSource::EncodedSource(UniqueKey uniqueKey, std::shared_ptr generator, + bool mipMapped) + : ImageSource(std::move(uniqueKey)), generator(std::move(generator)), mipMapped(mipMapped) { +} + +std::shared_ptr EncodedSource::onMakeDecoded(Context* context) const { + if (context != nullptr && context->resourceCache()->hasUniqueResource(uniqueKey)) { + return nullptr; + } + auto encodedSource = std::static_pointer_cast(weakThis.lock()); + return std::shared_ptr(new AsyncSource(uniqueKey, generator, mipMapped)); +} + +std::shared_ptr EncodedSource::onMakeMipMapped() const { + return std::shared_ptr(new EncodedSource(UniqueKey::Next(), generator, true)); +} + +std::shared_ptr EncodedSource::onMakeTextureProxy(Context* context, + uint32_t surfaceFlags) const { + bool disableAsyncTask = surfaceFlags & SurfaceOptions::DisableAsyncTaskFlag; + return context->proxyProvider()->createTextureProxy(generator, mipMapped, disableAsyncTask); +} +} // namespace tgfx diff --git a/src/images/EncodedSource.h b/src/images/EncodedSource.h new file mode 100644 index 00000000..24f10e83 --- /dev/null +++ b/src/images/EncodedSource.h @@ -0,0 +1,67 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "ImageSource.h" + +namespace tgfx { +/** + * EncodedSource wraps an ImageGenerator that can generate ImageBuffers on demand. 
+ */ +class EncodedSource : public ImageSource { + public: + int width() const override { + return generator->width(); + } + + int height() const override { + return generator->height(); + } + + bool hasMipmaps() const override { + return mipMapped; + } + + bool isAlphaOnly() const override { + return generator->isAlphaOnly(); + } + + bool isLazyGenerated() const override { + return true; + } + + protected: + std::shared_ptr onMakeDecoded(Context* context) const override; + + std::shared_ptr onMakeMipMapped() const override; + + std::shared_ptr onMakeTextureProxy(Context* context, + uint32_t surfaceFlags) const override; + + protected: + std::shared_ptr generator = nullptr; + bool mipMapped = false; + + EncodedSource(UniqueKey uniqueKey, std::shared_ptr generator, + bool mipMapped = false); + + friend class AsyncSource; + friend class ImageSource; +}; +} // namespace tgfx diff --git a/src/images/ImageGeneratorTask.cpp b/src/images/ImageGeneratorTask.cpp new file mode 100644 index 00000000..e9262b1c --- /dev/null +++ b/src/images/ImageGeneratorTask.cpp @@ -0,0 +1,52 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ImageGeneratorTask.h" + +namespace tgfx { +class ImageResult { + public: + std::shared_ptr imageBuffer = nullptr; +}; + +std::shared_ptr ImageGeneratorTask::MakeFrom( + std::shared_ptr generator, bool tryHardware) { + if (generator == nullptr) { + return nullptr; + } + return std::shared_ptr( + new ImageGeneratorTask(std::move(generator), tryHardware)); +} + +ImageGeneratorTask::ImageGeneratorTask(std::shared_ptr generator, bool tryHardware) + : imageGenerator(std::move(generator)) { + imageResult = std::make_shared(); + task = Task::Run([result = imageResult, generator = imageGenerator, tryHardware]() { + result->imageBuffer = generator->makeBuffer(tryHardware); + }); +} + +ImageGeneratorTask::~ImageGeneratorTask() { + task->cancel(); +} + +std::shared_ptr ImageGeneratorTask::getBuffer() const { + task->wait(); + return imageResult->imageBuffer; +} +} // namespace tgfx diff --git a/src/images/ImageGeneratorTask.h b/src/images/ImageGeneratorTask.h new file mode 100644 index 00000000..e09b2d75 --- /dev/null +++ b/src/images/ImageGeneratorTask.h @@ -0,0 +1,55 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/ImageGenerator.h" +#include "tgfx/utils/Task.h" + +namespace tgfx { +class ImageResult; + +/** + * ImageGeneratorTask wraps an ImageGenerator and schedules an asynchronous decoding task + * immediately. + */ +class ImageGeneratorTask { + public: + static std::shared_ptr MakeFrom(std::shared_ptr generator, + bool tryHardware = true); + + ~ImageGeneratorTask(); + + int imageWidth() const { + return imageGenerator->width(); + } + + int imageHeight() const { + return imageGenerator->height(); + } + + std::shared_ptr getBuffer() const; + + private: + std::shared_ptr task = nullptr; + std::shared_ptr imageResult = nullptr; + std::shared_ptr imageGenerator = nullptr; + + ImageGeneratorTask(std::shared_ptr generator, bool tryHardware); +}; +} // namespace tgfx diff --git a/src/images/ImageSource.cpp b/src/images/ImageSource.cpp new file mode 100644 index 00000000..da5e78c3 --- /dev/null +++ b/src/images/ImageSource.cpp @@ -0,0 +1,126 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
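Illustrative note (not part of the patch): ImageGeneratorTask above starts decoding in its constructor and only blocks when the result is actually needed. A minimal usage sketch, assuming "generator" is a valid std::shared_ptr<ImageGenerator>:

  auto task = ImageGeneratorTask::MakeFrom(generator, /*tryHardware=*/true);
  // ... other CPU work can proceed here while the Task decodes on a worker thread ...
  auto imageBuffer = task->getBuffer();  // waits on the Task only if it has not finished yet
  if (imageBuffer == nullptr) {
    // the generator failed to produce a buffer; fall back or report the error
  }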
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ImageSource.h" +#include "BufferSource.h" +#include "EncodedSource.h" +#include "TextureSource.h" +#include "gpu/Texture.h" + +namespace tgfx { + +std::shared_ptr ImageSource::MakeFrom(UniqueKey uniqueKey, + std::shared_ptr generator) { + if (generator == nullptr) { + return nullptr; + } + auto source = + std::shared_ptr(new EncodedSource(std::move(uniqueKey), std::move(generator))); + source->weakThis = source; + return source; +} + +std::shared_ptr ImageSource::MakeFrom(UniqueKey uniqueKey, + std::shared_ptr buffer) { + if (buffer == nullptr) { + return nullptr; + } + auto source = + std::shared_ptr(new BufferSource(std::move(uniqueKey), std::move(buffer))); + source->weakThis = source; + return source; +} + +std::shared_ptr ImageSource::MakeFrom(UniqueKey uniqueKey, + std::shared_ptr texture) { + if (texture == nullptr) { + return nullptr; + } + texture->assignUniqueKey(uniqueKey); + auto source = + std::shared_ptr(new TextureSource(std::move(uniqueKey), std::move(texture))); + source->weakThis = source; + return source; +} + +ImageSource::ImageSource(UniqueKey uniqueKey) : uniqueKey(std::move(uniqueKey)) { +} + +std::shared_ptr ImageSource::makeTextureSource(Context* context) const { + auto resourceCache = context->resourceCache(); + auto texture = std::static_pointer_cast(resourceCache->findUniqueResource(uniqueKey)); + if (texture != nullptr) { + return MakeFrom(uniqueKey, texture); + } + auto proxy = lockTextureProxy(context, SurfaceOptions::DisableAsyncTaskFlag); + if (proxy == nullptr) { + return nullptr; + } + if (!proxy->isInstantiated()) { + proxy->instantiate(); + } + return MakeFrom(uniqueKey, proxy->getTexture()); +} + +std::shared_ptr ImageSource::makeDecoded(Context* context) const { + if (!isLazyGenerated()) { + return std::static_pointer_cast(weakThis.lock()); + } + auto source = onMakeDecoded(context); + if (source == nullptr) { + return std::static_pointer_cast(weakThis.lock()); + } + source->weakThis = source; + return source; +} + +std::shared_ptr ImageSource::onMakeDecoded(Context*) const { + return nullptr; +} + +std::shared_ptr ImageSource::makeMipMapped() const { + if (hasMipmaps()) { + return std::static_pointer_cast(weakThis.lock()); + } + auto source = onMakeMipMapped(); + if (source == nullptr) { + return std::static_pointer_cast(weakThis.lock()); + } + source->weakThis = source; + return source; +} + +std::shared_ptr ImageSource::lockTextureProxy(Context* context, + uint32_t surfaceFlags) const { + if (context == nullptr) { + return nullptr; + } + auto provider = context->proxyProvider(); + auto proxy = provider->findProxyByUniqueKey(uniqueKey); + if (proxy != nullptr) { + return proxy; + } + proxy = onMakeTextureProxy(context, surfaceFlags); + if (proxy != nullptr) { + auto updateTextureKey = + !(surfaceFlags & SurfaceOptions::DisableCacheFlag) && !isTextureBacked(); + proxy->assignUniqueKey(uniqueKey, updateTextureKey); + } + return proxy; +} +} // namespace tgfx diff --git a/src/images/ImageSource.h b/src/images/ImageSource.h new file mode 100644 index 00000000..f22959d5 --- /dev/null +++ b/src/images/ImageSource.h @@ -0,0 +1,143 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
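Illustrative note (not part of the patch): ImageSource::makeTextureSource above pins an image to a GPU texture that carries the same UniqueKey. A minimal usage sketch, assuming the caller holds a locked Context, as the texture-based MakeFrom overload requires:

  auto textureSource = source->makeTextureSource(context);
  if (textureSource != nullptr) {
    // Later lockTextureProxy() calls wrap the existing texture (see
    // TextureSource::onMakeTextureProxy in this patch) instead of decoding again.
    auto proxy = textureSource->lockTextureProxy(context);
  }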
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/ProxyProvider.h" +#include "tgfx/core/ImageBuffer.h" +#include "tgfx/core/ImageGenerator.h" +#include "tgfx/gpu/Context.h" +#include "tgfx/gpu/SurfaceOptions.h" + +namespace tgfx { +class UniqueKey; + +/** + * ImageSource generates texture proxies for images. + */ +class ImageSource { + public: + /** + * Creates ImageSource from an image generator. ImageSource is returned if the generator is not + * nullptr. The image generator may wrap codec data or custom data. + */ + static std::shared_ptr MakeFrom(UniqueKey uniqueKey, + std::shared_ptr generator); + + /** + * Creates ImageSource from ImageBuffer, ImageSource is returned if the imageBuffer is not nullptr + * and its dimensions are greater than zero. + */ + static std::shared_ptr MakeFrom(UniqueKey uniqueKey, + std::shared_ptr buffer); + + /** + * Creates ImageSource from Texture, ImageSource is returned if texture is not nullptr. Note that + * this method is not thread safe, must be called while the asscociated context is locked. + */ + static std::shared_ptr MakeFrom(UniqueKey uniqueKey, + std::shared_ptr texture); + + virtual ~ImageSource() = default; + + /** + * Returns the width of the target image. + */ + virtual int width() const = 0; + + /** + * Returns the height of the target image. + */ + virtual int height() const = 0; + + /** + * Returns true if the ImageSource has mipmap levels. The value was passed in when creating the + * ImageSource. It may be ignored if mipmaps are not supported by the GPU or the ImageSource. + */ + virtual bool hasMipmaps() const = 0; + + /** + * Returns true if pixels represent transparency only. If true, each pixel is packed in 8 bits as + * defined by ColorType::ALPHA_8. + */ + virtual bool isAlphaOnly() const = 0; + + /** + * Returns true if ImageSource is backed by an image generator or other services that create its + * pixels on-demand. + */ + virtual bool isLazyGenerated() const { + return false; + } + + /** + * Returns true if the ImageSource was created from a GPU texture. + */ + virtual bool isTextureBacked() const { + return false; + } + + /** + * Retrieves the backend texture. Returns an invalid BackendTexture if the ImageSource is not + * backed by a Texture. + */ + virtual BackendTexture getBackendTexture() const { + return {}; + } + + /** + * Returns an ImageSource backed by GPU texture associated with context. Returns original + * ImageSource if context is compatible with backing GPU texture. Returns nullptr if context is + * nullptr, or if ImageSource was created with another context. + */ + virtual std::shared_ptr makeTextureSource(Context* context) const; + + /** + * Returns a decoded ImageSource from the lazy ImageSource. 
The returned ImageSource shares the + * same texture cache with the original ImageSource and immediately schedules an asynchronous + * decoding task, which will not block the calling thread. Returns nullptr if the ImageSource is + * not lazy or has a corresponding texture cache in the specified context. + */ + std::shared_ptr makeDecoded(Context* context = nullptr) const; + + /** + * Returns an Image with mipmaps enabled. Returns the original Image if the Image has mipmaps + * enabled already or fails to enable mipmaps. + */ + std::shared_ptr makeMipMapped() const; + + /** + * Returns a TextureProxy if there is a corresponding cache in the context. Otherwise, immediately + * creates one. + */ + std::shared_ptr lockTextureProxy(Context* context, uint32_t surfaceFlags = 0) const; + + protected: + UniqueKey uniqueKey = {}; + std::weak_ptr weakThis; + + explicit ImageSource(UniqueKey uniqueKey); + + virtual std::shared_ptr onMakeDecoded(Context* context) const; + + virtual std::shared_ptr onMakeMipMapped() const = 0; + + virtual std::shared_ptr onMakeTextureProxy(Context* context, + uint32_t surfaceFlags) const = 0; +}; +} // namespace tgfx diff --git a/src/images/RGBAAAImage.cpp b/src/images/RGBAAAImage.cpp new file mode 100644 index 00000000..c015f0b1 --- /dev/null +++ b/src/images/RGBAAAImage.cpp @@ -0,0 +1,53 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
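Illustrative note (not part of the patch): the calls documented above are designed to be chained. A minimal sketch of the lazy-image workflow, assuming "source" is an EncodedSource-backed ImageSource and "context" a valid GPU context:

  auto mipMapped = source->makeMipMapped();         // returns the original source if already mipmapped
  auto decoding = mipMapped->makeDecoded(context);  // kicks off asynchronous decoding for lazy sources
  // ... later, on the thread that owns the GPU context:
  auto proxy = decoding->lockTextureProxy(context); // reuses the shared texture cache via the UniqueKey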
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "RGBAAAImage.h" +#include "ImageSource.h" +#include "gpu/processors/TextureEffect.h" + +namespace tgfx { +RGBAAAImage::RGBAAAImage(std::shared_ptr source, int displayWidth, int displayHeight, + int alphaStartX, int alphaStartY) + : SubsetImage(std::move(source), Rect::MakeWH(displayWidth, displayHeight)), + alphaStart(Point::Make(alphaStartX, alphaStartY)) { +} + +RGBAAAImage::RGBAAAImage(std::shared_ptr source, const Rect& bounds, + EncodedOrigin origin, const Point& alphaStart) + : SubsetImage(std::move(source), bounds, origin), alphaStart(alphaStart) { +} + +std::shared_ptr RGBAAAImage::onCloneWith(const Rect& newBounds, + EncodedOrigin newOrigin) const { + return std::shared_ptr(new RGBAAAImage(source, newBounds, newOrigin, alphaStart)); +} + +std::shared_ptr RGBAAAImage::onCloneWith(std::shared_ptr newSource) const { + return std::shared_ptr(new RGBAAAImage(std::move(newSource), bounds, origin, alphaStart)); +} + +std::unique_ptr RGBAAAImage::asFragmentProcessor(Context* context, + uint32_t surfaceFlags, TileMode, + TileMode, + const SamplingOptions& sampling, + const Matrix* localMatrix) { + auto matrix = getTotalMatrix(localMatrix); + return TextureEffect::MakeRGBAAA(source->lockTextureProxy(context, surfaceFlags), sampling, + alphaStart, &matrix); +} +} // namespace tgfx diff --git a/src/images/RGBAAAImage.h b/src/images/RGBAAAImage.h new file mode 100644 index 00000000..84b477b3 --- /dev/null +++ b/src/images/RGBAAAImage.h @@ -0,0 +1,49 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
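Illustrative note (not part of the patch): RGBAAAImage above samples the color channels from one region of the texture and the alpha channel from a second region that starts at alphaStart. A hedged sketch of how such an image is typically described, assuming a public Image::makeRGBAAA entry point with the same four parameters as the constructor above; the dimensions are illustrative only:

  // A 512x512 texture: RGB in the left 256x512 half, alpha packed into the right half.
  auto rgbaaa = fullImage->makeRGBAAA(/*displayWidth=*/256, /*displayHeight=*/512,
                                      /*alphaStartX=*/256, /*alphaStartY=*/0);
  // TextureEffect::MakeRGBAAA (used in asFragmentProcessor above) then reads the
  // pixel at (x, y) for color and the pixel at (x + 256, y) for alpha.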
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "SubsetImage.h" + +namespace tgfx { +class RGBAAAImage : public SubsetImage { + public: + RGBAAAImage(std::shared_ptr source, int displayWidth, int displayHeight, + int alphaStartX, int alphaStartY); + + bool isRGBAAA() const override { + return true; + } + + protected: + std::shared_ptr onCloneWith(const Rect& newBounds, + EncodedOrigin newOrigin) const override; + + std::shared_ptr onCloneWith(std::shared_ptr newSource) const override; + + std::unique_ptr asFragmentProcessor( + Context* context, uint32_t surfaceFlags, TileMode tileModeX, TileMode tileModeY, + const SamplingOptions& sampling, const Matrix* localMatrix = nullptr) override; + + private: + Point alphaStart = Point::Zero(); + + RGBAAAImage(std::shared_ptr source, const Rect& bounds, EncodedOrigin origin, + const Point& alphaStart); +}; +} // namespace tgfx diff --git a/src/images/RasterBuffer.cpp b/src/images/RasterBuffer.cpp new file mode 100644 index 00000000..f4647062 --- /dev/null +++ b/src/images/RasterBuffer.cpp @@ -0,0 +1,59 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "RasterBuffer.h" +#include "gpu/Texture.h" + +namespace tgfx { +std::shared_ptr RasterBuffer::MakeFrom(const ImageInfo& info, + std::shared_ptr pixels) { + if (info.isEmpty() || pixels == nullptr || info.byteSize() > pixels->size() || + info.alphaType() == AlphaType::Unpremultiplied) { + return nullptr; + } + switch (info.colorType()) { + case ColorType::ALPHA_8: + case ColorType::RGBA_8888: + case ColorType::BGRA_8888: + return std::shared_ptr(new RasterBuffer(info, std::move(pixels))); + default: + return nullptr; + } +} + +RasterBuffer::RasterBuffer(const ImageInfo& info, std::shared_ptr pixels) + : info(info), pixels(std::move(pixels)) { +} + +std::shared_ptr RasterBuffer::onMakeTexture(Context* context, bool mipMapped) const { + switch (info.colorType()) { + case ColorType::ALPHA_8: + return Texture::MakeAlpha(context, info.width(), info.height(), pixels->data(), + info.rowBytes(), ImageOrigin::TopLeft, mipMapped); + case ColorType::BGRA_8888: + return Texture::MakeFormat(context, info.width(), info.height(), pixels->data(), + info.rowBytes(), PixelFormat::BGRA_8888, ImageOrigin::TopLeft, + mipMapped); + case ColorType::RGBA_8888: + return Texture::MakeRGBA(context, info.width(), info.height(), pixels->data(), + info.rowBytes(), ImageOrigin::TopLeft, mipMapped); + default: + return nullptr; + } +} +} // namespace tgfx diff --git a/src/images/RasterBuffer.h b/src/images/RasterBuffer.h new file mode 100644 index 00000000..3be4ece2 --- /dev/null +++ b/src/images/RasterBuffer.h @@ -0,0 +1,51 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
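Illustrative note (not part of the patch): RasterBuffer::MakeFrom above is strict about what it accepts. A hedged sketch of the validation contract, assuming an ImageInfo::Make factory with this parameter order (width, height, color type, alpha type) and that "pixels" is a std::shared_ptr<Data> holding the raster data:

  auto info = ImageInfo::Make(128, 128, ColorType::RGBA_8888, AlphaType::Unpremultiplied);
  auto buffer = RasterBuffer::MakeFrom(info, pixels);
  // buffer == nullptr here: unpremultiplied alpha is rejected, as are color types other
  // than ALPHA_8 / RGBA_8888 / BGRA_8888, empty infos, and Data objects smaller than
  // info.byteSize(). Callers are expected to premultiply or convert before wrapping.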
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Data.h" +#include "tgfx/core/ImageBuffer.h" +#include "tgfx/core/ImageInfo.h" + +namespace tgfx { +class RasterBuffer : public ImageBuffer { + public: + static std::shared_ptr MakeFrom(const ImageInfo& info, std::shared_ptr pixels); + + int width() const override { + return info.width(); + } + + int height() const override { + return info.height(); + } + + bool isAlphaOnly() const override { + return info.isAlphaOnly(); + } + + protected: + std::shared_ptr onMakeTexture(Context* context, bool mipMapped) const override; + + private: + ImageInfo info = {}; + std::shared_ptr pixels = nullptr; + + RasterBuffer(const ImageInfo& info, std::shared_ptr pixels); +}; +} // namespace tgfx diff --git a/src/images/RasterGenerator.cpp b/src/images/RasterGenerator.cpp new file mode 100644 index 00000000..07ce695d --- /dev/null +++ b/src/images/RasterGenerator.cpp @@ -0,0 +1,46 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "RasterGenerator.h" +#include "tgfx/core/Bitmap.h" + +namespace tgfx { +std::shared_ptr RasterGenerator::MakeFrom(const ImageInfo& info, + std::shared_ptr pixels) { + if (info.isEmpty() || pixels == nullptr || info.byteSize() > pixels->size()) { + return nullptr; + } + return std::shared_ptr(new RasterGenerator(info, std::move(pixels))); +} + +RasterGenerator::RasterGenerator(const ImageInfo& info, std::shared_ptr pixels) + : ImageGenerator(info.width(), info.height()), info(info), pixels(std::move(pixels)) { +} + +std::shared_ptr RasterGenerator::onMakeBuffer(bool tryHardware) const { + Bitmap bitmap(width(), height(), isAlphaOnly(), tryHardware); + if (bitmap.isEmpty()) { + return nullptr; + } + auto success = bitmap.writePixels(info, pixels->data()); + if (!success) { + return nullptr; + } + return bitmap.makeBuffer(); +} +} // namespace tgfx diff --git a/src/images/RasterGenerator.h b/src/images/RasterGenerator.h new file mode 100644 index 00000000..99751ad0 --- /dev/null +++ b/src/images/RasterGenerator.h @@ -0,0 +1,44 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Data.h" +#include "tgfx/core/ImageGenerator.h" +#include "tgfx/core/ImageInfo.h" + +namespace tgfx { +class RasterGenerator : public ImageGenerator { + public: + static std::shared_ptr MakeFrom(const ImageInfo& info, + std::shared_ptr pixels); + + bool isAlphaOnly() const override { + return info.isAlphaOnly(); + } + + protected: + std::shared_ptr onMakeBuffer(bool tryHardware) const override; + + private: + ImageInfo info = {}; + std::shared_ptr pixels = nullptr; + + RasterGenerator(const ImageInfo& info, std::shared_ptr pixels); +}; +} // namespace tgfx diff --git a/src/images/RasterYUVBuffer.cpp b/src/images/RasterYUVBuffer.cpp new file mode 100644 index 00000000..3f721abe --- /dev/null +++ b/src/images/RasterYUVBuffer.cpp @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "RasterYUVBuffer.h" + +namespace tgfx { +RasterYUVBuffer::RasterYUVBuffer(std::shared_ptr data, YUVPixelFormat format, + YUVColorSpace colorSpace) + : data(std::move(data)), colorSpace(colorSpace), format(format) { +} + +std::shared_ptr RasterYUVBuffer::onMakeTexture(tgfx::Context* context, bool) const { + if (format == YUVPixelFormat::NV12) { + return YUVTexture::MakeNV12(context, data.get(), colorSpace); + } + return YUVTexture::MakeI420(context, data.get(), colorSpace); +} +} // namespace tgfx diff --git a/src/images/RasterYUVBuffer.h b/src/images/RasterYUVBuffer.h new file mode 100644 index 00000000..93e43a9a --- /dev/null +++ b/src/images/RasterYUVBuffer.h @@ -0,0 +1,56 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/YUVTexture.h" +#include "tgfx/core/Data.h" +#include "tgfx/core/ImageBuffer.h" +#include "tgfx/core/YUVColorSpace.h" +#include "tgfx/core/YUVData.h" + +namespace tgfx { +/** + * YUVBuffer represents a raster pixel array described in the YUV format. + */ +class RasterYUVBuffer : public ImageBuffer { + public: + RasterYUVBuffer(std::shared_ptr data, YUVPixelFormat format, YUVColorSpace colorSpace); + + int width() const override { + return data->width(); + } + + int height() const override { + return data->height(); + } + + bool isAlphaOnly() const final { + return false; + } + + protected: + std::shared_ptr onMakeTexture(tgfx::Context* context, + bool mipMapped) const override; + + private: + std::shared_ptr data = nullptr; + YUVColorSpace colorSpace = YUVColorSpace::BT601_LIMITED; + YUVPixelFormat format = YUVPixelFormat::I420; +}; +} // namespace tgfx diff --git a/src/images/SubsetImage.cpp b/src/images/SubsetImage.cpp new file mode 100644 index 00000000..280a6f1a --- /dev/null +++ b/src/images/SubsetImage.cpp @@ -0,0 +1,142 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
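Illustrative note (not part of the patch): RasterYUVBuffer above picks the texture factory from its pixel format. A hedged sketch, assuming "yuvData" is a valid std::shared_ptr<YUVData> describing the planes:

  auto yuvBuffer = std::make_shared<RasterYUVBuffer>(yuvData, YUVPixelFormat::NV12,
                                                     YUVColorSpace::BT601_LIMITED);
  // When a texture is later created from this buffer, the dispatch in
  // RasterYUVBuffer::onMakeTexture above resolves to YUVTexture::MakeNV12();
  // an I420 buffer would resolve to YUVTexture::MakeI420() instead.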
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "SubsetImage.h" +#include "ImageSource.h" + +namespace tgfx { +static const auto TopLeftMatrix = Matrix::I(); +static const auto TopRightMatrix = Matrix::MakeAll(-1, 0, 1, 0, 1, 0); +static const auto BottomRightMatrix = Matrix::MakeAll(-1, 0, 1, 0, -1, 1); +static const auto BottomLeftMatrix = Matrix::MakeAll(1, 0, 0, 0, -1, 1); +static const auto LeftTopMatrix = Matrix::MakeAll(0, 1, 0, 1, 0, 0); +static const auto RightTopMatrix = Matrix::MakeAll(0, -1, 1, 1, 0, 0); +static const auto RightBottomMatrix = Matrix::MakeAll(0, -1, 1, -1, 0, 1); +static const auto LeftBottomMatrix = Matrix::MakeAll(0, 1, 0, -1, 0, 1); + +static Matrix OriginToMatrix(EncodedOrigin origin) { + switch (origin) { + case EncodedOrigin::TopRight: + return TopRightMatrix; + case EncodedOrigin::BottomRight: + return BottomRightMatrix; + case EncodedOrigin::BottomLeft: + return BottomLeftMatrix; + case EncodedOrigin::LeftTop: + return LeftTopMatrix; + case EncodedOrigin::RightTop: + return RightTopMatrix; + case EncodedOrigin::RightBottom: + return RightBottomMatrix; + case EncodedOrigin::LeftBottom: + return LeftBottomMatrix; + default: + break; + } + return TopLeftMatrix; +} + +static EncodedOrigin ConcatOrigin(EncodedOrigin oldOrigin, EncodedOrigin newOrigin) { + auto oldMatrix = OriginToMatrix(oldOrigin); + auto newMatrix = OriginToMatrix(newOrigin); + oldMatrix.postConcat(newMatrix); + if (oldMatrix == TopRightMatrix) { + return EncodedOrigin::TopRight; + } + if (oldMatrix == BottomRightMatrix) { + return EncodedOrigin::BottomRight; + } + if (oldMatrix == BottomLeftMatrix) { + return EncodedOrigin::BottomLeft; + } + if (oldMatrix == LeftTopMatrix) { + return EncodedOrigin::LeftTop; + } + if (oldMatrix == RightTopMatrix) { + return EncodedOrigin::RightTop; + } + if (oldMatrix == RightBottomMatrix) { + return EncodedOrigin::RightBottom; + } + if (oldMatrix == LeftBottomMatrix) { + return EncodedOrigin::LeftBottom; + } + return EncodedOrigin::TopLeft; +} + +SubsetImage::SubsetImage(std::shared_ptr source, const Rect& bounds) + : Image(std::move(source)), bounds(bounds) { +} + +SubsetImage::SubsetImage(std::shared_ptr imageSource, EncodedOrigin origin) + : Image(std::move(imageSource)), origin(origin) { + bounds = Rect::MakeWH(source->width(), source->height()); + auto matrix = EncodedOriginToMatrix(origin, source->width(), source->height()); + matrix.mapRect(&bounds); +} + +SubsetImage::SubsetImage(std::shared_ptr source, const Rect& bounds, + EncodedOrigin origin) + : Image(std::move(source)), bounds(bounds), origin(origin) { +} + +std::shared_ptr SubsetImage::onCloneWith(const Rect& newBounds, + EncodedOrigin newOrigin) const { + return std::shared_ptr(new SubsetImage(source, newBounds, newOrigin)); +} + +std::shared_ptr SubsetImage::onCloneWith(std::shared_ptr newSource) const { + return std::shared_ptr(new SubsetImage(std::move(newSource), bounds, origin)); +} + +std::shared_ptr SubsetImage::onMakeSubset(const Rect& subset) const { + auto newBounds = subset; + newBounds.offset(bounds.x(), bounds.y()); + return onCloneWith(newBounds, origin); +} + +std::shared_ptr SubsetImage::onApplyOrigin(EncodedOrigin encodedOrigin) const { + auto width = source->width(); + auto height = source->height(); + if (origin == EncodedOrigin::LeftTop || origin == EncodedOrigin::RightTop || + origin == EncodedOrigin::RightBottom || origin == EncodedOrigin::LeftBottom) { + std::swap(width, height); + } + auto matrix = 
EncodedOriginToMatrix(encodedOrigin, width, height); + auto newBounds = matrix.mapRect(bounds); + auto newOrigin = ConcatOrigin(origin, encodedOrigin); + return onCloneWith(newBounds, newOrigin); +} + +Matrix SubsetImage::getTotalMatrix(const Matrix* localMatrix) const { + auto matrix = EncodedOriginToMatrix(origin, source->width(), source->height()); + matrix.postTranslate(-bounds.x(), -bounds.y()); + matrix.invert(&matrix); + if (localMatrix != nullptr) { + matrix.preConcat(*localMatrix); + } + return matrix; +} + +std::unique_ptr SubsetImage::asFragmentProcessor( + Context* context, uint32_t surfaceFlags, TileMode tileModeX, TileMode tileModeY, + const SamplingOptions& sampling, const Matrix* localMatrix) { + auto matrix = getTotalMatrix(localMatrix); + return Image::asFragmentProcessor(context, surfaceFlags, tileModeX, tileModeY, sampling, &matrix); +} +} // namespace tgfx diff --git a/src/images/SubsetImage.h b/src/images/SubsetImage.h new file mode 100644 index 00000000..598684fc --- /dev/null +++ b/src/images/SubsetImage.h @@ -0,0 +1,64 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
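Illustrative note (not part of the patch): ConcatOrigin above composes two EncodedOrigin values by multiplying their normalized 2x3 matrices and mapping the product back to a named origin. For example (the function is file-local, so this only spells out its result):

  auto combined = ConcatOrigin(EncodedOrigin::TopRight, EncodedOrigin::BottomRight);
  // A horizontal mirror (TopRight) followed by a 180-degree rotation (BottomRight)
  // is a vertical flip, so combined == EncodedOrigin::BottomLeft.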
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/FragmentProcessor.h" +#include "tgfx/core/Image.h" + +namespace tgfx { +class SubsetImage : public Image { + public: + SubsetImage(std::shared_ptr source, const Rect& bounds); + + SubsetImage(std::shared_ptr source, EncodedOrigin origin); + + int width() const override { + return bounds.width(); + } + + int height() const override { + return bounds.height(); + } + + protected: + Rect bounds = Rect::MakeEmpty(); + EncodedOrigin origin = EncodedOrigin::TopLeft; + + SubsetImage(std::shared_ptr source, const Rect& bounds, EncodedOrigin origin); + + virtual std::shared_ptr onCloneWith(const Rect& newBounds, + EncodedOrigin newOrigin) const; + + std::shared_ptr onCloneWith(std::shared_ptr newSource) const override; + + std::shared_ptr onMakeSubset(const Rect& subset) const override; + + std::shared_ptr onMakeRGBAAA(int, int, int, int) const override { + return nullptr; + } + + std::shared_ptr onApplyOrigin(EncodedOrigin encodedOrigin) const override; + + std::unique_ptr asFragmentProcessor( + Context* context, uint32_t surfaceFlags, TileMode tileModeX, TileMode tileModeY, + const SamplingOptions& sampling, const Matrix* localMatrix = nullptr) override; + + Matrix getTotalMatrix(const Matrix* localMatrix) const; +}; +} // namespace tgfx diff --git a/src/images/TextureSource.cpp b/src/images/TextureSource.cpp new file mode 100644 index 00000000..99cbcb0c --- /dev/null +++ b/src/images/TextureSource.cpp @@ -0,0 +1,42 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
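Illustrative note (not part of the patch): SubsetImage::onMakeSubset above offsets the new bounds into the original source's coordinate space, so nesting subsets never stacks more than one SubsetImage. A hedged sketch, assuming a public Image::makeSubset entry point and Rect::MakeXYWH; the numbers are illustrative:

  auto first = image->makeSubset(Rect::MakeXYWH(10, 10, 100, 100));
  auto second = first->makeSubset(Rect::MakeXYWH(5, 5, 50, 50));
  // "second" samples the 50x50 region whose top-left corner is at (15, 15) in the
  // original image, because the two offsets are accumulated in onMakeSubset.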
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "TextureSource.h" + +namespace tgfx { +BackendTexture TextureSource::getBackendTexture() const { + if (!texture->hasUniqueKey(uniqueKey)) { + return {}; + } + return texture->getBackendTexture(); +} + +std::shared_ptr TextureSource::makeTextureSource(Context* context) const { + if (texture->getContext() == context && texture->hasUniqueKey(uniqueKey)) { + return std::static_pointer_cast(weakThis.lock()); + } + return nullptr; +} + +std::shared_ptr TextureSource::onMakeTextureProxy(Context* context, uint32_t) const { + if (!texture->hasUniqueKey(uniqueKey)) { + return {}; + } + return context->proxyProvider()->wrapTexture(texture); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/images/TextureSource.h b/src/images/TextureSource.h new file mode 100644 index 00000000..0a46ad74 --- /dev/null +++ b/src/images/TextureSource.h @@ -0,0 +1,71 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "ImageSource.h" +#include "gpu/TextureSampler.h" + +namespace tgfx { +/** + * TextureSource wraps an existing texture. + */ +class TextureSource : public ImageSource { + public: + int width() const override { + return texture->width(); + } + + int height() const override { + return texture->height(); + } + + bool hasMipmaps() const override { + return texture->getSampler()->hasMipmaps(); + } + + bool isAlphaOnly() const override { + return texture->getSampler()->format == PixelFormat::ALPHA_8; + } + + bool isTextureBacked() const override { + return true; + } + + BackendTexture getBackendTexture() const override; + + std::shared_ptr makeTextureSource(Context* context) const override; + + protected: + std::shared_ptr onMakeMipMapped() const override { + return nullptr; + } + + std::shared_ptr onMakeTextureProxy(Context* context, + uint32_t surfaceFlags) const override; + + private: + std::shared_ptr texture = nullptr; + + explicit TextureSource(UniqueKey uniqueKey, std::shared_ptr texture) + : ImageSource(std::move(uniqueKey)), texture(std::move(texture)) { + } + + friend class ImageSource; +}; +} // namespace tgfx diff --git a/src/opengl/GLAssembledGLESInterface.cpp b/src/opengl/GLAssembledGLESInterface.cpp new file mode 100644 index 00000000..d925cc10 --- /dev/null +++ b/src/opengl/GLAssembledGLESInterface.cpp @@ -0,0 +1,109 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. 
All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLAssembledGLESInterface.h" +#include "GLInterface.h" + +namespace tgfx { +static void InitFramebufferTexture2DMultisample(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info) { + if (info.hasExtension("GL_EXT_multisampled_render_to_texture")) { + functions->framebufferTexture2DMultisample = + reinterpret_cast( + getter->getProcAddress("glFramebufferTexture2DMultisampleEXT")); + } else if (info.hasExtension("GL_IMG_multisampled_render_to_texture")) { + functions->framebufferTexture2DMultisample = + reinterpret_cast( + getter->getProcAddress("glFramebufferTexture2DMultisampleIMG")); + } +} + +static void InitRenderbufferStorageMultisample(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info) { + if (info.version >= GL_VER(3, 0)) { + functions->renderbufferStorageMultisample = reinterpret_cast( + getter->getProcAddress("glRenderbufferStorageMultisample")); + } else if (info.hasExtension("GL_CHROMIUM_framebuffer_multisample")) { + functions->renderbufferStorageMultisample = reinterpret_cast( + getter->getProcAddress("glRenderbufferStorageMultisampleCHROMIUM")); + } else if (info.hasExtension("GL_ANGLE_framebuffer_multisample")) { + functions->renderbufferStorageMultisample = reinterpret_cast( + getter->getProcAddress("glRenderbufferStorageMultisampleANGLE")); + } + if (info.hasExtension("GL_EXT_multisampled_render_to_texture")) { + functions->renderbufferStorageMultisampleEXT = + reinterpret_cast( + getter->getProcAddress("glRenderbufferStorageMultisampleEXT")); + } + if (info.hasExtension("GL_IMG_multisampled_render_to_texture")) { + functions->renderbufferStorageMultisampleEXT = + reinterpret_cast( + getter->getProcAddress("glRenderbufferStorageMultisampleIMG")); + } + if (info.hasExtension("GL_APPLE_framebuffer_multisample")) { + functions->renderbufferStorageMultisampleAPPLE = + reinterpret_cast( + getter->getProcAddress("glRenderbufferStorageMultisampleAPPLE")); + } +} + +static void InitBlitFramebuffer(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info) { + if (info.version >= GL_VER(3, 0)) { + functions->blitFramebuffer = + reinterpret_cast(getter->getProcAddress("glBlitFramebuffer")); + } else if (info.hasExtension("GL_CHROMIUM_framebuffer_multisample")) { + functions->blitFramebuffer = + reinterpret_cast(getter->getProcAddress("glBlitFramebufferCHROMIUM")); + } else if (info.hasExtension("GL_ANGLE_framebuffer_blit")) { + functions->blitFramebuffer = + reinterpret_cast(getter->getProcAddress("glBlitFramebufferANGLE")); + } +} + +static void InitVertexArray(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info) { + if (info.version >= GL_VER(3, 0)) { + functions->bindVertexArray = + reinterpret_cast(getter->getProcAddress("glBindVertexArray")); + functions->deleteVertexArrays = + 
reinterpret_cast(getter->getProcAddress("glDeleteVertexArrays")); + functions->genVertexArrays = + reinterpret_cast(getter->getProcAddress("glGenVertexArrays")); + } else if (info.hasExtension("GL_OES_vertex_array_object")) { + functions->bindVertexArray = + reinterpret_cast(getter->getProcAddress("glBindVertexArrayOES")); + functions->deleteVertexArrays = + reinterpret_cast(getter->getProcAddress("glDeleteVertexArraysOES")); + functions->genVertexArrays = + reinterpret_cast(getter->getProcAddress("glGenVertexArraysOES")); + } +} + +void GLAssembleGLESInterface(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info) { + if (info.hasExtension("GL_NV_texture_barrier")) { + functions->textureBarrier = + reinterpret_cast(getter->getProcAddress("glTextureBarrierNV")); + } + InitBlitFramebuffer(getter, functions, info); + InitRenderbufferStorageMultisample(getter, functions, info); + InitFramebufferTexture2DMultisample(getter, functions, info); + InitVertexArray(getter, functions, info); +} +} // namespace tgfx diff --git a/src/opengl/GLAssembledGLESInterface.h b/src/opengl/GLAssembledGLESInterface.h new file mode 100644 index 00000000..77f25eb0 --- /dev/null +++ b/src/opengl/GLAssembledGLESInterface.h @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +class GLProcGetter; +class GLFunctions; +class GLInfo; + +void GLAssembleGLESInterface(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info); +} // namespace tgfx diff --git a/src/opengl/GLAssembledGLInterface.cpp b/src/opengl/GLAssembledGLInterface.cpp new file mode 100644 index 00000000..38f33928 --- /dev/null +++ b/src/opengl/GLAssembledGLInterface.cpp @@ -0,0 +1,81 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
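Illustrative note (not part of the patch): the GLES assembly code above always tries the core entry point when the context version is new enough, then falls back to vendor extensions, and otherwise leaves the pointer null so callers can treat the feature as unsupported. A hedged sketch of the same pattern applied to a hypothetical entry point; the invalidateFramebuffer member and the GLInvalidateFramebuffer typedef are assumptions for illustration only:

  static void InitInvalidateFramebuffer(const GLProcGetter* getter, GLFunctions* functions,
                                        const GLInfo& info) {
    if (info.version >= GL_VER(3, 0)) {
      functions->invalidateFramebuffer = reinterpret_cast<GLInvalidateFramebuffer*>(
          getter->getProcAddress("glInvalidateFramebuffer"));
    } else if (info.hasExtension("GL_EXT_discard_framebuffer")) {
      functions->invalidateFramebuffer = reinterpret_cast<GLInvalidateFramebuffer*>(
          getter->getProcAddress("glDiscardFramebufferEXT"));
    }
  }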
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLAssembledGLInterface.h" +#include "GLInterface.h" + +namespace tgfx { +static void InitTextureBarrier(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info) { + if (info.version >= GL_VER(4, 5) || info.hasExtension("GL_ARB_texture_barrier")) { + functions->textureBarrier = + reinterpret_cast(getter->getProcAddress("glTextureBarrier")); + } else if (info.hasExtension("GL_NV_texture_barrier")) { + functions->textureBarrier = + reinterpret_cast(getter->getProcAddress("glTextureBarrierNV")); + } +} + +static void InitBlitFrameBuffer(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info) { + if (info.version >= GL_VER(3, 0) || info.hasExtension("GL_ARB_framebuffer_object")) { + functions->blitFramebuffer = + reinterpret_cast(getter->getProcAddress("glBlitFramebuffer")); + } else if (info.hasExtension("GL_EXT_framebuffer_blit")) { + functions->blitFramebuffer = + reinterpret_cast(getter->getProcAddress("glBlitFramebufferEXT")); + } +} + +static void InitVertexArray(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info) { + if (info.version >= GL_VER(3, 0) || info.hasExtension("GL_ARB_vertex_array_object")) { + functions->bindVertexArray = + reinterpret_cast(getter->getProcAddress("glBindVertexArray")); + functions->deleteVertexArrays = + reinterpret_cast(getter->getProcAddress("glDeleteVertexArrays")); + functions->genVertexArrays = + reinterpret_cast(getter->getProcAddress("glGenVertexArrays")); + } else if (info.hasExtension("GL_APPLE_vertex_array_object")) { + functions->bindVertexArray = + reinterpret_cast(getter->getProcAddress("glBindVertexArrayAPPLE")); + functions->deleteVertexArrays = reinterpret_cast( + getter->getProcAddress("glDeleteVertexArraysAPPLE")); + functions->genVertexArrays = + reinterpret_cast(getter->getProcAddress("glGenVertexArraysAPPLE")); + } +} + +static void InitRenderbufferStorageMultisample(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info) { + if (info.version >= GL_VER(3, 0) || info.hasExtension("GL_ARB_framebuffer_object")) { + functions->renderbufferStorageMultisample = reinterpret_cast( + getter->getProcAddress("glRenderbufferStorageMultisample")); + } else if (info.hasExtension("GL_EXT_framebuffer_multisample")) { + functions->renderbufferStorageMultisample = reinterpret_cast( + getter->getProcAddress("glRenderbufferStorageMultisampleEXT")); + } +} + +void GLAssembleGLInterface(const GLProcGetter* getter, GLFunctions* functions, const GLInfo& info) { + InitTextureBarrier(getter, functions, info); + InitBlitFrameBuffer(getter, functions, info); + InitRenderbufferStorageMultisample(getter, functions, info); + InitVertexArray(getter, functions, info); +} +} // namespace tgfx diff --git a/src/opengl/GLAssembledGLInterface.h b/src/opengl/GLAssembledGLInterface.h new file mode 100644 index 00000000..0d2a8e7c --- /dev/null +++ b/src/opengl/GLAssembledGLInterface.h @@ -0,0 +1,27 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +class GLProcGetter; +class GLFunctions; +class GLInfo; + +void GLAssembleGLInterface(const GLProcGetter* getter, GLFunctions* functions, const GLInfo& info); +} // namespace tgfx diff --git a/src/opengl/GLAssembledWebGLInterface.cpp b/src/opengl/GLAssembledWebGLInterface.cpp new file mode 100644 index 00000000..f6a0eb47 --- /dev/null +++ b/src/opengl/GLAssembledWebGLInterface.cpp @@ -0,0 +1,53 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLAssembledWebGLInterface.h" +#include "GLInterface.h" + +namespace tgfx { +static void InitVertexArray(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info) { + if (info.version >= GL_VER(2, 0)) { + functions->bindVertexArray = + reinterpret_cast(getter->getProcAddress("glBindVertexArray")); + functions->deleteVertexArrays = + reinterpret_cast(getter->getProcAddress("glDeleteVertexArrays")); + functions->genVertexArrays = + reinterpret_cast(getter->getProcAddress("glGenVertexArrays")); + } else if (info.hasExtension("GL_OES_vertex_array_object") || + info.hasExtension("OES_vertex_array_object")) { + functions->bindVertexArray = + reinterpret_cast(getter->getProcAddress("glBindVertexArrayOES")); + functions->deleteVertexArrays = + reinterpret_cast(getter->getProcAddress("glDeleteVertexArraysOES")); + functions->genVertexArrays = + reinterpret_cast(getter->getProcAddress("glGenVertexArraysOES")); + } +} + +void GLAssembleWebGLInterface(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info) { + if (info.version >= GL_VER(2, 0)) { + functions->blitFramebuffer = + reinterpret_cast(getter->getProcAddress("glBlitFramebuffer")); + functions->renderbufferStorageMultisample = reinterpret_cast( + getter->getProcAddress("glRenderbufferStorageMultisample")); + } + InitVertexArray(getter, functions, info); +} +} // namespace tgfx diff --git a/src/opengl/GLAssembledWebGLInterface.h b/src/opengl/GLAssembledWebGLInterface.h new file mode 100644 index 00000000..b04ef233 --- /dev/null +++ b/src/opengl/GLAssembledWebGLInterface.h @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +class GLProcGetter; +class GLFunctions; +class GLInfo; + +void GLAssembleWebGLInterface(const GLProcGetter* getter, GLFunctions* functions, + const GLInfo& info); +} // namespace tgfx diff --git a/src/opengl/GLBlend.cpp b/src/opengl/GLBlend.cpp new file mode 100644 index 00000000..daf4b185 --- /dev/null +++ b/src/opengl/GLBlend.cpp @@ -0,0 +1,456 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
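The WebGL path above checks both "GL_OES_vertex_array_object" and "OES_vertex_array_object" because WebGL contexts report extension names without the "GL_" prefix, while emscripten's GL emulation can also expose the prefixed spelling. A hypothetical helper (not tgfx API) that captures that convention:

```cpp
#include <string>
#include <vector>

// Accepts either the raw WebGL extension name or its "GL_"-prefixed spelling.
bool HasWebGLExtension(const std::vector<std::string>& extensions, const std::string& name) {
  for (const auto& ext : extensions) {
    if (ext == name || ext == "GL_" + name) {
      return true;
    }
  }
  return false;
}

// HasWebGLExtension(exts, "OES_vertex_array_object") matches both spellings used above.
```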
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLBlend.h" +#include "gpu/Blend.h" + +namespace tgfx { +static void HardLight(FragmentShaderBuilder* fsBuilder, const char* final, const char* src, + const char* dst) { + static constexpr char kComponents[] = {'r', 'g', 'b'}; + for (auto& component : kComponents) { + fsBuilder->codeAppendf("if (2.0 * %s.%c < %s.a) {", src, component, src); + fsBuilder->codeAppendf("%s.%c = 2.0 * %s.%c * %s.%c;", final, component, src, component, dst, + component); + fsBuilder->codeAppend("} else {"); + fsBuilder->codeAppendf("%s.%c = %s.a * %s.a - 2.0 * (%s.a - %s.%c) * (%s.a - %s.%c);", final, + component, src, dst, dst, dst, component, src, src, component); + fsBuilder->codeAppend("}"); + } + fsBuilder->codeAppendf("%s.rgb += %s.rgb * (1.0 - %s.a) + %s.rgb * (1.0 - %s.a);", final, src, + dst, dst, src); +} + +// Does one component of color-dodge +static void ColorDodgeComponent(FragmentShaderBuilder* fsBuilder, const char* final, + const char* src, const char* dst, const char component) { + fsBuilder->codeAppendf("if (0.0 == %s.%c) {", dst, component); + fsBuilder->codeAppendf("%s.%c = %s.%c * (1.0 - %s.a);", final, component, src, component, dst); + fsBuilder->codeAppend("} else {"); + fsBuilder->codeAppendf("float d = %s.a - %s.%c;", src, src, component); + fsBuilder->codeAppend("if (0.0 == d) {"); + fsBuilder->codeAppendf("%s.%c = %s.a * %s.a + %s.%c * (1.0 - %s.a) + %s.%c * (1.0 - %s.a);", + final, component, src, dst, src, component, dst, dst, component, src); + fsBuilder->codeAppend("} else {"); + fsBuilder->codeAppendf("d = min(%s.a, %s.%c * %s.a / d);", dst, dst, component, src); + fsBuilder->codeAppendf("%s.%c = d * %s.a + %s.%c * (1.0 - %s.a) + %s.%c * (1.0 - %s.a);", final, + component, src, src, component, dst, dst, component, src); + fsBuilder->codeAppend("}"); + fsBuilder->codeAppend("}"); +} + +// Does one component of color-burn +static void ColorBurnComponent(FragmentShaderBuilder* fsBuilder, const char* final, const char* src, + const char* dst, const char component) { + fsBuilder->codeAppendf("if (%s.a == %s.%c) {", dst, dst, component); + fsBuilder->codeAppendf("%s.%c = %s.a * %s.a + %s.%c * (1.0 - %s.a) + %s.%c * (1.0 - %s.a);", + final, component, src, dst, src, component, dst, dst, component, src); + fsBuilder->codeAppendf("} else if (0.0 == %s.%c) {", src, component); + fsBuilder->codeAppendf("%s.%c = %s.%c * (1.0 - %s.a);", final, component, dst, component, src); + fsBuilder->codeAppend("} else {"); + fsBuilder->codeAppendf("float d = max(0.0, %s.a - (%s.a - %s.%c) * %s.a / %s.%c);", dst, dst, dst, + component, src, src, component); + fsBuilder->codeAppendf("%s.%c = %s.a * d + %s.%c * (1.0 - %s.a) + %s.%c * (1.0 - %s.a);", final, + component, src, src, component, dst, dst, component, src); + fsBuilder->codeAppend("}"); +} + +// Does one component of soft-light. Caller should have already checked that dst alpha > 0. 
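ColorDodgeComponent() above emits fairly branchy GLSL. The same per-channel math in plain C++, transliterated directly from the format strings (premultiplied components), makes the three cases easier to verify; this is only a reference sketch, not code used by tgfx:

```cpp
#include <algorithm>

// One premultiplied color channel (r, g, or b) of the color-dodge blend.
float ColorDodgeChannel(float srcC, float srcA, float dstC, float dstA) {
  if (dstC == 0.0f) {
    return srcC * (1.0f - dstA);  // dst channel is empty
  }
  float d = srcA - srcC;
  if (d == 0.0f) {
    // src channel is saturated relative to its alpha
    return srcA * dstA + srcC * (1.0f - dstA) + dstC * (1.0f - srcA);
  }
  d = std::min(dstA, dstC * srcA / d);
  return d * srcA + srcC * (1.0f - dstA) + dstC * (1.0f - srcA);
}
```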
+static void SoftLightComponentPosDstAlpha(FragmentShaderBuilder* fsBuilder, const char* final, + const char* src, const char* dst, const char component) { + // if (2S < Sa) + fsBuilder->codeAppendf("if (2.0 * %s.%c <= %s.a) {", src, component, src); + // (D^2 (Sa-2 S))/Da+(1-Da) S+D (-Sa+2 S+1) + fsBuilder->codeAppendf( + "%s.%c = (%s.%c*%s.%c*(%s.a - 2.0*%s.%c)) / %s.a +" + "(1.0 - %s.a) * %s.%c + %s.%c*(-%s.a + 2.0*%s.%c + 1.0);", + final, component, dst, component, dst, component, src, src, component, dst, dst, src, + component, dst, component, src, src, component); + // else if (4D < Da) + fsBuilder->codeAppendf("} else if (4.0 * %s.%c <= %s.a) {", dst, component, dst); + fsBuilder->codeAppendf("float DSqd = %s.%c * %s.%c;", dst, component, dst, component); + fsBuilder->codeAppendf("float DCub = DSqd * %s.%c;", dst, component); + fsBuilder->codeAppendf("float DaSqd = %s.a * %s.a;", dst, dst); + fsBuilder->codeAppendf("float DaCub = DaSqd * %s.a;", dst); + // (Da^3 (-S)+Da^2 (S-D (3 Sa-6 S-1))+12 Da D^2 (Sa-2 S)-16 D^3 (Sa-2 S))/Da^2 + fsBuilder->codeAppendf( + "%s.%c =" + "(DaSqd*(%s.%c - %s.%c * (3.0*%s.a - 6.0*%s.%c - 1.0)) +" + " 12.0*%s.a*DSqd*(%s.a - 2.0*%s.%c) - 16.0*DCub * (%s.a - 2.0*%s.%c) -" + " DaCub*%s.%c) / DaSqd;", + final, component, src, component, dst, component, src, src, component, dst, src, src, + component, src, src, component, src, component); + fsBuilder->codeAppend("} else {"); + // -sqrt(Da * D) (Sa-2 S)-Da S+D (Sa-2 S+1)+S + fsBuilder->codeAppendf( + "%s.%c = %s.%c*(%s.a - 2.0*%s.%c + 1.0) + %s.%c -" + " sqrt(%s.a*%s.%c)*(%s.a - 2.0*%s.%c) - %s.a*%s.%c;", + final, component, dst, component, src, src, component, src, component, dst, dst, component, + src, src, component, dst, src, component); + fsBuilder->codeAppend("}"); +} + +// Adds a function that takes two colors and an alpha as input. It produces a color with the +// hue and saturation of the first color, the luminosity of the second color, and the input +// alpha. It has this signature: +// vec3 set_luminance(vec3 hueSatColor, float alpha, vec3 lumColor). +static void AddLumFunction(FragmentShaderBuilder* fsBuilder, std::string* setLumFunction) { + // Emit a helper that gets the luminance of a color. + fsBuilder->addFunction(R"( +float luminance(vec3 color) { + return dot(vec3(0.3, 0.59, 0.11), color); +} +)"); + + // Emit the set luminance function. + fsBuilder->addFunction(R"( +vec3 set_luminance(vec3 hueSat, float alpha, vec3 lumColor) { + float diff = luminance(lumColor - hueSat); + vec3 outColor = hueSat + diff; + float outLum = luminance(outColor); + float minComp = min(min(outColor.r, outColor.g), outColor.b); + float maxComp = max(max(outColor.r, outColor.g), outColor.b); + if (minComp < 0.0 && outLum != minComp) { + outColor = outLum + ((outColor - vec3(outLum, outLum, outLum)) * outLum) / (outLum - minComp); + } + if (maxComp > alpha && maxComp != outLum) { + outColor = outLum + ((outColor - vec3(outLum, outLum, outLum)) * (alpha - outLum)) / (maxComp - outLum); + } + return outColor; +} +)"); + *setLumFunction = "set_luminance"; +} + +// Adds a function that creates a color with the hue and luminosity of one input color and +// the saturation of another color. 
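The set_luminance helper emitted by AddLumFunction() clamps the luminosity-adjusted color back into range. A direct C++ transliteration of that GLSL, with a tiny local vector type, is handy for checking the formula on the CPU (reference only, not tgfx code):

```cpp
#include <algorithm>
#include <initializer_list>

struct Vec3 {
  float r, g, b;
};
static Vec3 operator-(Vec3 x, Vec3 y) { return {x.r - y.r, x.g - y.g, x.b - y.b}; }
static Vec3 operator+(Vec3 x, float s) { return {x.r + s, x.g + s, x.b + s}; }
static Vec3 operator*(Vec3 x, float s) { return {x.r * s, x.g * s, x.b * s}; }

static float Luminance(Vec3 c) {
  return 0.3f * c.r + 0.59f * c.g + 0.11f * c.b;  // same weights as the GLSL dot()
}

// Mirrors set_luminance(): keep the hue/saturation of hueSat, take the luminosity of
// lumColor, then pull out-of-range components back toward the luminosity.
static Vec3 SetLuminance(Vec3 hueSat, float alpha, Vec3 lumColor) {
  Vec3 out = hueSat + Luminance(lumColor - hueSat);
  float outLum = Luminance(out);
  float minComp = std::min({out.r, out.g, out.b});
  float maxComp = std::max({out.r, out.g, out.b});
  if (minComp < 0.0f && outLum != minComp) {
    out = (out - Vec3{outLum, outLum, outLum}) * (outLum / (outLum - minComp)) + outLum;
  }
  if (maxComp > alpha && maxComp != outLum) {
    out = (out - Vec3{outLum, outLum, outLum}) * ((alpha - outLum) / (maxComp - outLum)) + outLum;
  }
  return out;
}
```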
It will have this signature: +// float set_saturation(vec3 hueLumColor, vec3 satColor) +static void AddSatFunction(FragmentShaderBuilder* fsBuilder, std::string* setSatFunction) { + // Emit a helper that gets the saturation of a color + fsBuilder->addFunction(R"( +float saturation(vec3 color) { + return max(max(color.r, color.g), color.b) - min(min(color.r, color.g), color.b); +} +)"); + + // Emit a helper that sets the saturation given sorted input channels. This used + // to use inout params for min, mid, and max components but that seems to cause + // problems on PowerVR drivers. So instead it returns a vec3 where r, g ,b are the + // adjusted min, mid, and max inputs, respectively. + fsBuilder->addFunction(R"( +vec3 set_saturation_helper(float minComp, float midComp, float maxComp, float sat) { + if (minComp < maxComp) { + vec3 result; + result.r = 0.0; + result.g = sat * (midComp - minComp) / (maxComp - minComp); + result.b = sat; + return result; + } else { + return vec3(0, 0, 0); + } +} +)"); + + fsBuilder->addFunction(R"( +vec3 set_saturation(vec3 hueLumColor, vec3 satColor) { + float sat = saturation(satColor); + if (hueLumColor.r <= hueLumColor.g) { + if (hueLumColor.g <= hueLumColor.b) { + hueLumColor.rgb = set_saturation_helper(hueLumColor.r, hueLumColor.g, hueLumColor.b, sat); + } else if (hueLumColor.r <= hueLumColor.b) { + hueLumColor.rbg = set_saturation_helper(hueLumColor.r, hueLumColor.b, hueLumColor.g, sat); + } else { + hueLumColor.brg = set_saturation_helper(hueLumColor.b, hueLumColor.r, hueLumColor.g, sat); + } + } else if (hueLumColor.r <= hueLumColor.b) { + hueLumColor.grb = set_saturation_helper(hueLumColor.g, hueLumColor.r, hueLumColor.b, sat); + } else if (hueLumColor.g <= hueLumColor.b) { + hueLumColor.gbr = set_saturation_helper(hueLumColor.g, hueLumColor.b, hueLumColor.r, sat); + } else { + hueLumColor.bgr = set_saturation_helper(hueLumColor.b, hueLumColor.g, hueLumColor.r, sat); + } + return hueLumColor; +} +)"); + *setSatFunction = "set_saturation"; +} + +static void BlendHandler_Overlay(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + // Overlay is Hard-Light with the src and dst reversed + HardLight(fsBuilder, outputColor, dstColor, srcColor); +} + +static void BlendHandler_Darken(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + fsBuilder->codeAppendf( + "%s.rgb = min((1.0 - %s.a) * %s.rgb + %s.rgb, (1.0 - %s.a) * %s.rgb + %s.rgb);", outputColor, + srcColor, dstColor, srcColor, dstColor, srcColor, dstColor); +} + +static void BlendHandler_Lighten(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + fsBuilder->codeAppendf( + "%s.rgb = max((1.0 - %s.a) * %s.rgb + %s.rgb, (1.0 - %s.a) * %s.rgb + %s.rgb);", outputColor, + srcColor, dstColor, srcColor, dstColor, srcColor, dstColor); +} + +static void BlendHandler_ColorDodge(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + ColorDodgeComponent(fsBuilder, outputColor, srcColor, dstColor, 'r'); + ColorDodgeComponent(fsBuilder, outputColor, srcColor, dstColor, 'g'); + ColorDodgeComponent(fsBuilder, outputColor, srcColor, dstColor, 'b'); +} + +static void BlendHandler_ColorBurn(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + ColorBurnComponent(fsBuilder, outputColor, srcColor, dstColor, 'r'); + ColorBurnComponent(fsBuilder, 
outputColor, srcColor, dstColor, 'g'); + ColorBurnComponent(fsBuilder, outputColor, srcColor, dstColor, 'b'); +} + +static void BlendHandler_HardLight(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + HardLight(fsBuilder, outputColor, srcColor, dstColor); +} + +static void BlendHandler_SoftLight(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + fsBuilder->codeAppendf("if (0.0 == %s.a) {", dstColor); + fsBuilder->codeAppendf("%s.rgba = %s;", outputColor, srcColor); + fsBuilder->codeAppend("} else {"); + SoftLightComponentPosDstAlpha(fsBuilder, outputColor, srcColor, dstColor, 'r'); + SoftLightComponentPosDstAlpha(fsBuilder, outputColor, srcColor, dstColor, 'g'); + SoftLightComponentPosDstAlpha(fsBuilder, outputColor, srcColor, dstColor, 'b'); + fsBuilder->codeAppend("}"); +} + +static void BlendHandler_Difference(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + fsBuilder->codeAppendf("%s.rgb = %s.rgb + %s.rgb - 2.0 * min(%s.rgb * %s.a, %s.rgb * %s.a);", + outputColor, srcColor, dstColor, srcColor, dstColor, dstColor, srcColor); +} + +static void BlendHandler_Exclusion(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + fsBuilder->codeAppendf("%s.rgb = %s.rgb + %s.rgb - 2.0 * %s.rgb * %s.rgb;", outputColor, dstColor, + srcColor, dstColor, srcColor); +} + +static void BlendHandler_Multiply(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + fsBuilder->codeAppendf( + "%s.rgb = (1.0 - %s.a) * %s.rgb + (1.0 - %s.a) * %s.rgb + %s.rgb * %s.rgb;", outputColor, + srcColor, dstColor, dstColor, srcColor, srcColor, dstColor); +} + +static void BlendHandler_Hue(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + // SetLum(SetSat(S * Da, Sat(D * Sa)), Sa*Da, D*Sa) + (1 - Sa) * D + (1 - Da) * S + std::string setSat, setLum; + AddSatFunction(fsBuilder, &setSat); + AddLumFunction(fsBuilder, &setLum); + fsBuilder->codeAppendf("vec4 dstSrcAlpha = %s * %s.a;", dstColor, srcColor); + fsBuilder->codeAppendf( + "%s.rgb = %s(%s(%s.rgb * %s.a, dstSrcAlpha.rgb), dstSrcAlpha.a, dstSrcAlpha.rgb);", + outputColor, setLum.c_str(), setSat.c_str(), srcColor, dstColor); + fsBuilder->codeAppendf("%s.rgb += (1.0 - %s.a) * %s.rgb + (1.0 - %s.a) * %s.rgb;", outputColor, + srcColor, dstColor, dstColor, srcColor); +} + +static void BlendHandler_Saturation(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + // SetLum(SetSat(D * Sa, Sat(S * Da)), Sa*Da, D*Sa)) + (1 - Sa) * D + (1 - Da) * S + std::string setSat, setLum; + AddSatFunction(fsBuilder, &setSat); + AddLumFunction(fsBuilder, &setLum); + fsBuilder->codeAppendf("vec4 dstSrcAlpha = %s * %s.a;", dstColor, srcColor); + fsBuilder->codeAppendf( + "%s.rgb = %s(%s(dstSrcAlpha.rgb, %s.rgb * %s.a), dstSrcAlpha.a, dstSrcAlpha.rgb);", + outputColor, setLum.c_str(), setSat.c_str(), srcColor, dstColor); + fsBuilder->codeAppendf("%s.rgb += (1.0 - %s.a) * %s.rgb + (1.0 - %s.a) * %s.rgb;", outputColor, + srcColor, dstColor, dstColor, srcColor); +} + +static void BlendHandler_Color(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + // SetLum(S * Da, Sa* Da, D * Sa) + (1 - Sa) * D + (1 - Da) * S + std::string setLum; + 
AddLumFunction(fsBuilder, &setLum); + fsBuilder->codeAppendf("vec4 srcDstAlpha = %s * %s.a;", srcColor, dstColor); + fsBuilder->codeAppendf("%s.rgb = %s(srcDstAlpha.rgb, srcDstAlpha.a, %s.rgb * %s.a);", outputColor, + setLum.c_str(), dstColor, srcColor); + fsBuilder->codeAppendf("%s.rgb += (1.0 - %s.a) * %s.rgb + (1.0 - %s.a) * %s.rgb;", outputColor, + srcColor, dstColor, dstColor, srcColor); +} + +static void BlendHandler_Luminosity(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor) { + // SetLum(D * Sa, Sa* Da, S * Da) + (1 - Sa) * D + (1 - Da) * S + std::string setLum; + AddLumFunction(fsBuilder, &setLum); + fsBuilder->codeAppendf("vec4 srcDstAlpha = %s * %s.a;", srcColor, dstColor); + fsBuilder->codeAppendf("%s.rgb = %s(%s.rgb * %s.a, srcDstAlpha.a, srcDstAlpha.rgb);", outputColor, + setLum.c_str(), dstColor, srcColor); + fsBuilder->codeAppendf("%s.rgb += (1.0 - %s.a) * %s.rgb + (1.0 - %s.a) * %s.rgb;", outputColor, + srcColor, dstColor, dstColor, srcColor); +} + +using BlendHandler = void (*)(FragmentShaderBuilder* fsBuilder, const char* srcColor, + const char* dstColor, const char* outputColor); + +static constexpr std::pair kBlendHandlers[] = { + {BlendMode::Overlay, BlendHandler_Overlay}, + {BlendMode::Darken, BlendHandler_Darken}, + {BlendMode::Lighten, BlendHandler_Lighten}, + {BlendMode::ColorDodge, BlendHandler_ColorDodge}, + {BlendMode::ColorBurn, BlendHandler_ColorBurn}, + {BlendMode::HardLight, BlendHandler_HardLight}, + {BlendMode::SoftLight, BlendHandler_SoftLight}, + {BlendMode::Difference, BlendHandler_Difference}, + {BlendMode::Exclusion, BlendHandler_Exclusion}, + {BlendMode::Multiply, BlendHandler_Multiply}, + {BlendMode::Hue, BlendHandler_Hue}, + {BlendMode::Saturation, BlendHandler_Saturation}, + {BlendMode::Color, BlendHandler_Color}, + {BlendMode::Luminosity, BlendHandler_Luminosity}}; + +static void HandleBlendModes(FragmentShaderBuilder* fsBuilder, const std::string& srcColor, + const std::string& dstColor, const std::string& outputColor, + BlendMode blendMode) { + // These all perform src-over on the alpha channel. 
+ fsBuilder->codeAppendf("%s.a = %s.a + (1.0 - %s.a) * %s.a;", outputColor.c_str(), + srcColor.c_str(), srcColor.c_str(), dstColor.c_str()); + for (const auto& pair : kBlendHandlers) { + if (pair.first == blendMode) { + pair.second(fsBuilder, srcColor.c_str(), dstColor.c_str(), outputColor.c_str()); + break; + } + } +} + +static void CoeffHandler_ONE(FragmentShaderBuilder*, const char*, const char*) { +} + +static void CoeffHandler_SRC_COLOR(FragmentShaderBuilder* fsBuilder, const char* srcColorName, + const char*) { + fsBuilder->codeAppendf(" * %s", srcColorName); +} + +static void CoeffHandler_ONE_MINUS_SRC_COLOR(FragmentShaderBuilder* fsBuilder, + const char* srcColorName, const char*) { + fsBuilder->codeAppendf(" * (vec4(1.0) - %s)", srcColorName); +} + +static void CoeffHandler_DST_COLOR(FragmentShaderBuilder* fsBuilder, const char*, + const char* dstColorName) { + fsBuilder->codeAppendf(" * %s", dstColorName); +} + +static void CoeffHandler_ONE_MINUS_DST_COLOR(FragmentShaderBuilder* fsBuilder, const char*, + const char* dstColorName) { + fsBuilder->codeAppendf(" * (vec4(1.0) - %s)", dstColorName); +} + +static void CoeffHandler_SRC_ALPHA(FragmentShaderBuilder* fsBuilder, const char* srcColorName, + const char*) { + fsBuilder->codeAppendf(" * %s.a", srcColorName); +} + +static void CoeffHandler_ONE_MINUS_SRC_ALPHA(FragmentShaderBuilder* fsBuilder, + const char* srcColorName, const char*) { + fsBuilder->codeAppendf(" * (1.0 - %s.a)", srcColorName); +} + +static void CoeffHandler_DST_ALPHA(FragmentShaderBuilder* fsBuilder, const char*, + const char* dstColorName) { + fsBuilder->codeAppendf(" * %s.a", dstColorName); +} + +static void CoeffHandler_ONE_MINUS_DST_ALPHA(FragmentShaderBuilder* fsBuilder, const char*, + const char* dstColorName) { + fsBuilder->codeAppendf(" * (1.0 - %s.a)", dstColorName); +} + +using CoeffHandler = void (*)(FragmentShaderBuilder* fsBuilder, const char* srcColorName, + const char* dstColorName); + +static constexpr CoeffHandler kCoeffHandleMap[] = {CoeffHandler_ONE, + CoeffHandler_SRC_COLOR, + CoeffHandler_ONE_MINUS_SRC_COLOR, + CoeffHandler_DST_COLOR, + CoeffHandler_ONE_MINUS_DST_COLOR, + CoeffHandler_SRC_ALPHA, + CoeffHandler_ONE_MINUS_SRC_ALPHA, + CoeffHandler_DST_ALPHA, + CoeffHandler_ONE_MINUS_DST_ALPHA}; + +static bool AppendPorterDuffTerm(FragmentShaderBuilder* fsBuilder, BlendModeCoeff coeff, + const std::string& colorName, const std::string& srcColorName, + const std::string& dstColorName, bool hasPrevious) { + if (BlendModeCoeff::Zero == coeff) { + return hasPrevious; + } else { + if (hasPrevious) { + fsBuilder->codeAppend(" + "); + } + fsBuilder->codeAppendf("%s", colorName.c_str()); + kCoeffHandleMap[static_cast(coeff) - 1](fsBuilder, srcColorName.c_str(), + dstColorName.c_str()); + return true; + } +} + +void AppendMode(FragmentShaderBuilder* fsBuilder, const std::string& srcColor, + const std::string& dstColor, const std::string& outColor, BlendMode blendMode) { + BlendInfo blendInfo = {}; + if (BlendModeAsCoeff(blendMode, &blendInfo)) { + // The only coeff mode that can go out of range is plus. 
+ bool clamp = blendMode == BlendMode::Plus; + + fsBuilder->codeAppendf("%s = ", outColor.c_str()); + if (clamp) { + fsBuilder->codeAppend("clamp("); + } + // append src blend + bool didAppend = + AppendPorterDuffTerm(fsBuilder, blendInfo.srcBlend, srcColor, srcColor, dstColor, false); + // append dst blend + if (!AppendPorterDuffTerm(fsBuilder, blendInfo.dstBlend, dstColor, srcColor, dstColor, + didAppend)) { + fsBuilder->codeAppend("vec4(0, 0, 0, 0)"); + } + if (clamp) { + fsBuilder->codeAppend(", 0, 1);"); + } + fsBuilder->codeAppend(";"); + } else { + HandleBlendModes(fsBuilder, srcColor, dstColor, outColor, blendMode); + } +} + +const char* BlendModeName(BlendMode mode) { + const char* ModeStrings[] = {"Clear", "Src", "Dst", "SrcOver", "DstOver", + "SrcIn", "DstIn", "SrcOut", "DstOut", "SrcATop", + "DstATop", "Xor", "Plus", "Modulate", "Screen", + "Overlay", "Darken", "Lighten", "ColorDodge", "ColorBurn", + "HardLight", "SoftLight", "Difference", "Exclusion", "Multiply", + "Hue", "Saturation", "Color", "Luminosity"}; + return ModeStrings[static_cast(mode)]; +} +} // namespace tgfx diff --git a/src/opengl/GLBlend.h b/src/opengl/GLBlend.h new file mode 100644 index 00000000..c3d713e9 --- /dev/null +++ b/src/opengl/GLBlend.h @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/FragmentShaderBuilder.h" +#include "tgfx/core/BlendMode.h" + +namespace tgfx { +// Appends GLSL code to fragment that assigns a specified blend of the srcColor and dstColor +// variables to the outColor variable. +void AppendMode(FragmentShaderBuilder* fsBuilder, const std::string& srcColor, + const std::string& dstColor, const std::string& outColor, BlendMode blendMode); + +const char* BlendModeName(BlendMode mode); +} // namespace tgfx diff --git a/src/opengl/GLBuffer.cpp b/src/opengl/GLBuffer.cpp new file mode 100644 index 00000000..bf9d569b --- /dev/null +++ b/src/opengl/GLBuffer.cpp @@ -0,0 +1,76 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
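For the coefficient-based modes, AppendMode() above simply stitches "color * coefficient" terms together. As a concrete check, source-over is conventionally the (One, OneMinusSrcAlpha) pair, which reduces to the familiar premultiplied formula; a plain C++ reference of that arithmetic, assuming BlendModeAsCoeff maps SrcOver to that standard pair:

```cpp
struct Color4 {
  float r, g, b, a;
};

// src + dst * (1 - src.a): the expansion AppendPorterDuffTerm() produces for
// srcBlend = One, dstBlend = OneMinusSrcAlpha, evaluated on the CPU.
Color4 SourceOver(const Color4& src, const Color4& dst) {
  float k = 1.0f - src.a;
  return {src.r + dst.r * k, src.g + dst.g * k, src.b + dst.b * k, src.a + dst.a * k};
}
```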
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLBuffer.h" +#include "GLContext.h" +#include "GLUtil.h" +#include "utils/UniqueID.h" + +namespace tgfx { +static void ComputeScratchKey(BytesKey* scratchKey, BufferType bufferType) { + static const uint32_t Type = UniqueID::Next(); + scratchKey->write(Type); + scratchKey->write(static_cast(bufferType)); +} + +std::shared_ptr GpuBuffer::Make(Context* context, BufferType bufferType, + const void* buffer, size_t size) { + if (buffer == nullptr || size == 0) { + return nullptr; + } + // 防止前面产生的GLError,导致后面CheckGLError逻辑返回错误结果 + CheckGLError(context); + + auto target = bufferType == BufferType::Index ? GL_ELEMENT_ARRAY_BUFFER : GL_ARRAY_BUFFER; + ScratchKey scratchKey = {}; + ComputeScratchKey(&scratchKey, bufferType); + auto glBuffer = + std::static_pointer_cast(context->resourceCache()->findScratchResource(scratchKey)); + auto gl = GLFunctions::Get(context); + if (glBuffer == nullptr) { + unsigned bufferID = 0; + gl->genBuffers(1, &bufferID); + if (bufferID == 0) { + return nullptr; + } + glBuffer = Resource::Wrap(context, new GLBuffer(bufferType, size, bufferID)); + } else { + glBuffer->_sizeInBytes = size; + } + gl->bindBuffer(target, glBuffer->_bufferID); + gl->bufferData(target, static_cast(size), buffer, GL_STATIC_DRAW); + if (!CheckGLError(context)) { + gl->bindBuffer(target, 0); + return nullptr; + } + gl->bindBuffer(target, 0); + return glBuffer; +} + +void GLBuffer::computeScratchKey(BytesKey* bytesKey) const { + ComputeScratchKey(bytesKey, _bufferType); +} + +void GLBuffer::onReleaseGPU() { + if (_bufferID > 0) { + auto gl = GLFunctions::Get(context); + gl->deleteBuffers(1, &_bufferID); + _bufferID = 0; + } +} +} // namespace tgfx diff --git a/src/opengl/GLBuffer.h b/src/opengl/GLBuffer.h new file mode 100644 index 00000000..1c0cca20 --- /dev/null +++ b/src/opengl/GLBuffer.h @@ -0,0 +1,44 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
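A minimal usage sketch for the factory above: uploading a small static index buffer. The tgfx headers and a valid, locked Context are assumed; the return type's template argument is stripped in the extract above but is presumably std::shared_ptr<GpuBuffer>, and Make() returns null on empty input or when the glBufferData upload reports a GL error.

```cpp
#include <cstdint>
#include <memory>
#include <vector>

// Assumes the relevant tgfx headers (Context, GpuBuffer) are included.
std::shared_ptr<tgfx::GpuBuffer> MakeQuadIndexBuffer(tgfx::Context* context) {
  std::vector<uint16_t> indices = {0, 1, 2, 2, 1, 3};
  return tgfx::GpuBuffer::Make(context, tgfx::BufferType::Index, indices.data(),
                               indices.size() * sizeof(uint16_t));
}
```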
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/GpuBuffer.h" + +namespace tgfx { +class GLBuffer : public GpuBuffer { + public: + unsigned bufferID() const { + return _bufferID; + } + + protected: + void computeScratchKey(BytesKey*) const override; + + private: + GLBuffer(BufferType bufferType, size_t size, unsigned bufferID) + : GpuBuffer(bufferType, size), _bufferID(bufferID) { + } + + void onReleaseGPU() override; + + unsigned _bufferID = 0; + + friend class GpuBuffer; +}; +} // namespace tgfx diff --git a/src/opengl/GLCaps.cpp b/src/opengl/GLCaps.cpp new file mode 100644 index 00000000..5336da32 --- /dev/null +++ b/src/opengl/GLCaps.cpp @@ -0,0 +1,456 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLCaps.h" +#include "GLUtil.h" + +namespace tgfx { +static GLStandard GetGLStandard(const char* versionString) { + if (versionString == nullptr) { + return GLStandard::None; + } + int major, minor; + int n = sscanf(versionString, "%d.%d", &major, &minor); + if (2 == n) { + return GLStandard::GL; + } + int esMajor, esMinor; + n = sscanf(versionString, "OpenGL ES %d.%d (WebGL %d.%d", &esMajor, &esMinor, &major, &minor); + if (4 == n) { + return GLStandard::WebGL; + } + + // check for ES 1 + char profile[2]; + n = sscanf(versionString, "OpenGL ES-%c%c %d.%d", profile, profile + 1, &major, &minor); + if (4 == n) { + // we no longer support ES1. 
+ return GLStandard::None; + } + + // check for ES2 + n = sscanf(versionString, "OpenGL ES %d.%d", &major, &minor); + if (2 == n) { + return GLStandard::GLES; + } + return GLStandard::None; +} + +static GLVendor GetVendorFromString(const char* vendorString) { + if (vendorString) { + if (0 == strcmp(vendorString, "ARM")) { + return GLVendor::ARM; + } + if (0 == strcmp(vendorString, "Google Inc.")) { + return GLVendor::Google; + } + if (0 == strcmp(vendorString, "Imagination Technologies")) { + return GLVendor::Imagination; + } + if (0 == strncmp(vendorString, "Intel ", 6) || 0 == strcmp(vendorString, "Intel")) { + return GLVendor::Intel; + } + if (0 == strcmp(vendorString, "Qualcomm")) { + return GLVendor::Qualcomm; + } + if (0 == strcmp(vendorString, "NVIDIA Corporation")) { + return GLVendor::NVIDIA; + } + if (0 == strcmp(vendorString, "ATI Technologies Inc.")) { + return GLVendor::ATI; + } + } + return GLVendor::Other; +} + +GLInfo::GLInfo(GLGetString* getString, GLGetStringi* getStringi, GLGetIntegerv* getIntegerv, + GLGetInternalformativ* getInternalformativ, + GLGetShaderPrecisionFormat getShaderPrecisionFormat) + : getString(getString), getStringi(getStringi), getIntegerv(getIntegerv), + getInternalformativ(getInternalformativ), getShaderPrecisionFormat(getShaderPrecisionFormat) { + auto versionString = (const char*)getString(GL_VERSION); + auto glVersion = GetGLVersion(versionString); + version = GL_VER(glVersion.majorVersion, glVersion.minorVersion); + standard = GetGLStandard(versionString); + fetchExtensions(); +} + +static void eatSpaceSepStrings(std::vector* out, const char text[]) { + if (!text) { + return; + } + while (true) { + while (' ' == *text) { + ++text; + } + if ('\0' == *text) { + break; + } + size_t length = strcspn(text, " "); + out->emplace_back(text, length); + text += length; + } +} + +void GLInfo::fetchExtensions() { + bool indexed = false; + if (standard == GLStandard::GL || standard == GLStandard::GLES) { + // glGetStringi and indexed extensions were added in version 3.0 of desktop GL and ES. + indexed = version >= GL_VER(3, 0); + } else if (standard == GLStandard::WebGL) { + // WebGL (1.0 or 2.0) doesn't natively support glGetStringi, but emscripten adds it in + // https://github.com/emscripten-core/emscripten/issues/3472 + indexed = version >= GL_VER(2, 0); + } + if (indexed) { + if (getStringi) { + int extensionCount = 0; + getIntegerv(GL_NUM_EXTENSIONS, &extensionCount); + for (int i = 0; i < extensionCount; ++i) { + const char* ext = reinterpret_cast(getStringi(GL_EXTENSIONS, i)); + extensions.emplace_back(ext); + } + } + } else { + auto text = reinterpret_cast(getString(GL_EXTENSIONS)); + eatSpaceSepStrings(&extensions, text); + } +} + +bool GLInfo::hasExtension(const std::string& extension) const { + auto result = std::find(extensions.begin(), extensions.end(), extension); + return result != extensions.end(); +} + +static bool IsMediumFloatFp32(const GLInfo& ctxInfo) { + if (ctxInfo.standard == GLStandard::GL && ctxInfo.version < GL_VER(4, 1) && + !ctxInfo.hasExtension("GL_ARB_ES2_compatibility")) { + // We're on a desktop GL that doesn't have precision info. Assume they're all 32bit float. + return true; + } + // glGetShaderPrecisionFormat doesn't accept GL_GEOMETRY_SHADER as a shader type. Hopefully the + // geometry shaders don't have lower precision than vertex and fragment. 
+ for (unsigned shader : {GL_FRAGMENT_SHADER, GL_VERTEX_SHADER}) { + int range[2]; + int bits; + ctxInfo.getShaderPrecisionFormat(shader, GL_MEDIUM_FLOAT, range, &bits); + if (range[0] < 127 || range[1] < 127 || bits < 23) { + return false; + } + } + return true; +} + +const GLCaps* GLCaps::Get(Context* context) { + return context ? static_cast(context->caps()) : nullptr; +} + +GLCaps::GLCaps(const GLInfo& info) { + standard = info.standard; + version = info.version; + vendor = GetVendorFromString((const char*)info.getString(GL_VENDOR)); + floatIs32Bits = IsMediumFloatFp32(info); + switch (standard) { + case GLStandard::GL: + initGLSupport(info); + break; + case GLStandard::GLES: + initGLESSupport(info); + break; + case GLStandard::WebGL: + initWebGLSupport(info); + break; + default: + break; + } + info.getIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize); + info.getIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &maxFragmentSamplers); + initFSAASupport(info); + initFormatMap(info); +} + +const TextureFormat& GLCaps::getTextureFormat(PixelFormat pixelFormat) const { + return pixelFormatMap.at(pixelFormat).format; +} + +const Swizzle& GLCaps::getReadSwizzle(PixelFormat pixelFormat) const { + return pixelFormatMap.at(pixelFormat).readSwizzle; +} + +const Swizzle& GLCaps::getWriteSwizzle(PixelFormat pixelFormat) const { + return pixelFormatMap.at(pixelFormat).writeSwizzle; +} + +bool GLCaps::isFormatRenderable(PixelFormat pixelFormat) const { + switch (pixelFormat) { + case PixelFormat::RGBA_8888: + case PixelFormat::BGRA_8888: + return true; + case PixelFormat::ALPHA_8: + if (textureRedSupport) { + return true; + } + break; + default: + break; + } + return false; +} + +int GLCaps::getSampleCount(int requestedCount, PixelFormat pixelFormat) const { + if (requestedCount <= 1) { + return 1; + } + auto result = pixelFormatMap.find(pixelFormat); + if (result == pixelFormatMap.end() || result->second.colorSampleCounts.empty()) { + return 1; + } + for (auto colorSampleCount : result->second.colorSampleCounts) { + if (colorSampleCount >= requestedCount) { + return colorSampleCount; + } + } + return 1; +} + +int GLCaps::getMaxMipmapLevel(int width, int height) const { + if (!mipMapSupport) { + return 0; + } + int maxDimension = std::max(width, height); + return static_cast(std::log2(maxDimension)); +} + +void GLCaps::initGLSupport(const GLInfo& info) { + packRowLengthSupport = true; + unpackRowLengthSupport = true; + vertexArrayObjectSupport = version >= GL_VER(3, 0) || + info.hasExtension("GL_ARB_vertex_array_object") || + info.hasExtension("GL_APPLE_vertex_array_object"); + textureRedSupport = version >= GL_VER(3, 0) || info.hasExtension("GL_ARB_texture_rg"); + multisampleDisableSupport = true; + if (vendor != GLVendor::Intel) { + textureBarrierSupport = version >= GL_VER(4, 5) || + info.hasExtension("GL_ARB_texture_barrier") || + info.hasExtension("GL_NV_texture_barrier"); + } + semaphoreSupport = version >= GL_VER(3, 2) || info.hasExtension("GL_ARB_sync"); + if (version < GL_VER(1, 3) && !info.hasExtension("GL_ARB_texture_border_clamp")) { + clampToBorderSupport = false; + } +} + +void GLCaps::initGLESSupport(const GLInfo& info) { + packRowLengthSupport = version >= GL_VER(3, 0) || info.hasExtension("GL_NV_pack_subimage"); + unpackRowLengthSupport = version >= GL_VER(3, 0) || info.hasExtension("GL_EXT_unpack_subimage"); + vertexArrayObjectSupport = + version >= GL_VER(3, 0) || info.hasExtension("GL_OES_vertex_array_object"); + textureRedSupport = version >= GL_VER(3, 0) || 
info.hasExtension("GL_EXT_texture_rg"); + multisampleDisableSupport = info.hasExtension("GL_EXT_multisample_compatibility"); + textureBarrierSupport = info.hasExtension("GL_NV_texture_barrier"); + if (info.hasExtension("GL_EXT_shader_framebuffer_fetch")) { + frameBufferFetchSupport = true; + frameBufferFetchColorName = "gl_LastFragData[0]"; + frameBufferFetchExtensionString = "GL_EXT_shader_framebuffer_fetch"; + frameBufferFetchRequiresEnablePerSample = false; + } else if (info.hasExtension("GL_NV_shader_framebuffer_fetch")) { + // Actually, we haven't seen an ES3.0 device with this extension yet, so we don't know. + frameBufferFetchSupport = true; + frameBufferFetchColorName = "gl_LastFragData[0]"; + frameBufferFetchExtensionString = "GL_NV_shader_framebuffer_fetch"; + frameBufferFetchRequiresEnablePerSample = false; + } else if (info.hasExtension("GL_ARM_shader_framebuffer_fetch")) { + frameBufferFetchSupport = true; + frameBufferFetchColorName = "gl_LastFragColorARM"; + frameBufferFetchExtensionString = "GL_ARM_shader_framebuffer_fetch"; + // The arm extension requires specifically enabling MSAA fetching per sample. + // On some devices this may have a perf hit. Also multiple render targets are disabled + frameBufferFetchRequiresEnablePerSample = true; + } + semaphoreSupport = version >= GL_VER(3, 0) || info.hasExtension("GL_APPLE_sync"); + if (version < GL_VER(3, 2) && !info.hasExtension("GL_EXT_texture_border_clamp") && + !info.hasExtension("GL_NV_texture_border_clamp") && + !info.hasExtension("GL_OES_texture_border_clamp")) { + clampToBorderSupport = false; + } + npotTextureTileSupport = version >= GL_VER(3, 0) || info.hasExtension("GL_OES_texture_npot"); + mipMapSupport = npotTextureTileSupport || info.hasExtension("GL_IMG_texture_npot"); +} + +void GLCaps::initWebGLSupport(const GLInfo& info) { + packRowLengthSupport = version >= GL_VER(2, 0); + unpackRowLengthSupport = version >= GL_VER(2, 0); + vertexArrayObjectSupport = version >= GL_VER(2, 0) || + info.hasExtension("GL_OES_vertex_array_object") || + info.hasExtension("OES_vertex_array_object"); + textureRedSupport = false; + multisampleDisableSupport = false; // no WebGL support + textureBarrierSupport = false; + semaphoreSupport = version >= GL_VER(2, 0); + clampToBorderSupport = false; + npotTextureTileSupport = version >= GL_VER(2, 0); + mipMapSupport = npotTextureTileSupport; +} + +void GLCaps::initFormatMap(const GLInfo& info) { + pixelFormatMap[PixelFormat::RGBA_8888].format.sizedFormat = GL_RGBA8; + pixelFormatMap[PixelFormat::RGBA_8888].format.externalFormat = GL_RGBA; + pixelFormatMap[PixelFormat::RGBA_8888].readSwizzle = Swizzle::RGBA(); + pixelFormatMap[PixelFormat::BGRA_8888].format.sizedFormat = GL_RGBA8; + pixelFormatMap[PixelFormat::BGRA_8888].format.externalFormat = GL_BGRA; + pixelFormatMap[PixelFormat::BGRA_8888].readSwizzle = Swizzle::RGBA(); + if (textureRedSupport) { + pixelFormatMap[PixelFormat::ALPHA_8].format.sizedFormat = GL_R8; + pixelFormatMap[PixelFormat::ALPHA_8].format.externalFormat = GL_RED; + pixelFormatMap[PixelFormat::ALPHA_8].readSwizzle = Swizzle::RRRR(); + // Shader output swizzles will default to RGBA. When we've use GL_RED instead of GL_ALPHA to + // implement PixelFormat::ALPHA_8 we need to swizzle the shader outputs so the alpha channel + // gets written to the single component. 
+ pixelFormatMap[PixelFormat::ALPHA_8].writeSwizzle = Swizzle::AAAA(); + pixelFormatMap[PixelFormat::GRAY_8].format.sizedFormat = GL_R8; + pixelFormatMap[PixelFormat::GRAY_8].format.externalFormat = GL_RED; + pixelFormatMap[PixelFormat::GRAY_8].readSwizzle = Swizzle::RRRA(); + pixelFormatMap[PixelFormat::RG_88].format.sizedFormat = GL_RG8; + pixelFormatMap[PixelFormat::RG_88].format.externalFormat = GL_RG; + pixelFormatMap[PixelFormat::RG_88].readSwizzle = Swizzle::RGRG(); + } else { + pixelFormatMap[PixelFormat::ALPHA_8].format.sizedFormat = GL_ALPHA8; + pixelFormatMap[PixelFormat::ALPHA_8].format.externalFormat = GL_ALPHA; + pixelFormatMap[PixelFormat::ALPHA_8].readSwizzle = Swizzle::AAAA(); + pixelFormatMap[PixelFormat::GRAY_8].format.sizedFormat = GL_LUMINANCE8; + pixelFormatMap[PixelFormat::GRAY_8].format.externalFormat = GL_LUMINANCE; + pixelFormatMap[PixelFormat::GRAY_8].readSwizzle = Swizzle::RGBA(); + pixelFormatMap[PixelFormat::RG_88].format.sizedFormat = GL_LUMINANCE8_ALPHA8; + pixelFormatMap[PixelFormat::RG_88].format.externalFormat = GL_LUMINANCE_ALPHA; + pixelFormatMap[PixelFormat::RG_88].readSwizzle = Swizzle::RARA(); + } + // ES 2.0 requires that the internal/external formats match. + bool useSizedTexFormats = + (standard == GLStandard::GL || (standard == GLStandard::GLES && version >= GL_VER(3, 0))); + bool useSizedRbFormats = standard == GLStandard::GLES || standard == GLStandard::WebGL; + + for (auto& item : pixelFormatMap) { + auto& format = item.second.format; + format.internalFormatTexImage = useSizedTexFormats ? format.sizedFormat : format.externalFormat; + format.internalFormatRenderBuffer = + useSizedRbFormats ? format.sizedFormat : format.externalFormat; + } + if (info.hasExtension("GL_APPLE_texture_format_BGRA8888") || + info.hasExtension("GL_EXT_texture_format_BGRA8888")) { + pixelFormatMap[PixelFormat::BGRA_8888].format.internalFormatTexImage = GL_RGBA; + } + initColorSampleCount(info); +} + +static bool UsesInternalformatQuery(GLStandard standard, const GLInfo& glInterface, + uint32_t version) { + return (standard == GLStandard::GL && + ((version >= GL_VER(4, 2)) || glInterface.hasExtension("GL_ARB_internalformat_query"))) || + (standard == GLStandard::GLES && version >= GL_VER(3, 0)); +} + +void GLCaps::initColorSampleCount(const GLInfo& info) { + std::vector pixelFormats = {PixelFormat::RGBA_8888}; + for (auto pixelFormat : pixelFormats) { + if (UsesInternalformatQuery(standard, info, version)) { + int count = 0; + unsigned format = pixelFormatMap[pixelFormat].format.internalFormatRenderBuffer; + info.getInternalformativ(GL_RENDERBUFFER, format, GL_NUM_SAMPLE_COUNTS, 1, &count); + if (count) { + int* temp = new int[count]; + info.getInternalformativ(GL_RENDERBUFFER, format, GL_SAMPLES, count, temp); + // GL has a concept of MSAA rasterization with a single sample but we do not. + if (temp[count - 1] == 1) { + --count; + } + // We initialize our supported values with 1 (no msaa) and reverse the order + // returned by GL so that the array is ascending. + pixelFormatMap[pixelFormat].colorSampleCounts.push_back(1); + for (int j = 0; j < count; ++j) { + pixelFormatMap[pixelFormat].colorSampleCounts.push_back(temp[count - j - 1]); + } + delete[] temp; + } + } else { + // Fake out the table using some semi-standard counts up to the max allowed sample + // count. 
+ int maxSampleCnt = 1; + if (MSFBOType::ES_IMG_MsToTexture == msFBOType) { + info.getIntegerv(GL_MAX_SAMPLES_IMG, &maxSampleCnt); + } else if (MSFBOType::None != msFBOType) { + info.getIntegerv(GL_MAX_SAMPLES, &maxSampleCnt); + } + // Chrome has a mock GL implementation that returns 0. + maxSampleCnt = std::max(1, maxSampleCnt); + + std::vector defaultSamples{1, 2, 4, 8}; + for (auto samples : defaultSamples) { + if (samples > maxSampleCnt) { + break; + } + pixelFormatMap[pixelFormat].colorSampleCounts.push_back(samples); + } + } + } +} + +static MSFBOType GetMSFBOType_GLES(uint32_t version, const GLInfo& glInterface) { + // We prefer multisampled-render-to-texture extensions over ES3 MSAA because we've observed + // ES3 driver bugs on at least one device with a tiled GPU (N10). + if (glInterface.hasExtension("GL_EXT_multisampled_render_to_texture")) { + return MSFBOType::ES_EXT_MsToTexture; + } else if (glInterface.hasExtension("GL_IMG_multisampled_render_to_texture")) { + return MSFBOType::ES_IMG_MsToTexture; + } else if (version >= GL_VER(3, 0) || + glInterface.hasExtension("GL_CHROMIUM_framebuffer_multisample") || + glInterface.hasExtension("GL_ANGLE_framebuffer_multisample")) { + return MSFBOType::Standard; + } else if (glInterface.hasExtension("GL_APPLE_framebuffer_multisample")) { + return MSFBOType::ES_Apple; + } + return MSFBOType::None; +} + +void GLCaps::initFSAASupport(const GLInfo& info) { + if (standard == GLStandard::GL) { + if (version >= GL_VER(3, 0) || info.hasExtension("GL_ARB_framebuffer_object") || + (info.hasExtension("GL_EXT_framebuffer_multisample") && + info.hasExtension("GL_EXT_framebuffer_blit"))) { + msFBOType = MSFBOType::Standard; + } + } else if (standard == GLStandard::GLES) { + msFBOType = GetMSFBOType_GLES(version, info); + } else if (standard == GLStandard::WebGL) { + // No support in WebGL + msFBOType = MSFBOType::None; + } + + // We disable MSAA across the board for Intel GPUs for performance reasons. + if (GLVendor::Intel == vendor) { + msFBOType = MSFBOType::None; + } +} +bool GLCaps::usesMSAARenderBuffers() const { + return MSFBOType::None != msFBOType && MSFBOType::ES_IMG_MsToTexture != msFBOType && + MSFBOType::ES_EXT_MsToTexture != msFBOType; +} + +bool GLCaps::usesImplicitMSAAResolve() const { + return MSFBOType::ES_IMG_MsToTexture == msFBOType || MSFBOType::ES_EXT_MsToTexture == msFBOType; +} +} // namespace tgfx diff --git a/src/opengl/GLCaps.h b/src/opengl/GLCaps.h new file mode 100644 index 00000000..c3a32ac5 --- /dev/null +++ b/src/opengl/GLCaps.h @@ -0,0 +1,162 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
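The fallback path above synthesizes the supported color sample counts as {1, 2, 4, 8} capped at the driver's GL_MAX_SAMPLES, and getSampleCount() (earlier in GLCaps.cpp) then rounds a request up to the next supported value. A standalone restatement of both steps, useful as a quick sanity check:

```cpp
#include <algorithm>
#include <initializer_list>
#include <vector>

// Mirrors the fallback in initColorSampleCount(): semi-standard counts, capped at maxSamples.
std::vector<int> BuildSampleCounts(int maxSamples) {
  std::vector<int> counts;
  for (int samples : {1, 2, 4, 8}) {
    if (samples > std::max(1, maxSamples)) {
      break;
    }
    counts.push_back(samples);
  }
  return counts;
}

// Mirrors getSampleCount(): first supported count >= the request, else no MSAA.
int ChooseSampleCount(int requested, const std::vector<int>& counts) {
  if (requested <= 1 || counts.empty()) {
    return 1;
  }
  for (int count : counts) {
    if (count >= requested) {
      return count;
    }
  }
  return 1;
}

// BuildSampleCounts(4) == {1, 2, 4}; ChooseSampleCount(3, {1, 2, 4}) == 4
```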
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include +#include "gpu/Swizzle.h" +#include "tgfx/gpu/Caps.h" +#include "tgfx/gpu/PixelFormat.h" +#include "tgfx/opengl/GLDefines.h" +#include "tgfx/opengl/GLFunctions.h" +#include "utils/EnumHasher.h" +#include "utils/Log.h" + +#define GL_VER(major, minor) ((static_cast(major) << 16) | static_cast(minor)) + +namespace tgfx { +enum class GLStandard { None, GL, GLES, WebGL }; + +struct TextureFormat { + unsigned sizedFormat = 0; + unsigned internalFormatTexImage = 0; + unsigned internalFormatRenderBuffer = 0; + unsigned externalFormat = 0; +}; + +struct ConfigInfo { + TextureFormat format; + std::vector colorSampleCounts; + Swizzle readSwizzle = Swizzle::RGBA(); + Swizzle writeSwizzle = Swizzle::RGBA(); +}; + +enum class GLVendor { ARM, Google, Imagination, Intel, Qualcomm, NVIDIA, ATI, Other }; + +/** + * The type of MSAA for FBOs supported. Different extensions have different + * semantics of how / when a resolve is performed. + */ +enum class MSFBOType { + /** + * no support for MSAA FBOs + */ + None, + /** + * OpenGL 3.0+, OpenGL ES 3.0+, GL_ARB_framebuffer_object, + * GL_CHROMIUM_framebuffer_multisample, GL_ANGLE_framebuffer_multisample, + * or GL_EXT_framebuffer_multisample + */ + Standard, + /** + * GL_APPLE_framebuffer_multisample ES extension + */ + ES_Apple, + /** + * GL_IMG_multisampled_render_to_texture. This variation does not have MSAA renderbuffers. + * Instead the texture is multisampled when bound to the FBO and then resolved automatically + * when read. It also defines an alternate value for GL_MAX_SAMPLES (which we call + * GL_MAX_SAMPLES_IMG). + */ + ES_IMG_MsToTexture, + /** + * GL_EXT_multisampled_render_to_texture. Same as the IMG one above but uses the standard + * GL_MAX_SAMPLES value. 
+ */ + ES_EXT_MsToTexture +}; + +class GLInfo { + public: + GLStandard standard = GLStandard::None; + uint32_t version = 0; + GLGetString* getString = nullptr; + GLGetStringi* getStringi = nullptr; + GLGetIntegerv* getIntegerv = nullptr; + GLGetInternalformativ* getInternalformativ = nullptr; + GLGetShaderPrecisionFormat* getShaderPrecisionFormat = nullptr; + + GLInfo(GLGetString* getString, GLGetStringi* getStringi, GLGetIntegerv* getIntegerv, + GLGetInternalformativ* getInternalformativ, + GLGetShaderPrecisionFormat* getShaderPrecisionFormat); + + bool hasExtension(const std::string& extension) const; + + private: + void fetchExtensions(); + + std::vector extensions = {}; +}; + +static const int kMaxSaneSamplers = 32; + +class GLCaps : public Caps { + public: + GLStandard standard = GLStandard::None; + uint32_t version = 0; + GLVendor vendor = GLVendor::Other; + bool vertexArrayObjectSupport = false; + bool packRowLengthSupport = false; + bool unpackRowLengthSupport = false; + bool textureRedSupport = false; + MSFBOType msFBOType = MSFBOType::None; + bool frameBufferFetchRequiresEnablePerSample = false; + std::string frameBufferFetchColorName; + std::string frameBufferFetchExtensionString; + int maxFragmentSamplers = kMaxSaneSamplers; + + static const GLCaps* Get(Context* context); + + explicit GLCaps(const GLInfo& info); + + const TextureFormat& getTextureFormat(PixelFormat pixelFormat) const; + + const Swizzle& getReadSwizzle(PixelFormat pixelFormat) const; + + const Swizzle& getWriteSwizzle(PixelFormat pixelFormat) const override; + + bool isFormatRenderable(PixelFormat pixelFormat) const override; + + int getSampleCount(int requestedCount, PixelFormat pixelFormat) const override; + + int getMaxMipmapLevel(int width, int height) const override; + + /** + * Does the preferred MSAA FBO extension have MSAA renderBuffers? + */ + bool usesMSAARenderBuffers() const; + + /** + * Is the MSAA FBO extension one where the texture is multisampled when bound to an FBO and + * then implicitly resolved when read. + */ + bool usesImplicitMSAAResolve() const; + + private: + std::unordered_map pixelFormatMap = {}; + + void initFormatMap(const GLInfo& info); + void initColorSampleCount(const GLInfo& info); + void initGLSupport(const GLInfo& info); + void initGLESSupport(const GLInfo& info); + void initWebGLSupport(const GLInfo& info); + void initFSAASupport(const GLInfo& info); +}; +} // namespace tgfx diff --git a/src/opengl/GLContext.cpp b/src/opengl/GLContext.cpp new file mode 100644 index 00000000..d9700c50 --- /dev/null +++ b/src/opengl/GLContext.cpp @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
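The GL_VER macro at the top of GLCaps.h packs the major version into the high 16 bits, so the packed integers order exactly like the (major, minor) pairs they encode; every version >= GL_VER(x, y) check in this patch relies on that. A couple of worked values, written as a standalone equivalent of the macro:

```cpp
#include <cstdint>

// Same packing as GL_VER(major, minor): (major << 16) | minor.
constexpr uint32_t PackGLVersion(uint32_t major, uint32_t minor) {
  return (major << 16) | minor;
}

static_assert(PackGLVersion(3, 0) == 0x00030000u, "GL_VER(3, 0) == 196608");
static_assert(PackGLVersion(2, 1) == 0x00020001u, "GL_VER(2, 1) == 131073");
static_assert(PackGLVersion(2, 1) < PackGLVersion(3, 0), "majors dominate the comparison");
static_assert(PackGLVersion(4, 1) < PackGLVersion(4, 5), "minors order within the same major");
```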
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "opengl/GLContext.h" +#include "GLGpu.h" +#include "tgfx/opengl/GLDevice.h" + +namespace tgfx { +GLContext::GLContext(Device* device, const GLInterface* glInterface) + : Context(device), glInterface(glInterface) { + _gpu = GLGpu::Make(this).release(); +} + +void GLContext::resetState() { +} +} // namespace tgfx diff --git a/src/opengl/GLContext.h b/src/opengl/GLContext.h new file mode 100644 index 00000000..c21f3749 --- /dev/null +++ b/src/opengl/GLContext.h @@ -0,0 +1,57 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "GLInterface.h" +#include "gpu/SamplerState.h" +#include "opengl/GLSampler.h" +#include "tgfx/gpu/Context.h" + +namespace tgfx { +class GLCaps; + +class GLContext : public Context { + public: + static GLContext* Unwrap(Context* context) { + return static_cast(context); + } + + GLContext(Device* device, const GLInterface* glInterface); + + Backend backend() const override { + return Backend::OPENGL; + } + + const GLFunctions* functions() const { + return glInterface->functions.get(); + } + + const Caps* caps() const override { + return glInterface->caps.get(); + } + + void resetState() override; + + private: + const GLInterface* glInterface = nullptr; + + friend class GLDevice; + friend class GLInterface; +}; +} // namespace tgfx diff --git a/src/opengl/GLDevice.cpp b/src/opengl/GLDevice.cpp new file mode 100644 index 00000000..fadf773d --- /dev/null +++ b/src/opengl/GLDevice.cpp @@ -0,0 +1,94 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
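GLContext exposes the assembled entry points through functions(); most call sites in this patch reach them via GLFunctions::Get(context) instead, as GLBuffer.cpp does above. A small usage sketch with entry points already shown in this patch, assuming a valid context that is current on the calling thread:

```cpp
// Creates and immediately destroys a GL buffer object through the loaded function table.
void TouchGLBuffer(tgfx::Context* context) {
  auto gl = tgfx::GLFunctions::Get(context);
  unsigned bufferID = 0;
  gl->genBuffers(1, &bufferID);  // members of the GLFunctions table, as used in GLBuffer.cpp
  if (bufferID > 0) {
    gl->deleteBuffers(1, &bufferID);
  }
}
```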
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/GLDevice.h" +#include +#include "opengl/GLContext.h" + +namespace tgfx { +static std::mutex deviceMapLocker = {}; +static std::unordered_map deviceMap = {}; + +std::shared_ptr GLDevice::MakeWithFallback() { + auto device = GLDevice::Make(); + if (device != nullptr) { + return device; + } +#ifndef TGFX_BUILD_FOR_WEB + for (auto& item : deviceMap) { + device = std::static_pointer_cast(item.second->weakThis.lock()); + if (device != nullptr && !device->isAdopted) { + LOGE( + "GLDevice::MakeWithFallback(): Failed to create a new GLDevice! Fall back to the " + "existing one."); + return device; + } + } +#endif + return GLDevice::Current(); +} + +std::shared_ptr GLDevice::Get(void* nativeHandle) { + if (nativeHandle == nullptr) { + return nullptr; + } + std::lock_guard autoLock(deviceMapLocker); + auto result = deviceMap.find(nativeHandle); + if (result != deviceMap.end()) { + auto device = result->second->weakThis.lock(); + if (device) { + return std::static_pointer_cast(device); + } + deviceMap.erase(result); + } + return nullptr; +} + +GLDevice::GLDevice(void* nativeHandle) : nativeHandle(nativeHandle) { + std::lock_guard autoLock(deviceMapLocker); + deviceMap[nativeHandle] = this; +} + +GLDevice::~GLDevice() { + std::lock_guard autoLock(deviceMapLocker); + deviceMap.erase(nativeHandle); +} + +bool GLDevice::onLockContext() { + if (!onMakeCurrent()) { + return false; + } + if (context == nullptr) { + auto glInterface = GLInterface::GetNative(); + if (glInterface != nullptr) { + context = new GLContext(this, glInterface); + } else { + LOGE("GLDevice::onLockContext(): Error on creating GLInterface! "); + } + } + if (context == nullptr) { + onClearCurrent(); + return false; + } + return true; +} + +void GLDevice::onUnlockContext() { + onClearCurrent(); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/opengl/GLFragmentShaderBuilder.cpp b/src/opengl/GLFragmentShaderBuilder.cpp new file mode 100644 index 00000000..2dfa0d6d --- /dev/null +++ b/src/opengl/GLFragmentShaderBuilder.cpp @@ -0,0 +1,44 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLFragmentShaderBuilder.h" +#include "GLContext.h" +#include "GLProgramBuilder.h" + +namespace tgfx { +static constexpr char kDstColorName[] = "_dstColor"; + +GLFragmentShaderBuilder::GLFragmentShaderBuilder(ProgramBuilder* program) + : FragmentShaderBuilder(program) { + setPrecisionQualifier("precision mediump float;"); +} + +std::string GLFragmentShaderBuilder::dstColor() { + auto caps = GLCaps::Get(programBuilder->getContext()); + if (caps->frameBufferFetchSupport) { + addFeature(PrivateFeature::FramebufferFetch, caps->frameBufferFetchExtensionString); + return caps->frameBufferFetchColorName; + } + return kDstColorName; +} + +std::string GLFragmentShaderBuilder::colorOutputName() { + return static_cast(programBuilder)->isDesktopGL() ? CustomColorOutputName() + : "gl_FragColor"; +} +} // namespace tgfx diff --git a/src/opengl/GLFragmentShaderBuilder.h b/src/opengl/GLFragmentShaderBuilder.h new file mode 100644 index 00000000..d8d00b9f --- /dev/null +++ b/src/opengl/GLFragmentShaderBuilder.h @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/FragmentShaderBuilder.h" + +namespace tgfx { +class GLFragmentShaderBuilder : public FragmentShaderBuilder { + public: + explicit GLFragmentShaderBuilder(ProgramBuilder* program); + + std::string dstColor() override; + + private: + std::string colorOutputName() override; +}; +} // namespace tgfx diff --git a/src/opengl/GLFrameBuffer.h b/src/opengl/GLFrameBuffer.h new file mode 100644 index 00000000..e8bd8fec --- /dev/null +++ b/src/opengl/GLFrameBuffer.h @@ -0,0 +1,38 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/gpu/PixelFormat.h" + +namespace tgfx { +/** + * Types for interacting with GL frame buffers. + */ +struct GLFrameBuffer { + /** + * The id of this frame buffer. + */ + unsigned id = 0; + + /** + * The pixel format of this frame buffer. + */ + PixelFormat format = PixelFormat::RGBA_8888; +}; +} // namespace tgfx diff --git a/src/opengl/GLFunctions.cpp b/src/opengl/GLFunctions.cpp new file mode 100644 index 00000000..cc74587e --- /dev/null +++ b/src/opengl/GLFunctions.cpp @@ -0,0 +1,26 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/GLFunctions.h" +#include "opengl/GLContext.h" + +namespace tgfx { +const GLFunctions* GLFunctions::Get(const Context* context) { + return context ? static_cast(context)->functions() : nullptr; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/opengl/GLGpu.cpp b/src/opengl/GLGpu.cpp new file mode 100644 index 00000000..58b02eee --- /dev/null +++ b/src/opengl/GLGpu.cpp @@ -0,0 +1,272 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLGpu.h" +#include "GLUtil.h" +#include "opengl/GLRenderTarget.h" +#include "opengl/GLSemaphore.h" +#include "utils/PixelFormatUtil.h" + +namespace tgfx { +std::unique_ptr GLGpu::Make(Context* context) { + return std::unique_ptr(new GLGpu(context)); +} + +std::unique_ptr GLGpu::createSampler(int width, int height, PixelFormat format, + int mipLevelCount) { + // Texture memory must be allocated first on the web platform then can write pixels. + DEBUG_ASSERT(mipLevelCount > 0); + // Clear the previously generated GLError, causing the subsequent CheckGLError to return an + // incorrect result. 
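+  // In other words: read and discard any stale error left by earlier GL calls here, so that the
+  // per-mip-level CheckGLError below reflects only the texImage2D allocations that follow.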
+ CheckGLError(_context); + auto gl = GLFunctions::Get(_context); + auto sampler = std::make_unique(); + gl->genTextures(1, &(sampler->id)); + if (sampler->id == 0) { + return nullptr; + } + sampler->target = GL_TEXTURE_2D; + sampler->format = format; + sampler->maxMipMapLevel = mipLevelCount - 1; + gl->bindTexture(sampler->target, sampler->id); + gl->texParameteri(sampler->target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + gl->texParameteri(sampler->target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + gl->texParameteri(sampler->target, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + gl->texParameteri(sampler->target, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + const auto& textureFormat = GLCaps::Get(_context)->getTextureFormat(format); + bool success = true; + for (int level = 0; level < mipLevelCount && success; level++) { + const int twoToTheMipLevel = 1 << level; + const int currentWidth = std::max(1, width / twoToTheMipLevel); + const int currentHeight = std::max(1, height / twoToTheMipLevel); + gl->texImage2D(sampler->target, level, static_cast(textureFormat.internalFormatTexImage), + currentWidth, currentHeight, 0, textureFormat.externalFormat, GL_UNSIGNED_BYTE, + nullptr); + success = CheckGLError(_context); + } + if (!success) { + gl->deleteTextures(1, &(sampler->id)); + return nullptr; + } + return sampler; +} + +void GLGpu::deleteSampler(TextureSampler* sampler) { + auto glSampler = static_cast(sampler); + if (glSampler == nullptr || glSampler->id == 0) { + return; + } + GLFunctions::Get(_context)->deleteTextures(1, &glSampler->id); + glSampler->id = 0; +} + +void GLGpu::writePixels(const TextureSampler* sampler, Rect rect, const void* pixels, + size_t rowBytes) { + if (sampler == nullptr) { + return; + } + auto gl = GLFunctions::Get(_context); + // https://skia-review.googlesource.com/c/skia/+/571418 + // HUAWEI nova9 pro(Adreno 642L), iqoo neo5(Adreno 650), Redmi K30pro(Adreno 650), + // Xiaomi 8(Adreno 630), glaxy s9(Adreno 630) + gl->flush(); + auto caps = GLCaps::Get(_context); + auto glSampler = static_cast(sampler); + gl->bindTexture(glSampler->target, glSampler->id); + const auto& format = caps->getTextureFormat(sampler->format); + auto bytesPerPixel = PixelFormatBytesPerPixel(sampler->format); + gl->pixelStorei(GL_UNPACK_ALIGNMENT, static_cast(bytesPerPixel)); + int x = static_cast(rect.x()); + int y = static_cast(rect.y()); + int width = static_cast(rect.width()); + int height = static_cast(rect.height()); + if (caps->unpackRowLengthSupport) { + // the number of pixels, not bytes + gl->pixelStorei(GL_UNPACK_ROW_LENGTH, static_cast(rowBytes / bytesPerPixel)); + gl->texSubImage2D(glSampler->target, 0, x, y, width, height, format.externalFormat, + GL_UNSIGNED_BYTE, pixels); + gl->pixelStorei(GL_UNPACK_ROW_LENGTH, 0); + } else { + if (static_cast(width) * bytesPerPixel == rowBytes) { + gl->texSubImage2D(glSampler->target, 0, x, y, width, height, format.externalFormat, + GL_UNSIGNED_BYTE, pixels); + } else { + auto data = reinterpret_cast(pixels); + for (int row = 0; row < height; ++row) { + gl->texSubImage2D(glSampler->target, 0, x, y + row, width, 1, format.externalFormat, + GL_UNSIGNED_BYTE, data + (row * rowBytes)); + } + } + } + if (sampler->hasMipmaps()) { + onRegenerateMipMapLevels(sampler); + } +} + +static int FilterToGLMagFilter(FilterMode filterMode) { + switch (filterMode) { + case FilterMode::Nearest: + return GL_NEAREST; + case FilterMode::Linear: + return GL_LINEAR; + } +} + +static int FilterToGLMinFilter(FilterMode filterMode, MipMapMode mipMapMode) { + switch (mipMapMode) { + case 
MipMapMode::None: + return FilterToGLMagFilter(filterMode); + case MipMapMode::Nearest: + switch (filterMode) { + case FilterMode::Nearest: + return GL_NEAREST_MIPMAP_NEAREST; + case FilterMode::Linear: + return GL_LINEAR_MIPMAP_NEAREST; + } + case MipMapMode::Linear: + switch (filterMode) { + case FilterMode::Nearest: + return GL_NEAREST_MIPMAP_LINEAR; + case FilterMode::Linear: + return GL_LINEAR_MIPMAP_LINEAR; + } + } +} + +static int GetGLWrap(unsigned target, SamplerState::WrapMode wrapMode) { + if (target == GL_TEXTURE_RECTANGLE) { + if (wrapMode == SamplerState::WrapMode::ClampToBorder) { + return GL_CLAMP_TO_BORDER; + } else { + return GL_CLAMP_TO_EDGE; + } + } + switch (wrapMode) { + case SamplerState::WrapMode::Clamp: + return GL_CLAMP_TO_EDGE; + case SamplerState::WrapMode::Repeat: + return GL_REPEAT; + case SamplerState::WrapMode::MirrorRepeat: + return GL_MIRRORED_REPEAT; + case SamplerState::WrapMode::ClampToBorder: + return GL_CLAMP_TO_BORDER; + } +} + +void GLGpu::bindTexture(int unitIndex, const TextureSampler* sampler, SamplerState samplerState) { + if (sampler == nullptr) { + return; + } + auto glSampler = static_cast(sampler); + auto gl = GLFunctions::Get(_context); + gl->activeTexture(GL_TEXTURE0 + unitIndex); + gl->bindTexture(glSampler->target, glSampler->id); + gl->texParameteri(glSampler->target, GL_TEXTURE_WRAP_S, + GetGLWrap(glSampler->target, samplerState.wrapModeX)); + gl->texParameteri(glSampler->target, GL_TEXTURE_WRAP_T, + GetGLWrap(glSampler->target, samplerState.wrapModeY)); + if (samplerState.mipMapped() && (!_context->caps()->mipMapSupport || !glSampler->hasMipmaps())) { + samplerState.mipMapMode = MipMapMode::None; + } + gl->texParameteri(glSampler->target, GL_TEXTURE_MIN_FILTER, + FilterToGLMinFilter(samplerState.filterMode, samplerState.mipMapMode)); + gl->texParameteri(glSampler->target, GL_TEXTURE_MAG_FILTER, + FilterToGLMagFilter(samplerState.filterMode)); +} + +void GLGpu::copyRenderTargetToTexture(const RenderTarget* renderTarget, Texture* texture, + const Rect& srcRect, const Point& dstPoint) { + auto gl = GLFunctions::Get(_context); + auto glRenderTarget = static_cast(renderTarget); + gl->bindFramebuffer(GL_FRAMEBUFFER, glRenderTarget->getFrameBufferID(false)); + auto glSampler = static_cast(texture->getSampler()); + gl->bindTexture(glSampler->target, glSampler->id); + // format != BGRA && !srcHasMSAARenderBuffer && !dstHasMSAARenderBuffer && dstIsTextureable && + // dstOrigin == srcOrigin && canConfigBeFBOColorAttachment(srcConfig) && (!srcIsTextureable || + // srcIsGLTexture2D) + gl->copyTexSubImage2D(glSampler->target, 0, static_cast(dstPoint.x), + static_cast(dstPoint.y), static_cast(srcRect.x()), + static_cast(srcRect.y()), static_cast(srcRect.width()), + static_cast(srcRect.height())); +} + +void GLGpu::resolveRenderTarget(RenderTarget* renderTarget) { + static_cast(renderTarget)->resolve(); +} + +bool GLGpu::insertSemaphore(Semaphore* semaphore) { + if (semaphore == nullptr) { + return false; + } + auto gl = GLFunctions::Get(_context); + auto* sync = gl->fenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0); + if (sync) { + static_cast(semaphore)->glSync = sync; + // If we inserted semaphores during the flush, we need to call glFlush. 
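+    // Without this flush the fence could sit in this context's command queue and never reach the
+    // GPU, so a waitSemaphore() call issued from another context might stall indefinitely.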
+ gl->flush(); + return true; + } + return false; +} + +bool GLGpu::waitSemaphore(const Semaphore* semaphore) { + auto glSync = static_cast(semaphore)->glSync; + if (glSync == nullptr) { + return false; + } + auto gl = GLFunctions::Get(_context); + gl->waitSync(glSync, 0, GL_TIMEOUT_IGNORED); + gl->deleteSync(glSync); + return true; +} + +RenderPass* GLGpu::getRenderPass(std::shared_ptr renderTarget, + std::shared_ptr renderTargetTexture) { + if (glRenderPass == nullptr) { + glRenderPass = GLRenderPass::Make(_context); + } + if (glRenderPass) { + glRenderPass->set(renderTarget, renderTargetTexture); + } + return glRenderPass.get(); +} + +bool GLGpu::submitToGpu(bool syncCpu) { + auto gl = GLFunctions::Get(_context); + if (syncCpu) { + gl->finish(); + } else { + gl->flush(); + } + return true; +} + +void GLGpu::submit(RenderPass*) { + glRenderPass->reset(); +} + +void GLGpu::onRegenerateMipMapLevels(const TextureSampler* sampler) { + auto gl = GLFunctions::Get(_context); + auto glSampler = static_cast(sampler); + if (glSampler->target != GL_TEXTURE_2D) { + return; + } + gl->bindTexture(glSampler->target, glSampler->id); + gl->generateMipmap(glSampler->target); +} +} // namespace tgfx diff --git a/src/opengl/GLGpu.h b/src/opengl/GLGpu.h new file mode 100644 index 00000000..e993290e --- /dev/null +++ b/src/opengl/GLGpu.h @@ -0,0 +1,63 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Gpu.h" +#include "opengl/GLRenderPass.h" + +namespace tgfx { +class GLGpu : public Gpu { + public: + static std::unique_ptr Make(Context* context); + + std::unique_ptr createSampler(int width, int height, PixelFormat format, + int mipLevelCount) override; + + void deleteSampler(TextureSampler* sampler) override; + + void writePixels(const TextureSampler* sampler, Rect rect, const void* pixels, + size_t rowBytes) override; + + void bindTexture(int unitIndex, const TextureSampler* sampler, SamplerState samplerState = {}); + + void copyRenderTargetToTexture(const RenderTarget* renderTarget, Texture* texture, + const Rect& srcRect, const Point& dstPoint) override; + + void resolveRenderTarget(RenderTarget* renderTarget) override; + + bool insertSemaphore(Semaphore* semaphore) override; + + bool waitSemaphore(const Semaphore* semaphore) override; + + RenderPass* getRenderPass(std::shared_ptr renderTarget, + std::shared_ptr renderTargetTexture) override; + + bool submitToGpu(bool syncCpu) override; + + void submit(RenderPass* renderPass) override; + + private: + explicit GLGpu(Context* context) : Gpu(context) { + } + + void onRegenerateMipMapLevels(const TextureSampler* sampler) override; + + std::unique_ptr glRenderPass; +}; +} // namespace tgfx diff --git a/src/opengl/GLInterface.cpp b/src/opengl/GLInterface.cpp new file mode 100644 index 00000000..46ceff5a --- /dev/null +++ b/src/opengl/GLInterface.cpp @@ -0,0 +1,296 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLInterface.h" +#include +#include +#include "GLAssembledGLESInterface.h" +#include "GLAssembledGLInterface.h" +#include "GLAssembledWebGLInterface.h" +#include "GLUtil.h" + +namespace tgfx { +static std::mutex interfaceLocker = {}; +static std::unordered_map> glInterfaceMap = {}; + +static int GetGLVersion(const GLProcGetter* getter) { + if (getter == nullptr) { + return -1; + } + auto glGetString = reinterpret_cast(getter->getProcAddress("glGetString")); + if (glGetString == nullptr) { + return -1; + } + auto versionString = (const char*)glGetString(GL_VERSION); + return GetGLVersion(versionString).majorVersion; +} + +const GLInterface* GLInterface::Get(const Context* context) { + return context ? 
static_cast(context)->glInterface : nullptr; +} + +const GLInterface* GLInterface::GetNative() { + auto getter = GLProcGetter::Make(); + if (getter == nullptr) { + return nullptr; + } + auto version = GetGLVersion(getter.get()); + if (version <= 0) { + return nullptr; + } + std::lock_guard autoLock(interfaceLocker); + auto result = glInterfaceMap.find(version); + if (result != glInterfaceMap.end()) { + return result->second.get(); + } + glInterfaceMap[version] = MakeNativeInterface(getter.get()); + return glInterfaceMap[version].get(); +} + +std::unique_ptr GLInterface::MakeNativeInterface(const GLProcGetter* getter) { + if (getter == nullptr) { + return nullptr; + } + auto getString = reinterpret_cast(getter->getProcAddress("glGetString")); + if (getString == nullptr) { + return nullptr; + } + auto getIntegerv = reinterpret_cast(getter->getProcAddress("glGetIntegerv")); + if (getIntegerv == nullptr) { + return nullptr; + } + auto getShaderPrecisionFormat = reinterpret_cast( + getter->getProcAddress("glGetShaderPrecisionFormat")); + auto getStringi = reinterpret_cast(getter->getProcAddress("glGetStringi")); + auto getInternalformativ = + reinterpret_cast(getter->getProcAddress("glGetInternalformativ")); + GLInfo info(getString, getStringi, getIntegerv, getInternalformativ, getShaderPrecisionFormat); + auto interface = new GLInterface(); + auto functions = std::make_shared(); + interface->functions = functions; + functions->activeTexture = + reinterpret_cast(getter->getProcAddress("glActiveTexture")); + functions->attachShader = + reinterpret_cast(getter->getProcAddress("glAttachShader")); + functions->bindAttribLocation = + reinterpret_cast(getter->getProcAddress("glBindAttribLocation")); + functions->bindBuffer = reinterpret_cast(getter->getProcAddress("glBindBuffer")); + functions->bindFramebuffer = + reinterpret_cast(getter->getProcAddress("glBindFramebuffer")); + functions->bindRenderbuffer = + reinterpret_cast(getter->getProcAddress("glBindRenderbuffer")); + functions->bindTexture = + reinterpret_cast(getter->getProcAddress("glBindTexture")); + functions->blendColor = reinterpret_cast(getter->getProcAddress("glBlendColor")); + functions->blendEquation = + reinterpret_cast(getter->getProcAddress("glBlendEquation")); + functions->blendEquationSeparate = + reinterpret_cast(getter->getProcAddress("glBlendEquationSeparate")); + functions->blendFunc = reinterpret_cast(getter->getProcAddress("glBlendFunc")); + functions->blendFuncSeparate = + reinterpret_cast(getter->getProcAddress("glBlendFuncSeparate")); + functions->bufferData = reinterpret_cast(getter->getProcAddress("glBufferData")); + functions->bufferSubData = + reinterpret_cast(getter->getProcAddress("glBufferSubData")); + functions->checkFramebufferStatus = reinterpret_cast( + getter->getProcAddress("glCheckFramebufferStatus")); + functions->clear = reinterpret_cast(getter->getProcAddress("glClear")); + functions->clearColor = reinterpret_cast(getter->getProcAddress("glClearColor")); + functions->clearDepthf = + reinterpret_cast(getter->getProcAddress("glClearDepthf")); + functions->clearStencil = + reinterpret_cast(getter->getProcAddress("glClearStencil")); + functions->colorMask = reinterpret_cast(getter->getProcAddress("glColorMask")); + functions->compileShader = + reinterpret_cast(getter->getProcAddress("glCompileShader")); + functions->compressedTexImage2D = + reinterpret_cast(getter->getProcAddress("glCompressedTexImage2D")); + functions->compressedTexSubImage2D = reinterpret_cast( + 
getter->getProcAddress("glCompressedTexSubImage2D")); + functions->copyTexSubImage2D = + reinterpret_cast(getter->getProcAddress("glCopyTexSubImage2D")); + functions->createProgram = + reinterpret_cast(getter->getProcAddress("glCreateProgram")); + functions->createShader = + reinterpret_cast(getter->getProcAddress("glCreateShader")); + functions->cullFace = reinterpret_cast(getter->getProcAddress("glCullFace")); + functions->deleteBuffers = + reinterpret_cast(getter->getProcAddress("glDeleteBuffers")); + functions->deleteFramebuffers = + reinterpret_cast(getter->getProcAddress("glDeleteFramebuffers")); + functions->deleteProgram = + reinterpret_cast(getter->getProcAddress("glDeleteProgram")); + functions->deleteRenderbuffers = + reinterpret_cast(getter->getProcAddress("glDeleteRenderbuffers")); + functions->deleteShader = + reinterpret_cast(getter->getProcAddress("glDeleteShader")); + functions->deleteSync = reinterpret_cast(getter->getProcAddress("glDeleteSync")); + functions->deleteTextures = + reinterpret_cast(getter->getProcAddress("glDeleteTextures")); + functions->depthFunc = reinterpret_cast(getter->getProcAddress("glDepthFunc")); + functions->depthMask = reinterpret_cast(getter->getProcAddress("glDepthMask")); + functions->disable = reinterpret_cast(getter->getProcAddress("glDisable")); + functions->disableVertexAttribArray = reinterpret_cast( + getter->getProcAddress("glDisableVertexAttribArray")); + functions->drawArrays = reinterpret_cast(getter->getProcAddress("glDrawArrays")); + functions->drawElements = + reinterpret_cast(getter->getProcAddress("glDrawElements")); + functions->enable = reinterpret_cast(getter->getProcAddress("glEnable")); + functions->isEnabled = reinterpret_cast(getter->getProcAddress("glIsEnabled")); + functions->enableVertexAttribArray = reinterpret_cast( + getter->getProcAddress("glEnableVertexAttribArray")); + functions->fenceSync = reinterpret_cast(getter->getProcAddress("glFenceSync")); + functions->finish = reinterpret_cast(getter->getProcAddress("glFinish")); + functions->flush = reinterpret_cast(getter->getProcAddress("glFlush")); + functions->framebufferRenderbuffer = reinterpret_cast( + getter->getProcAddress("glFramebufferRenderbuffer")); + functions->framebufferTexture2D = + reinterpret_cast(getter->getProcAddress("glFramebufferTexture2D")); + functions->frontFace = reinterpret_cast(getter->getProcAddress("glFrontFace")); + functions->genBuffers = reinterpret_cast(getter->getProcAddress("glGenBuffers")); + functions->genFramebuffers = + reinterpret_cast(getter->getProcAddress("glGenFramebuffers")); + functions->generateMipmap = + reinterpret_cast(getter->getProcAddress("glGenerateMipmap")); + functions->genRenderbuffers = + reinterpret_cast(getter->getProcAddress("glGenRenderbuffers")); + functions->genTextures = + reinterpret_cast(getter->getProcAddress("glGenTextures")); + functions->getBufferParameteriv = + reinterpret_cast(getter->getProcAddress("glGetBufferParameteriv")); + functions->getError = reinterpret_cast(getter->getProcAddress("glGetError")); + functions->getFramebufferAttachmentParameteriv = + reinterpret_cast( + getter->getProcAddress("glGetFramebufferAttachmentParameteriv")); + functions->getIntegerv = + reinterpret_cast(getter->getProcAddress("glGetIntegerv")); + functions->getInternalformativ = + reinterpret_cast(getter->getProcAddress("glGetInternalformativ")); + functions->getBooleanv = + reinterpret_cast(getter->getProcAddress("glGetBooleanv")); + functions->getProgramInfoLog = + 
reinterpret_cast(getter->getProcAddress("glGetProgramInfoLog")); + functions->getProgramiv = + reinterpret_cast(getter->getProcAddress("glGetProgramiv")); + functions->getRenderbufferParameteriv = reinterpret_cast( + getter->getProcAddress("glGetRenderbufferParameteriv")); + functions->getShaderInfoLog = + reinterpret_cast(getter->getProcAddress("glGetShaderInfoLog")); + functions->getShaderiv = + reinterpret_cast(getter->getProcAddress("glGetShaderiv")); + functions->getShaderPrecisionFormat = reinterpret_cast( + getter->getProcAddress("glGetShaderPrecisionFormat")); + functions->getString = reinterpret_cast(getter->getProcAddress("glGetString")); + functions->getStringi = reinterpret_cast(getter->getProcAddress("glGetStringi")); + functions->getVertexAttribiv = + reinterpret_cast(getter->getProcAddress("glGetVertexAttribiv")); + functions->getVertexAttribPointerv = reinterpret_cast( + getter->getProcAddress("glGetVertexAttribPointerv")); + functions->getAttribLocation = + reinterpret_cast(getter->getProcAddress("glGetAttribLocation")); + functions->getUniformLocation = + reinterpret_cast(getter->getProcAddress("glGetUniformLocation")); + functions->isTexture = reinterpret_cast(getter->getProcAddress("glIsTexture")); + functions->lineWidth = reinterpret_cast(getter->getProcAddress("glLineWidth")); + functions->linkProgram = + reinterpret_cast(getter->getProcAddress("glLinkProgram")); + functions->pixelStorei = + reinterpret_cast(getter->getProcAddress("glPixelStorei")); + functions->readPixels = reinterpret_cast(getter->getProcAddress("glReadPixels")); + functions->renderbufferStorage = + reinterpret_cast(getter->getProcAddress("glRenderbufferStorage")); + functions->resolveMultisampleFramebuffer = reinterpret_cast( + getter->getProcAddress("glResolveMultisampleFramebufferAPPLE")); + functions->scissor = reinterpret_cast(getter->getProcAddress("glScissor")); + functions->shaderSource = + reinterpret_cast(getter->getProcAddress("glShaderSource")); + functions->stencilFunc = + reinterpret_cast(getter->getProcAddress("glStencilFunc")); + functions->stencilFuncSeparate = + reinterpret_cast(getter->getProcAddress("glStencilFuncSeparate")); + functions->stencilMask = + reinterpret_cast(getter->getProcAddress("glStencilMask")); + functions->stencilMaskSeparate = + reinterpret_cast(getter->getProcAddress("glStencilMaskSeparate")); + functions->stencilOp = reinterpret_cast(getter->getProcAddress("glStencilOp")); + functions->stencilOpSeparate = + reinterpret_cast(getter->getProcAddress("glStencilOpSeparate")); + functions->texImage2D = reinterpret_cast(getter->getProcAddress("glTexImage2D")); + functions->texParameterf = + reinterpret_cast(getter->getProcAddress("glTexParameterf")); + functions->texParameterfv = + reinterpret_cast(getter->getProcAddress("glTexParameterfv")); + functions->texParameteri = + reinterpret_cast(getter->getProcAddress("glTexParameteri")); + functions->texParameteriv = + reinterpret_cast(getter->getProcAddress("glTexParameteriv")); + functions->texSubImage2D = + reinterpret_cast(getter->getProcAddress("glTexSubImage2D")); + functions->uniform1f = reinterpret_cast(getter->getProcAddress("glUniform1f")); + functions->uniform1i = reinterpret_cast(getter->getProcAddress("glUniform1i")); + functions->uniform1fv = reinterpret_cast(getter->getProcAddress("glUniform1fv")); + functions->uniform1iv = reinterpret_cast(getter->getProcAddress("glUniform1iv")); + functions->uniform2f = reinterpret_cast(getter->getProcAddress("glUniform2f")); + functions->uniform2i = 
reinterpret_cast(getter->getProcAddress("glUniform2i")); + functions->uniform2fv = reinterpret_cast(getter->getProcAddress("glUniform2fv")); + functions->uniform2iv = reinterpret_cast(getter->getProcAddress("glUniform2iv")); + functions->uniform3f = reinterpret_cast(getter->getProcAddress("glUniform3f")); + functions->uniform3i = reinterpret_cast(getter->getProcAddress("glUniform3i")); + functions->uniform3fv = reinterpret_cast(getter->getProcAddress("glUniform3fv")); + functions->uniform3iv = reinterpret_cast(getter->getProcAddress("glUniform3iv")); + functions->uniform4f = reinterpret_cast(getter->getProcAddress("glUniform4f")); + functions->uniform4i = reinterpret_cast(getter->getProcAddress("glUniform4i")); + functions->uniform4fv = reinterpret_cast(getter->getProcAddress("glUniform4fv")); + functions->uniform4iv = reinterpret_cast(getter->getProcAddress("glUniform4iv")); + functions->uniformMatrix2fv = + reinterpret_cast(getter->getProcAddress("glUniformMatrix2fv")); + functions->uniformMatrix3fv = + reinterpret_cast(getter->getProcAddress("glUniformMatrix3fv")); + functions->uniformMatrix4fv = + reinterpret_cast(getter->getProcAddress("glUniformMatrix4fv")); + functions->useProgram = reinterpret_cast(getter->getProcAddress("glUseProgram")); + functions->vertexAttrib1f = + reinterpret_cast(getter->getProcAddress("glVertexAttrib1f")); + functions->vertexAttrib2fv = + reinterpret_cast(getter->getProcAddress("glVertexAttrib2fv")); + functions->vertexAttrib3fv = + reinterpret_cast(getter->getProcAddress("glVertexAttrib3fv")); + functions->vertexAttrib4fv = + reinterpret_cast(getter->getProcAddress("glVertexAttrib4fv")); + functions->vertexAttribPointer = + reinterpret_cast(getter->getProcAddress("glVertexAttribPointer")); + functions->viewport = reinterpret_cast(getter->getProcAddress("glViewport")); + functions->waitSync = reinterpret_cast(getter->getProcAddress("glWaitSync")); + + switch (info.standard) { + case GLStandard::None: + break; + case GLStandard::GL: + GLAssembleGLInterface(getter, functions.get(), info); + break; + case GLStandard::GLES: + GLAssembleGLESInterface(getter, functions.get(), info); + break; + case GLStandard::WebGL: + GLAssembleWebGLInterface(getter, functions.get(), info); + break; + } + interface->caps = std::shared_ptr(new GLCaps(info)); + return std::unique_ptr(interface); +} +} // namespace tgfx diff --git a/src/opengl/GLInterface.h b/src/opengl/GLInterface.h new file mode 100644 index 00000000..cd921c91 --- /dev/null +++ b/src/opengl/GLInterface.h @@ -0,0 +1,43 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "GLCaps.h" +#include "GLProcGetter.h" +#include "tgfx/opengl/GLDefines.h" +#include "tgfx/opengl/GLFunctions.h" + +namespace tgfx { +class GLState; + +class GLInterface { + public: + static const GLInterface* Get(const Context* context); + + std::shared_ptr caps = nullptr; + std::shared_ptr functions = nullptr; + + private: + static const GLInterface* GetNative(); + static std::unique_ptr MakeNativeInterface(const GLProcGetter* getter); + + friend class GLDevice; + friend class GLContext; +}; +} // namespace tgfx diff --git a/src/opengl/GLProcGetter.h b/src/opengl/GLProcGetter.h new file mode 100644 index 00000000..70391063 --- /dev/null +++ b/src/opengl/GLProcGetter.h @@ -0,0 +1,32 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include + +namespace tgfx { +class GLProcGetter { + public: + static std::unique_ptr Make(); + + virtual ~GLProcGetter() = default; + + virtual void* getProcAddress(const char name[]) const = 0; +}; +} // namespace tgfx diff --git a/src/opengl/GLProgram.cpp b/src/opengl/GLProgram.cpp new file mode 100644 index 00000000..78115702 --- /dev/null +++ b/src/opengl/GLProgram.cpp @@ -0,0 +1,91 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLProgram.h" +#include "GLGpu.h" +#include "GLUtil.h" + +namespace tgfx { +GLProgram::GLProgram(Context* context, unsigned programID, + std::unique_ptr uniformBuffer, + std::vector attributes, int vertexStride) + : Program(context), programId(programID), uniformBuffer(std::move(uniformBuffer)), + attributes(std::move(attributes)), _vertexStride(vertexStride) { +} + +void GLProgram::setupSamplerUniforms(const std::vector& textureSamplers) const { + auto gl = GLFunctions::Get(context); + gl->useProgram(programId); + // Assign texture units to sampler uniforms one time up front. + auto count = static_cast(textureSamplers.size()); + for (int i = 0; i < count; ++i) { + const auto& sampler = textureSamplers[i]; + if (UNUSED_UNIFORM != sampler.location) { + gl->uniform1i(sampler.location, i); + } + } +} + +void GLProgram::onReleaseGPU() { + if (programId) { + auto gl = GLFunctions::Get(context); + gl->deleteProgram(programId); + } +} + +void GLProgram::updateUniformsAndTextureBindings(const GLRenderTarget* renderTarget, + const ProgramInfo* programInfo) { + setRenderTargetState(renderTarget); + programInfo->getUniforms(uniformBuffer.get()); + uniformBuffer->uploadToGPU(context); + auto samplers = programInfo->getSamplers(); + int textureUnit = 0; + auto gpu = static_cast(context->gpu()); + for (auto& info : samplers) { + gpu->bindTexture(textureUnit++, info.sampler, info.state); + } +} + +static std::array GetRTAdjustArray(int width, int height, bool flipY) { + std::array result = {}; + result[0] = 2.f / static_cast(width); + result[2] = 2.f / static_cast(height); + result[1] = -1.f; + result[3] = -1.f; + if (flipY) { + result[2] = -result[2]; + result[3] = -result[3]; + } + return result; +} + +void GLProgram::setRenderTargetState(const GLRenderTarget* renderTarget) { + int width = renderTarget->width(); + int height = renderTarget->height(); + auto origin = renderTarget->origin(); + if (renderTargetState.width == width && renderTargetState.height == height && + renderTargetState.origin == origin) { + return; + } + renderTargetState.width = width; + renderTargetState.height = height; + renderTargetState.origin = origin; + auto v = GetRTAdjustArray(width, height, origin == ImageOrigin::BottomLeft); + uniformBuffer->setData(RTAdjustName, v); +} +} // namespace tgfx diff --git a/src/opengl/GLProgram.h b/src/opengl/GLProgram.h new file mode 100644 index 00000000..2f270858 --- /dev/null +++ b/src/opengl/GLProgram.h @@ -0,0 +1,85 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "GLContext.h" +#include "gpu/Program.h" +#include "gpu/ProgramInfo.h" +#include "gpu/SLType.h" +#include "opengl/GLRenderTarget.h" +#include "opengl/GLUniformHandler.h" + +namespace tgfx { +class GLProgram : public Program { + public: + struct Attribute { + SLType gpuType = SLType::Float; + size_t offset = 0; + int location = 0; + }; + + GLProgram(Context* context, unsigned programID, std::unique_ptr uniformBuffer, + std::vector attributes, int vertexStride); + + void setupSamplerUniforms(const std::vector& textureSamplers) const; + + /** + * Gets the GL program ID for this program. + */ + unsigned programID() const { + return programId; + } + + /** + * This function uploads uniforms, calls each GL*Processor's setData. It binds all fragment + * processor textures. + * + * It is the caller's responsibility to ensure the program is bound before calling. + */ + void updateUniformsAndTextureBindings(const GLRenderTarget* renderTarget, + const ProgramInfo* programInfo); + + int vertexStride() const { + return _vertexStride; + } + + const std::vector& vertexAttributes() const { + return attributes; + } + + private: + struct RenderTargetState { + std::optional width; + std::optional height; + std::optional origin; + }; + + void setRenderTargetState(const GLRenderTarget* renderTarget); + + void onReleaseGPU() override; + + RenderTargetState renderTargetState; + unsigned programId = 0; + std::unique_ptr uniformBuffer = nullptr; + + std::vector attributes; + int _vertexStride = 0; +}; +} // namespace tgfx diff --git a/src/opengl/GLProgramBuilder.cpp b/src/opengl/GLProgramBuilder.cpp new file mode 100644 index 00000000..19d9c78f --- /dev/null +++ b/src/opengl/GLProgramBuilder.cpp @@ -0,0 +1,163 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLProgramBuilder.h" +#include "GLContext.h" +#include "GLUtil.h" + +namespace tgfx { +static std::string TypeModifierString(bool isDesktopGL, ShaderVar::TypeModifier t, + ShaderFlags flag) { + switch (t) { + case ShaderVar::TypeModifier::None: + return ""; + case ShaderVar::TypeModifier::Attribute: + return isDesktopGL ? "in" : "attribute"; + case ShaderVar::TypeModifier::Varying: + return isDesktopGL ? (flag == ShaderFlags::Vertex ? 
"out" : "in") : "varying"; + case ShaderVar::TypeModifier::Uniform: + return "uniform"; + case ShaderVar::TypeModifier::Out: + return "out"; + } +} + +static constexpr std::pair SLTypes[] = { + {SLType::Void, "void"}, + {SLType::Float, "float"}, + {SLType::Float2, "vec2"}, + {SLType::Float3, "vec3"}, + {SLType::Float4, "vec4"}, + {SLType::Float2x2, "mat2"}, + {SLType::Float3x3, "mat3"}, + {SLType::Float4x4, "mat4"}, + {SLType::Int, "int"}, + {SLType::Int2, "ivec2"}, + {SLType::Int3, "ivec3"}, + {SLType::Int4, "ivec4"}, + {SLType::Texture2DRectSampler, "sampler2DRect"}, + {SLType::TextureExternalSampler, "samplerExternalOES"}, + {SLType::Texture2DSampler, "sampler2D"}, +}; + +static std::string SLTypeString(SLType t) { + for (const auto& pair : SLTypes) { + if (pair.first == t) { + return pair.second; + } + } + return ""; +} + +std::unique_ptr ProgramBuilder::CreateProgram(Context* context, const Pipeline* pipeline) { + GLProgramBuilder builder(context, pipeline); + if (!builder.emitAndInstallProcessors()) { + return nullptr; + } + return builder.finalize(); +} + +GLProgramBuilder::GLProgramBuilder(Context* context, const Pipeline* pipeline) + : ProgramBuilder(context, pipeline), _varyingHandler(this), _uniformHandler(this), + _vertexBuilder(this), _fragBuilder(this) { +} + +std::string GLProgramBuilder::versionDeclString() { + return isDesktopGL() ? "#version 150\n" : "#version 100\n"; +} + +std::string GLProgramBuilder::textureFuncName() const { + return isDesktopGL() ? "texture" : "texture2D"; +} + +std::string GLProgramBuilder::getShaderVarDeclarations(const ShaderVar& var, + ShaderFlags flag) const { + std::string ret; + if (var.typeModifier() != ShaderVar::TypeModifier::None) { + ret += TypeModifierString(isDesktopGL(), var.typeModifier(), flag); + ret += " "; + // On Android,fragment shader's varying needs high precision. 
+ if (var.typeModifier() == ShaderVar::TypeModifier::Varying && flag == ShaderFlags::Fragment) { + ret += "highp "; + } + } + ret += SLTypeString(var.type()); + ret += " "; + ret += var.name(); + return ret; +} + +std::unique_ptr GLProgramBuilder::finalize() { + if (isDesktopGL()) { + fragmentShaderBuilder()->declareCustomOutputColor(); + } + finalizeShaders(); + + auto vertex = vertexShaderBuilder()->shaderString(); + auto fragment = fragmentShaderBuilder()->shaderString(); + auto programID = CreateGLProgram(context, vertex, fragment); + if (programID == 0) { + return nullptr; + } + computeCountsAndStrides(programID); + resolveProgramResourceLocations(programID); + + return createProgram(programID); +} + +void GLProgramBuilder::computeCountsAndStrides(unsigned int programID) { + auto gl = GLFunctions::Get(context); + vertexStride = 0; + for (const auto* attr : pipeline->getGeometryProcessor()->vertexAttributes()) { + GLProgram::Attribute attribute; + attribute.gpuType = attr->gpuType(); + attribute.offset = vertexStride; + vertexStride += static_cast(attr->sizeAlign4()); + attribute.location = gl->getAttribLocation(programID, attr->name().c_str()); + if (attribute.location >= 0) { + attributes.push_back(attribute); + } + } +} + +void GLProgramBuilder::resolveProgramResourceLocations(unsigned programID) { + _uniformHandler.resolveUniformLocations(programID); +} + +bool GLProgramBuilder::checkSamplerCounts() { + auto caps = GLCaps::Get(context); + if (numFragmentSamplers > caps->maxFragmentSamplers) { + LOGE("Program would use too many fragment samplers."); + return false; + } + return true; +} + +std::unique_ptr GLProgramBuilder::createProgram(unsigned programID) { + auto uniformBuffer = _uniformHandler.makeUniformBuffer(); + auto program = + new GLProgram(context, programID, std::move(uniformBuffer), attributes, vertexStride); + program->setupSamplerUniforms(_uniformHandler.samplers); + return std::unique_ptr(program); +} + +bool GLProgramBuilder::isDesktopGL() const { + auto caps = GLCaps::Get(context); + return caps->standard == GLStandard::GL; +} +} // namespace tgfx diff --git a/src/opengl/GLProgramBuilder.h b/src/opengl/GLProgramBuilder.h new file mode 100644 index 00000000..134704ad --- /dev/null +++ b/src/opengl/GLProgramBuilder.h @@ -0,0 +1,81 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "GLContext.h" +#include "GLFragmentShaderBuilder.h" +#include "GLProgram.h" +#include "GLUniformHandler.h" +#include "GLVertexShaderBuilder.h" +#include "gpu/ProgramBuilder.h" + +namespace tgfx { +class GLProgramBuilder : public ProgramBuilder { + public: + std::string versionDeclString() override; + + std::string textureFuncName() const override; + + std::string getShaderVarDeclarations(const ShaderVar& var, ShaderFlags flag) const override; + + bool isDesktopGL() const; + + private: + GLProgramBuilder(Context* context, const Pipeline* pipeline); + + void computeCountsAndStrides(unsigned programID); + + std::unique_ptr finalize(); + + void resolveProgramResourceLocations(unsigned programID); + + std::unique_ptr createProgram(unsigned programID); + + UniformHandler* uniformHandler() override { + return &_uniformHandler; + } + + const UniformHandler* uniformHandler() const override { + return &_uniformHandler; + } + + VaryingHandler* varyingHandler() override { + return &_varyingHandler; + } + + VertexShaderBuilder* vertexShaderBuilder() override { + return &_vertexBuilder; + } + + FragmentShaderBuilder* fragmentShaderBuilder() override { + return &_fragBuilder; + } + + bool checkSamplerCounts() override; + + VaryingHandler _varyingHandler; + GLUniformHandler _uniformHandler; + GLVertexShaderBuilder _vertexBuilder; + GLFragmentShaderBuilder _fragBuilder; + std::vector attributes; + int vertexStride = 0; + + friend class ProgramBuilder; +}; +} // namespace tgfx diff --git a/src/opengl/GLRenderPass.cpp b/src/opengl/GLRenderPass.cpp new file mode 100644 index 00000000..d9ec451b --- /dev/null +++ b/src/opengl/GLRenderPass.cpp @@ -0,0 +1,216 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLRenderPass.h" +#include "GLGpu.h" +#include "GLUtil.h" +#include "gpu/ProgramCache.h" + +namespace tgfx { +struct AttribLayout { + bool normalized = false; // Only used by floating point types. 
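+  // These fields map directly to the normalized/size/type arguments handed to
+  // glVertexAttribPointer() in GLRenderPass::draw(); the table below also leaves integer
+  // attributes unnormalized.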
+ int count = 0; + unsigned type = 0; +}; + +static constexpr std::pair attribLayoutPair[] = { + {SLType::Float, {false, 1, GL_FLOAT}}, {SLType::Float2, {false, 2, GL_FLOAT}}, + {SLType::Float3, {false, 3, GL_FLOAT}}, {SLType::Float4, {false, 4, GL_FLOAT}}, + {SLType::Int, {false, 1, GL_INT}}, {SLType::Int2, {false, 2, GL_INT}}, + {SLType::Int3, {false, 3, GL_INT}}, {SLType::Int4, {false, 4, GL_INT}}}; + +static AttribLayout GetAttribLayout(SLType type) { + for (const auto& pair : attribLayoutPair) { + if (pair.first == type) { + return pair.second; + } + } + return {false, 0, 0}; +} + +class VertexArrayObject : public Resource { + public: + static std::shared_ptr Make(Context* context) { + auto gl = GLFunctions::Get(context); + unsigned id = 0; + // Using VAO is required in the core profile. + gl->genVertexArrays(1, &id); + if (id == 0) { + return nullptr; + } + return Resource::Wrap(context, new VertexArrayObject(id)); + } + + explicit VertexArrayObject(unsigned id) : id(id) { + } + + size_t memoryUsage() const override { + return 0; + } + + unsigned id = 0; + + private: + void onReleaseGPU() override { + auto gl = GLFunctions::Get(context); + if (id > 0) { + gl->deleteVertexArrays(1, &id); + id = 0; + } + } +}; + +std::unique_ptr GLRenderPass::Make(Context* context) { + std::shared_ptr vertexArrayObject; + if (GLCaps::Get(context)->vertexArrayObjectSupport) { + vertexArrayObject = VertexArrayObject::Make(context); + if (vertexArrayObject == nullptr) { + return nullptr; + } + } + return std::unique_ptr(new GLRenderPass(context, vertexArrayObject)); +} + +static void UpdateScissor(Context* context, const Rect& scissorRect) { + auto gl = GLFunctions::Get(context); + if (scissorRect.isEmpty()) { + gl->disable(GL_SCISSOR_TEST); + } else { + gl->enable(GL_SCISSOR_TEST); + gl->scissor(static_cast(scissorRect.x()), static_cast(scissorRect.y()), + static_cast(scissorRect.width()), static_cast(scissorRect.height())); + } +} + +static const unsigned gXfermodeCoeff2Blend[] = { + GL_ZERO, GL_ONE, + GL_SRC_COLOR, GL_ONE_MINUS_SRC_COLOR, + GL_DST_COLOR, GL_ONE_MINUS_DST_COLOR, + GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, + GL_DST_ALPHA, GL_ONE_MINUS_DST_ALPHA, +}; + +static void UpdateBlend(Context* context, const BlendInfo* blendFactors) { + auto gl = GLFunctions::Get(context); + if (blendFactors != nullptr) { + gl->enable(GL_BLEND); + gl->blendFunc(gXfermodeCoeff2Blend[static_cast(blendFactors->srcBlend)], + gXfermodeCoeff2Blend[static_cast(blendFactors->dstBlend)]); + gl->blendEquation(GL_FUNC_ADD); + } else { + gl->disable(GL_BLEND); + auto caps = GLCaps::Get(context); + if (caps->frameBufferFetchSupport && caps->frameBufferFetchRequiresEnablePerSample) { + gl->enable(GL_FETCH_PER_SAMPLE_ARM); + } + } +} + +void GLRenderPass::set(std::shared_ptr renderTarget, + std::shared_ptr renderTargetTexture) { + _renderTarget = std::move(renderTarget); + _renderTargetTexture = std::move(renderTargetTexture); +} + +void GLRenderPass::reset() { + _renderTarget = nullptr; + _renderTargetTexture = nullptr; +} + +bool GLRenderPass::onBindProgramAndScissorClip(const ProgramInfo* programInfo, + const Rect& drawBounds) { + _program = static_cast(_context->programCache()->getProgram(programInfo)); + if (_program == nullptr) { + return false; + } + auto gl = GLFunctions::Get(_context); + CheckGLError(_context); + auto glRT = static_cast(_renderTarget.get()); + auto* program = static_cast(_program); + gl->useProgram(program->programID()); + gl->bindFramebuffer(GL_FRAMEBUFFER, glRT->getFrameBufferID()); + gl->viewport(0, 0, 
glRT->width(), glRT->height()); + UpdateScissor(_context, drawBounds); + UpdateBlend(_context, programInfo->blendInfo()); + if (programInfo->requiresBarrier()) { + gl->textureBarrier(); + } + program->updateUniformsAndTextureBindings(glRT, programInfo); + return true; +} + +void GLRenderPass::onBindBuffers(std::shared_ptr indexBuffer, + std::shared_ptr vertexBuffer) { + _indexBuffer = std::move(indexBuffer); + _vertexBuffer = std::move(vertexBuffer); +} + +static const unsigned gPrimitiveType[] = {GL_TRIANGLES, GL_TRIANGLE_STRIP}; + +void GLRenderPass::onDraw(PrimitiveType primitiveType, int baseVertex, int vertexCount) { + auto func = [&]() { + auto gl = GLFunctions::Get(_context); + gl->drawArrays(gPrimitiveType[static_cast(primitiveType)], baseVertex, vertexCount); + }; + draw(func); +} + +void GLRenderPass::onDrawIndexed(PrimitiveType primitiveType, int baseIndex, int indexCount) { + auto func = [&]() { + auto gl = GLFunctions::Get(_context); + gl->bindBuffer(GL_ELEMENT_ARRAY_BUFFER, + std::static_pointer_cast(_indexBuffer)->bufferID()); + gl->drawElements(gPrimitiveType[static_cast(primitiveType)], indexCount, GL_UNSIGNED_SHORT, + reinterpret_cast(baseIndex * sizeof(uint16_t))); + gl->bindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); + }; + draw(func); +} + +void GLRenderPass::draw(const std::function& func) { + auto gl = GLFunctions::Get(_context); + if (vertexArrayObject) { + gl->bindVertexArray(vertexArrayObject->id); + } + gl->bindBuffer(GL_ARRAY_BUFFER, std::static_pointer_cast(_vertexBuffer)->bufferID()); + auto* program = static_cast(_program); + for (const auto& attribute : program->vertexAttributes()) { + const AttribLayout& layout = GetAttribLayout(attribute.gpuType); + gl->vertexAttribPointer(static_cast(attribute.location), layout.count, layout.type, + layout.normalized, program->vertexStride(), + reinterpret_cast(attribute.offset)); + gl->enableVertexAttribArray(static_cast(attribute.location)); + } + func(); + if (vertexArrayObject) { + gl->bindVertexArray(0); + } + gl->bindBuffer(GL_ARRAY_BUFFER, 0); + CheckGLError(_context); +} + +void GLRenderPass::onClear(const Rect& scissor, Color color) { + auto gl = GLFunctions::Get(_context); + auto glRT = static_cast(_renderTarget.get()); + gl->bindFramebuffer(GL_FRAMEBUFFER, glRT->getFrameBufferID()); + gl->viewport(0, 0, glRT->width(), glRT->height()); + UpdateScissor(_context, scissor); + gl->clearColor(color.red, color.green, color.blue, color.alpha); + gl->clear(GL_COLOR_BUFFER_BIT); +} +} // namespace tgfx diff --git a/src/opengl/GLRenderPass.h b/src/opengl/GLRenderPass.h new file mode 100644 index 00000000..3eafe8e1 --- /dev/null +++ b/src/opengl/GLRenderPass.h @@ -0,0 +1,61 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
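// ---- Editorial aside (not part of the patch) ----------------------------------------------
// The draw() call above feeds each Attribute's AttribLayout into vertexAttribPointer: `count`
// components of `type` per vertex, read every vertexStride() bytes starting at the attribute's
// byte offset. Below is a minimal, self-contained sketch of that interleaving arithmetic; the
// struct and the Float2-position / Float4-color layout are hypothetical, purely illustrative.
#include <cassert>
#include <cstddef>

struct DemoAttribute {
  int count = 0;        // number of components, as in AttribLayout::count
  size_t byteSize = 0;  // count * sizeof(float) for GL_FLOAT attributes
  size_t offset = 0;    // byte offset of the attribute inside one interleaved vertex
};

inline void InterleavedLayoutSketch() {
  DemoAttribute position = {2, 2 * sizeof(float), 0};
  DemoAttribute color = {4, 4 * sizeof(float), position.byteSize};
  size_t vertexStride = position.byteSize + color.byteSize;
  // draw() would issue vertexAttribPointer(location, count, GL_FLOAT, normalized, vertexStride,
  // reinterpret_cast<void*>(offset)) for each of these attributes.
  assert(color.offset == 8);
  assert(vertexStride == 24);
}
// --------------------------------------------------------------------------------------------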
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "GLBuffer.h" +#include "GLProgram.h" +#include "gpu/AAType.h" +#include "gpu/Pipeline.h" +#include "gpu/RenderPass.h" +#include "gpu/ops/Op.h" +#include "gpu/processors/FragmentProcessor.h" +#include "gpu/processors/GeometryProcessor.h" +#include "tgfx/core/BlendMode.h" + +namespace tgfx { +class VertexArrayObject; + +class GLRenderPass : public RenderPass { + public: + static std::unique_ptr Make(Context* context); + + void set(std::shared_ptr renderTarget, + std::shared_ptr renderTargetTexture); + + void reset(); + + protected: + bool onBindProgramAndScissorClip(const ProgramInfo* programInfo, const Rect& drawBounds) override; + void onBindBuffers(std::shared_ptr indexBuffer, + std::shared_ptr vertexBuffer) override; + void onDraw(PrimitiveType primitiveType, int baseVertex, int vertexCount) override; + void onDrawIndexed(PrimitiveType primitiveType, int baseIndex, int indexCount) override; + void onClear(const Rect& scissor, Color color) override; + + private: + GLRenderPass(Context* context, std::shared_ptr vertexArrayObject) + : RenderPass(context), vertexArrayObject(std::move(vertexArrayObject)) { + } + + void draw(const std::function& func); + + std::shared_ptr vertexArrayObject; +}; +} // namespace tgfx diff --git a/src/opengl/GLRenderTarget.cpp b/src/opengl/GLRenderTarget.cpp new file mode 100644 index 00000000..63d42c72 --- /dev/null +++ b/src/opengl/GLRenderTarget.cpp @@ -0,0 +1,335 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLRenderTarget.h" +#include "gpu/TextureSampler.h" +#include "opengl/GLContext.h" +#include "opengl/GLSampler.h" +#include "opengl/GLUtil.h" +#include "tgfx/core/Pixmap.h" +#include "tgfx/utils/Buffer.h" +#include "utils/PixelFormatUtil.h" + +namespace tgfx { +std::shared_ptr RenderTarget::MakeFrom(Context* context, + const BackendRenderTarget& renderTarget, + ImageOrigin origin) { + if (context == nullptr || !renderTarget.isValid()) { + return nullptr; + } + GLFrameBufferInfo frameBufferInfo = {}; + if (!renderTarget.getGLFramebufferInfo(&frameBufferInfo)) { + return nullptr; + } + GLFrameBuffer frameBuffer = {}; + frameBuffer.id = frameBufferInfo.id; + frameBuffer.format = GLSizeFormatToPixelFormat(frameBufferInfo.format); + auto target = + new GLRenderTarget(renderTarget.width(), renderTarget.height(), origin, 1, frameBuffer); + target->frameBufferForDraw = frameBuffer; + target->externalResource = true; + return Resource::Wrap(context, target); +} + +static bool RenderbufferStorageMSAA(Context* context, int sampleCount, PixelFormat pixelFormat, + int width, int height) { + CheckGLError(context); + auto gl = GLFunctions::Get(context); + auto caps = GLCaps::Get(context); + auto format = caps->getTextureFormat(pixelFormat).sizedFormat; + switch (caps->msFBOType) { + case MSFBOType::Standard: + gl->renderbufferStorageMultisample(GL_RENDERBUFFER, sampleCount, format, width, height); + break; + case MSFBOType::ES_Apple: + gl->renderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER, sampleCount, format, width, height); + break; + case MSFBOType::ES_EXT_MsToTexture: + case MSFBOType::ES_IMG_MsToTexture: + gl->renderbufferStorageMultisampleEXT(GL_RENDERBUFFER, sampleCount, format, width, height); + break; + case MSFBOType::None: + LOGE("Shouldn't be here if we don't support multisampled renderbuffers."); + break; + } + return CheckGLError(context); +} + +static void FrameBufferTexture2D(Context* context, unsigned textureTarget, unsigned textureID, + int sampleCount) { + auto gl = GLFunctions::Get(context); + auto caps = GLCaps::Get(context); + // When unbinding, framebufferTexture2DMultisample can crash on Huawei devices, so unbinding always goes through framebufferTexture2D instead. + if (textureID != 0 && sampleCount > 1 && caps->usesImplicitMSAAResolve()) { + gl->framebufferTexture2DMultisample(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, textureTarget, + textureID, 0, sampleCount); + } else { + gl->framebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, textureTarget, textureID, 0); + } +} + +static void ReleaseResource(Context* context, GLFrameBuffer* textureFBInfo, + GLFrameBuffer* renderTargetFBInfo = nullptr, + unsigned* msRenderBufferID = nullptr) { + auto gl = GLFunctions::Get(context); + if (textureFBInfo && textureFBInfo->id) { + gl->deleteFramebuffers(1, &(textureFBInfo->id)); + if (renderTargetFBInfo && renderTargetFBInfo->id == textureFBInfo->id) { + renderTargetFBInfo->id = 0; + } + textureFBInfo->id = 0; + } + if (renderTargetFBInfo && renderTargetFBInfo->id > 0) { + gl->bindFramebuffer(GL_FRAMEBUFFER, renderTargetFBInfo->id); + gl->framebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, 0); + gl->bindFramebuffer(GL_FRAMEBUFFER, 0); + gl->deleteFramebuffers(1, &(renderTargetFBInfo->id)); + renderTargetFBInfo->id = 0; + } + if (msRenderBufferID && *msRenderBufferID > 0) { + gl->deleteRenderbuffers(1, msRenderBufferID); + *msRenderBufferID = 0; + } +} + +static bool CreateRenderBuffer(const Texture* texture, 
GLFrameBuffer* renderTargetFBInfo, + unsigned* msRenderBufferID, int sampleCount) { + auto gl = GLFunctions::Get(texture->getContext()); + gl->genFramebuffers(1, &(renderTargetFBInfo->id)); + if (renderTargetFBInfo->id == 0) { + return false; + } + gl->genRenderbuffers(1, msRenderBufferID); + if (*msRenderBufferID == 0) { + return false; + } + gl->bindRenderbuffer(GL_RENDERBUFFER, *msRenderBufferID); + if (!RenderbufferStorageMSAA(texture->getContext(), sampleCount, renderTargetFBInfo->format, + texture->width(), texture->height())) { + return false; + } + gl->bindFramebuffer(GL_FRAMEBUFFER, renderTargetFBInfo->id); + gl->framebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, + *msRenderBufferID); +#ifdef TGFX_BUILD_FOR_WEB + return true; +#else + return gl->checkFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE; +#endif +} + +std::shared_ptr RenderTarget::MakeFrom(const Texture* texture, int sampleCount) { + if (texture == nullptr) { + return nullptr; + } + auto context = texture->getContext(); + auto caps = GLCaps::Get(context); + auto glSampler = static_cast(texture->getSampler()); + if (!caps->isFormatRenderable(glSampler->format)) { + return nullptr; + } + auto gl = GLFunctions::Get(context); + GLFrameBuffer textureFBInfo = {}; + textureFBInfo.format = glSampler->format; + gl->genFramebuffers(1, &textureFBInfo.id); + if (textureFBInfo.id == 0) { + return nullptr; + } + GLFrameBuffer renderTargetFBInfo = {}; + renderTargetFBInfo.format = glSampler->format; + unsigned msRenderBufferID = 0; + if (sampleCount > 1 && caps->usesMSAARenderBuffers()) { + if (!CreateRenderBuffer(texture, &renderTargetFBInfo, &msRenderBufferID, sampleCount)) { + ReleaseResource(context, &textureFBInfo, &renderTargetFBInfo, &msRenderBufferID); + return nullptr; + } + } else { + renderTargetFBInfo = textureFBInfo; + } + gl->bindFramebuffer(GL_FRAMEBUFFER, textureFBInfo.id); + FrameBufferTexture2D(context, glSampler->target, glSampler->id, sampleCount); +#ifndef TGFX_BUILD_FOR_WEB + if (gl->checkFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { + ReleaseResource(context, &textureFBInfo, &renderTargetFBInfo, &msRenderBufferID); + return nullptr; + } +#endif + auto rt = new GLRenderTarget(texture->width(), texture->height(), texture->origin(), sampleCount, + textureFBInfo, glSampler->target); + rt->frameBufferForDraw = renderTargetFBInfo; + rt->msRenderBufferID = msRenderBufferID; + return Resource::Wrap(context, rt); +} + +GLRenderTarget::GLRenderTarget(int width, int height, ImageOrigin origin, int sampleCount, + GLFrameBuffer frameBuffer, unsigned textureTarget) + : RenderTarget(width, height, origin, sampleCount), frameBufferForRead(frameBuffer), + textureTarget(textureTarget) { +} + +static bool CanReadDirectly(Context* context, ImageOrigin origin, const ImageInfo& srcInfo, + const ImageInfo& dstInfo) { + if (origin != ImageOrigin::TopLeft || dstInfo.alphaType() != srcInfo.alphaType() || + dstInfo.colorType() != srcInfo.colorType()) { + return false; + } + auto caps = GLCaps::Get(context); + if (dstInfo.rowBytes() != dstInfo.minRowBytes() && !caps->packRowLengthSupport) { + return false; + } + return true; +} + +static void CopyPixels(const ImageInfo& srcInfo, const void* srcPixels, const ImageInfo& dstInfo, + void* dstPixels, bool flipY) { + auto pixels = srcPixels; + Buffer tempBuffer = {}; + if (flipY) { + tempBuffer.alloc(srcInfo.byteSize()); + auto rowCount = srcInfo.height(); + auto rowBytes = srcInfo.rowBytes(); + auto dst = tempBuffer.bytes(); + for (int 
i = 0; i < rowCount; i++) { + auto src = reinterpret_cast(srcPixels) + (rowCount - i - 1) * rowBytes; + memcpy(dst, src, rowBytes); + dst += rowBytes; + } + pixels = tempBuffer.data(); + } + Pixmap pixmap(srcInfo, pixels); + pixmap.readPixels(dstInfo, dstPixels); +} + +BackendRenderTarget GLRenderTarget::getBackendRenderTarget() const { + GLFrameBufferInfo glInfo = {}; + glInfo.id = frameBufferForDraw.id; + glInfo.format = PixelFormatToGLSizeFormat(frameBufferForDraw.format); + return {glInfo, width(), height()}; +} + +bool GLRenderTarget::readPixels(const ImageInfo& dstInfo, void* dstPixels, int srcX, + int srcY) const { + dstPixels = dstInfo.computeOffset(dstPixels, -srcX, -srcY); + auto outInfo = dstInfo.makeIntersect(-srcX, -srcY, width(), height()); + if (outInfo.isEmpty()) { + return false; + } + auto pixelFormat = frameBufferForRead.format; + auto gl = GLFunctions::Get(context); + auto caps = GLCaps::Get(context); + const auto& format = caps->getTextureFormat(pixelFormat); + gl->bindFramebuffer(GL_FRAMEBUFFER, frameBufferForRead.id); + + auto colorType = PixelFormatToColorType(pixelFormat); + auto srcInfo = + ImageInfo::Make(outInfo.width(), outInfo.height(), colorType, AlphaType::Premultiplied); + void* pixels = nullptr; + Buffer tempBuffer = {}; + auto restoreGLRowLength = false; + if (CanReadDirectly(context, origin(), srcInfo, outInfo)) { + pixels = dstPixels; + if (outInfo.rowBytes() != outInfo.minRowBytes()) { + gl->pixelStorei(GL_PACK_ROW_LENGTH, + static_cast(outInfo.rowBytes() / outInfo.bytesPerPixel())); + restoreGLRowLength = true; + } + } else { + tempBuffer.alloc(srcInfo.byteSize()); + pixels = tempBuffer.data(); + } + auto alignment = pixelFormat == PixelFormat::ALPHA_8 ? 1 : 4; + gl->pixelStorei(GL_PACK_ALIGNMENT, alignment); + auto flipY = origin() == ImageOrigin::BottomLeft; + auto readX = std::max(0, srcX); + auto readY = std::max(0, srcY); + if (flipY) { + readY = height() - readY - outInfo.height(); + } + gl->readPixels(readX, readY, outInfo.width(), outInfo.height(), format.externalFormat, + GL_UNSIGNED_BYTE, pixels); + if (restoreGLRowLength) { + gl->pixelStorei(GL_PACK_ROW_LENGTH, 0); + } + if (!tempBuffer.isEmpty()) { + CopyPixels(srcInfo, tempBuffer.data(), outInfo, dstPixels, flipY); + } + return true; +} + +unsigned GLRenderTarget::getFrameBufferID(bool forDraw) const { + return forDraw ? frameBufferForDraw.id : frameBufferForRead.id; +} + +void GLRenderTarget::resolve() const { + if (sampleCount() <= 1) { + return; + } + auto gl = GLFunctions::Get(context); + auto caps = GLCaps::Get(context); + if (!caps->usesMSAARenderBuffers()) { + return; + } + gl->bindFramebuffer(GL_READ_FRAMEBUFFER, frameBufferForDraw.id); + gl->bindFramebuffer(GL_DRAW_FRAMEBUFFER, frameBufferForRead.id); + if (caps->msFBOType == MSFBOType::ES_Apple) { + // Apple's extension uses the scissor as the blit bounds. + gl->enable(GL_SCISSOR_TEST); + gl->scissor(0, 0, width(), height()); + gl->resolveMultisampleFramebuffer(); + gl->disable(GL_SCISSOR_TEST); + } else { + // BlitFrameBuffer respects the scissor, so disable it. 
+ gl->disable(GL_SCISSOR_TEST); + gl->blitFramebuffer(0, 0, width(), height(), 0, 0, width(), height(), GL_COLOR_BUFFER_BIT, + GL_NEAREST); + } +} + +bool GLRenderTarget::replaceTexture(const Texture* texture) { + if (textureTarget == 0 || texture == nullptr || texture->width() != width() || + texture->height() != height() || texture->origin() != origin() || + texture->getSampler()->format != format()) { + return false; + } + auto sampler = static_cast(texture->getSampler()); + auto gl = GLFunctions::Get(context); + gl->bindFramebuffer(GL_FRAMEBUFFER, frameBufferForRead.id); + FrameBufferTexture2D(context, textureTarget, 0, sampleCount()); + textureTarget = sampler->target; + FrameBufferTexture2D(context, textureTarget, sampler->id, sampleCount()); + gl->bindFramebuffer(GL_FRAMEBUFFER, 0); + return true; +} + +void GLRenderTarget::onReleaseGPU() { + if (externalResource) { + return; + } + if (textureTarget != 0) { + auto gl = GLFunctions::Get(context); + gl->bindFramebuffer(GL_FRAMEBUFFER, frameBufferForRead.id); + FrameBufferTexture2D(context, textureTarget, 0, sampleCount()); + gl->bindFramebuffer(GL_FRAMEBUFFER, 0); + } + ReleaseResource(context, &frameBufferForRead, &frameBufferForDraw, &msRenderBufferID); +} + +const Swizzle& GLRenderTarget::writeSwizzle() const { + return context->caps()->getWriteSwizzle(frameBufferForDraw.format); +} +} // namespace tgfx diff --git a/src/opengl/GLRenderTarget.h b/src/opengl/GLRenderTarget.h new file mode 100644 index 00000000..97db9676 --- /dev/null +++ b/src/opengl/GLRenderTarget.h @@ -0,0 +1,66 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/RenderTarget.h" +#include "opengl/GLFrameBuffer.h" + +namespace tgfx { +class GLInterface; + +/** + * Represents an OpenGL 2D buffer of pixels that can be rendered to. + */ +class GLRenderTarget : public RenderTarget { + public: + PixelFormat format() const override { + return frameBufferForDraw.format; + } + + /** + * Returns the frame buffer id associated with this render target. 
+ */ + unsigned getFrameBufferID(bool forDraw = true) const; + + void resolve() const; + + BackendRenderTarget getBackendRenderTarget() const override; + + bool readPixels(const ImageInfo& dstInfo, void* dstPixels, int srcX = 0, + int srcY = 0) const override; + + bool replaceTexture(const Texture* texture) override; + + private: + GLFrameBuffer frameBufferForRead = {}; + GLFrameBuffer frameBufferForDraw = {}; + unsigned msRenderBufferID = 0; + unsigned textureTarget = 0; + bool externalResource = false; + + GLRenderTarget(int width, int height, ImageOrigin origin, int sampleCount, + GLFrameBuffer frameBuffer, unsigned textureTarget = 0); + + void onReleaseGPU() override; + + const Swizzle& writeSwizzle() const override; + + friend class RenderTarget; +}; +} // namespace tgfx diff --git a/src/opengl/GLSampler.cpp b/src/opengl/GLSampler.cpp new file mode 100644 index 00000000..943c62fa --- /dev/null +++ b/src/opengl/GLSampler.cpp @@ -0,0 +1,63 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
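// ---- Editorial aside (not part of the patch) ----------------------------------------------
// A usage sketch for the readPixels() declared above: the caller describes the destination
// with an ImageInfo and supplies a buffer of at least dstInfo.byteSize() bytes. Assumes a live
// GLRenderTarget and the in-tree headers; error handling is omitted.
#include "GLRenderTarget.h"
#include "tgfx/utils/Buffer.h"

inline bool ReadTopLeftQuadrant(const tgfx::GLRenderTarget* renderTarget) {
  auto dstInfo = tgfx::ImageInfo::Make(renderTarget->width() / 2, renderTarget->height() / 2,
                                       tgfx::ColorType::RGBA_8888, tgfx::AlphaType::Premultiplied);
  tgfx::Buffer buffer = {};
  buffer.alloc(dstInfo.byteSize());
  // srcX / srcY pick where in the render target to start reading; (0, 0) is the top-left corner.
  return renderTarget->readPixels(dstInfo, buffer.data(), 0, 0);
}
// --------------------------------------------------------------------------------------------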
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "opengl/GLSampler.h" +#include "GLCaps.h" +#include "opengl/GLUtil.h" + +namespace tgfx { +std::unique_ptr TextureSampler::MakeFrom(Context* context, + const BackendTexture& backendTexture) { + GLTextureInfo textureInfo = {}; + if (context == nullptr || !backendTexture.getGLTextureInfo(&textureInfo)) { + return nullptr; + } + auto sampler = std::make_unique(); + sampler->id = textureInfo.id; + sampler->target = textureInfo.target; + sampler->format = GLSizeFormatToPixelFormat(textureInfo.format); + return sampler; +} + +TextureType GLSampler::type() const { + switch (target) { + case GL_TEXTURE_2D: + return TextureType::TwoD; + case GL_TEXTURE_RECTANGLE: + return TextureType::Rectangle; + case GL_TEXTURE_EXTERNAL_OES: + return TextureType::External; + default: + return TextureType::Unknown; + } +} + +BackendTexture GLSampler::getBackendTexture(int width, int height) const { + GLTextureInfo textureInfo = {}; + textureInfo.id = id; + textureInfo.target = target; + textureInfo.format = PixelFormatToGLSizeFormat(format); + return {textureInfo, width, height}; +} + +void GLSampler::computeKey(Context* context, BytesKey* bytesKey) const { + auto caps = GLCaps::Get(context); + bytesKey->write(static_cast(caps->getReadSwizzle(format).asKey())); + bytesKey->write(target); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/opengl/GLSampler.h b/src/opengl/GLSampler.h new file mode 100644 index 00000000..321de4e6 --- /dev/null +++ b/src/opengl/GLSampler.h @@ -0,0 +1,47 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/TextureSampler.h" +#include "tgfx/opengl/GLDefines.h" + +namespace tgfx { +/** + * Defines the sampling parameters for an OpenGL texture uint. + */ +class GLSampler : public TextureSampler { + public: + /** + * The OpenGL texture id of the sampler. + */ + unsigned id = 0; + + /** + * The OpenGL texture target of the sampler. + */ + unsigned target = GL_TEXTURE_2D; + + TextureType type() const override; + + BackendTexture getBackendTexture(int width, int height) const override; + + protected: + void computeKey(Context* context, BytesKey* bytesKey) const override; +}; +} // namespace tgfx diff --git a/src/opengl/GLSemaphore.cpp b/src/opengl/GLSemaphore.cpp new file mode 100644 index 00000000..f60fc8c5 --- /dev/null +++ b/src/opengl/GLSemaphore.cpp @@ -0,0 +1,36 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. 
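// ---- Editorial aside (not part of the patch) ----------------------------------------------
// Sketch of wrapping an externally created GL texture through the TextureSampler::MakeFrom()
// overload defined above: fill a GLTextureInfo, wrap it in a BackendTexture, and let MakeFrom()
// translate the sized format back to a PixelFormat. The header paths and the GL_RGBA8 choice
// are assumptions for illustration.
#include <memory>
#include "gpu/TextureSampler.h"
#include "tgfx/opengl/GLDefines.h"

inline std::unique_ptr<tgfx::TextureSampler> WrapExternalTexture(tgfx::Context* context,
                                                                 unsigned textureID, int width,
                                                                 int height) {
  tgfx::GLTextureInfo textureInfo = {};
  textureInfo.id = textureID;
  textureInfo.target = GL_TEXTURE_2D;  // plain 2D texture, the GLSampler default target
  textureInfo.format = GL_RGBA8;       // sized format, mapped back by GLSizeFormatToPixelFormat()
  tgfx::BackendTexture backendTexture(textureInfo, width, height);
  return tgfx::TextureSampler::MakeFrom(context, backendTexture);
}
// --------------------------------------------------------------------------------------------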
+// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLSemaphore.h" + +namespace tgfx { +std::unique_ptr Semaphore::Wrap(const BackendSemaphore* backendSemaphore) { + if (backendSemaphore == nullptr) { + return nullptr; + } + auto semaphore = std::make_unique(); + semaphore->glSync = backendSemaphore->glSync(); + return semaphore; +} + +BackendSemaphore GLSemaphore::getBackendSemaphore() const { + BackendSemaphore semaphore = {}; + semaphore.initGL(glSync); + return semaphore; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/opengl/GLSemaphore.h b/src/opengl/GLSemaphore.h new file mode 100644 index 00000000..adb9d4d3 --- /dev/null +++ b/src/opengl/GLSemaphore.h @@ -0,0 +1,36 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Semaphore.h" + +namespace tgfx { +/** + * Types for interacting with OpenGL semaphore object. + */ +class GLSemaphore : public Semaphore { + public: + /** + * Holds the GLsync as a void*. + */ + void* glSync = nullptr; + + BackendSemaphore getBackendSemaphore() const override; +}; +} // namespace tgfx diff --git a/src/opengl/GLStandard.h b/src/opengl/GLStandard.h new file mode 100644 index 00000000..d72bdbf6 --- /dev/null +++ b/src/opengl/GLStandard.h @@ -0,0 +1,23 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +enum class GLStandard { None, GL, GLES, WebGL }; +} diff --git a/src/opengl/GLUniformBuffer.cpp b/src/opengl/GLUniformBuffer.cpp new file mode 100644 index 00000000..e9e73645 --- /dev/null +++ b/src/opengl/GLUniformBuffer.cpp @@ -0,0 +1,100 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLUniformBuffer.h" +#include "tgfx/opengl/GLFunctions.h" +#include "utils/Log.h" + +namespace tgfx { +GLUniformBuffer::GLUniformBuffer(std::vector uniformList, std::vector locationList) + : StagedUniformBuffer(std::move(uniformList)), locations(std::move(locationList)) { + DEBUG_ASSERT(uniforms.size() == locations.size()); + if (!uniforms.empty()) { + dirtyFlags.resize(uniforms.size(), true); + size_t bufferSize = offsets.back() + uniforms.back().size(); + buffer = new (std::nothrow) uint8_t[bufferSize]; + } +} + +GLUniformBuffer::~GLUniformBuffer() { + delete[] buffer; +} + +void GLUniformBuffer::onCopyData(int index, size_t offset, size_t size, const void* data) { + if (!dirtyFlags[index] && memcmp(buffer + offset, data, size) == 0) { + return; + } + dirtyFlags[index] = true; + bufferChanged = true; + memcpy(buffer + offset, data, size); +} + +void GLUniformBuffer::uploadToGPU(Context* context) { + if (!bufferChanged) { + return; + } + bufferChanged = false; + auto gl = GLFunctions::Get(context); + int index = 0; + for (auto& uniform : uniforms) { + if (!dirtyFlags[index]) { + index++; + continue; + } + dirtyFlags[index] = false; + auto location = locations[index]; + auto offset = offsets[index]; + switch (uniform.type) { + case Uniform::Type::Float: + gl->uniform1fv(location, 1, reinterpret_cast(buffer + offset)); + break; + case Uniform::Type::Float2: + gl->uniform2fv(location, 1, reinterpret_cast(buffer + offset)); + break; + case Uniform::Type::Float3: + gl->uniform3fv(location, 1, reinterpret_cast(buffer + offset)); + break; + case Uniform::Type::Float4: + gl->uniform4fv(location, 1, reinterpret_cast(buffer + offset)); + break; + case Uniform::Type::Float2x2: + gl->uniformMatrix2fv(location, 1, GL_FALSE, reinterpret_cast(buffer + offset)); + break; + case 
Uniform::Type::Float3x3: + gl->uniformMatrix3fv(location, 1, GL_FALSE, reinterpret_cast(buffer + offset)); + break; + case Uniform::Type::Float4x4: + gl->uniformMatrix4fv(location, 1, GL_FALSE, reinterpret_cast(buffer + offset)); + break; + case Uniform::Type::Int: + gl->uniform1iv(location, 1, reinterpret_cast(buffer + offset)); + break; + case Uniform::Type::Int2: + gl->uniform2iv(location, 1, reinterpret_cast(buffer + offset)); + break; + case Uniform::Type::Int3: + gl->uniform3iv(location, 1, reinterpret_cast(buffer + offset)); + break; + case Uniform::Type::Int4: + gl->uniform4iv(location, 1, reinterpret_cast(buffer + offset)); + break; + } + index++; + } +} +} // namespace tgfx diff --git a/src/opengl/GLUniformBuffer.h b/src/opengl/GLUniformBuffer.h new file mode 100644 index 00000000..77d4627c --- /dev/null +++ b/src/opengl/GLUniformBuffer.h @@ -0,0 +1,42 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/StagedUniformBuffer.h" +#include "tgfx/gpu/Context.h" + +namespace tgfx { +class GLUniformBuffer : public StagedUniformBuffer { + public: + GLUniformBuffer(std::vector uniforms, std::vector locations); + + ~GLUniformBuffer() override; + + void uploadToGPU(Context* context); + + protected: + void onCopyData(int index, size_t offset, size_t size, const void* data) override; + + private: + uint8_t* buffer = nullptr; + bool bufferChanged = false; + std::vector locations = {}; + std::vector dirtyFlags = {}; +}; +} // namespace tgfx diff --git a/src/opengl/GLUniformHandler.cpp b/src/opengl/GLUniformHandler.cpp new file mode 100644 index 00000000..b2434645 --- /dev/null +++ b/src/opengl/GLUniformHandler.cpp @@ -0,0 +1,149 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
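// ---- Editorial aside (not part of the patch) ----------------------------------------------
// GLUniformBuffer above stages uniform bytes on the CPU and only re-sends a uniform when its
// staged value actually changed. A stripped-down, GL-free sketch of that dirty-flag idea with
// hypothetical names:
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

class StagedValue {
 public:
  explicit StagedValue(size_t size) : bytes(size, 0) {
  }

  // Mirrors onCopyData(): skip the copy (and the later upload) when the bytes are unchanged.
  void copy(const void* data, size_t size) {
    if (!dirty && std::memcmp(bytes.data(), data, size) == 0) {
      return;
    }
    std::memcpy(bytes.data(), data, size);
    dirty = true;
  }

  // Mirrors uploadToGPU(): only dirty values reach the glUniform* calls, then the flag clears.
  bool takeForUpload() {
    bool needsUpload = dirty;
    dirty = false;
    return needsUpload;
  }

 private:
  std::vector<uint8_t> bytes;
  bool dirty = true;  // a freshly staged value always uploads once
};
// --------------------------------------------------------------------------------------------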
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLUniformHandler.h" +#include "GLProgramBuilder.h" + +namespace tgfx { +std::string GLUniformHandler::internalAddUniform(ShaderFlags visibility, SLType type, + const std::string& name, bool mangleName) { + GLUniform uniform; + uniform.variable.setType(type); + uniform.variable.setTypeModifier(ShaderVar::TypeModifier::Uniform); + char prefix = 'u'; + if (prefix == name[0] || name.find(NO_MANGLE_PREFIX) == 0) { + prefix = '\0'; + } + uniform.variable.setName(programBuilder->nameVariable(prefix, name, mangleName)); + uniform.visibility = visibility; + auto uniformKey = StagedUniformBuffer::GetMangledName(name, programBuilder->stageIndex()); + uniformMap[uniformKey] = uniform; + return uniform.variable.name(); +} + +SamplerHandle GLUniformHandler::addSampler(const TextureSampler* sampler, const std::string& name) { + auto mangleName = programBuilder->nameVariable('u', name); + + auto caps = GLCaps::Get(programBuilder->getContext()); + const auto& swizzle = caps->getReadSwizzle(sampler->format); + + SLType type; + switch (static_cast(sampler)->target) { + case GL_TEXTURE_EXTERNAL_OES: + programBuilder->fragmentShaderBuilder()->addFeature(PrivateFeature::OESTexture, + "GL_OES_EGL_image_external"); + type = SLType::TextureExternalSampler; + break; + case GL_TEXTURE_RECTANGLE: + type = SLType::Texture2DRectSampler; + break; + default: + type = SLType::Texture2DSampler; + break; + } + GLUniform samplerUniform; + samplerUniform.variable.setType(type); + samplerUniform.variable.setTypeModifier(ShaderVar::TypeModifier::Uniform); + samplerUniform.variable.setName(mangleName); + samplerUniform.visibility = ShaderFlags::Fragment; + samplerSwizzles.push_back(swizzle); + samplers.push_back(samplerUniform); + return SamplerHandle(samplers.size() - 1); +} + +std::string GLUniformHandler::getUniformDeclarations(ShaderFlags visibility) const { + std::string ret; + for (auto& item : uniformMap) { + auto& uniform = item.second; + if ((uniform.visibility & visibility) == visibility) { + ret += programBuilder->getShaderVarDeclarations(uniform.variable, visibility); + ret += ";\n"; + } + } + for (const auto& sampler : samplers) { + if ((sampler.visibility & visibility) == visibility) { + ret += programBuilder->getShaderVarDeclarations(sampler.variable, visibility); + ret += ";\n"; + } + } + return ret; +} + +void GLUniformHandler::resolveUniformLocations(unsigned programID) { + auto gl = GLFunctions::Get(programBuilder->getContext()); + for (auto& item : uniformMap) { + auto& uniform = item.second; + uniform.location = gl->getUniformLocation(programID, uniform.variable.name().c_str()); + } + for (auto& sampler : samplers) { + sampler.location = gl->getUniformLocation(programID, sampler.variable.name().c_str()); + } +} + +std::unique_ptr GLUniformHandler::makeUniformBuffer() const { + std::vector uniforms = {}; + std::vector locations = {}; + for (auto& item : uniformMap) { + std::optional type; + auto& uniform = item.second; + switch (uniform.variable.type()) { + case SLType::Float: + type = Uniform::Type::Float; + break; + case SLType::Float2: + type = Uniform::Type::Float2; + break; + case SLType::Float3: + type = Uniform::Type::Float3; + break; + case SLType::Float4: + type = Uniform::Type::Float4; + break; + case SLType::Float2x2: + type = Uniform::Type::Float2x2; + break; + case SLType::Float3x3: + type = Uniform::Type::Float3x3; + break; + case SLType::Float4x4: + type = Uniform::Type::Float4x4; 
+ break; + case SLType::Int: + type = Uniform::Type::Int; + break; + case SLType::Int2: + type = Uniform::Type::Int2; + break; + case SLType::Int3: + type = Uniform::Type::Int3; + break; + case SLType::Int4: + type = Uniform::Type::Int4; + break; + default: + type = std::nullopt; + break; + } + if (type.has_value()) { + uniforms.push_back({item.first, *type}); + locations.push_back(uniform.location); + } + } + return std::make_unique(std::move(uniforms), std::move(locations)); +} +} // namespace tgfx diff --git a/src/opengl/GLUniformHandler.h b/src/opengl/GLUniformHandler.h new file mode 100644 index 00000000..cd7d5ee9 --- /dev/null +++ b/src/opengl/GLUniformHandler.h @@ -0,0 +1,64 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Swizzle.h" +#include "gpu/UniformHandler.h" +#include "opengl/GLUniformBuffer.h" + +namespace tgfx { +static constexpr int UNUSED_UNIFORM = -1; + +struct GLUniform { + ShaderVar variable; + ShaderFlags visibility = ShaderFlags::None; + int location = UNUSED_UNIFORM; +}; + +class GLUniformHandler : public UniformHandler { + private: + explicit GLUniformHandler(ProgramBuilder* program) : UniformHandler(program) { + } + + std::string internalAddUniform(ShaderFlags visibility, SLType type, const std::string& name, + bool mangleName) override; + + SamplerHandle addSampler(const TextureSampler* sampler, const std::string& name) override; + + const ShaderVar& samplerVariable(SamplerHandle handle) const override { + return samplers[handle.toIndex()].variable; + } + + const Swizzle& samplerSwizzle(SamplerHandle handle) const override { + return samplerSwizzles[handle.toIndex()]; + } + + std::string getUniformDeclarations(ShaderFlags visibility) const override; + + void resolveUniformLocations(unsigned programID); + + std::unique_ptr makeUniformBuffer() const; + + std::unordered_map uniformMap; + std::vector samplers; + std::vector samplerSwizzles; + + friend class GLProgramBuilder; +}; +} // namespace tgfx diff --git a/src/opengl/GLUtil.cpp b/src/opengl/GLUtil.cpp new file mode 100644 index 00000000..6d217d6e --- /dev/null +++ b/src/opengl/GLUtil.cpp @@ -0,0 +1,157 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLUtil.h" +#include "utils/USE.h" + +namespace tgfx { +PixelFormat GLSizeFormatToPixelFormat(unsigned sizeFormat) { + switch (sizeFormat) { + case GL_BGRA: + case GL_BGRA8: + return PixelFormat::BGRA_8888; + case GL_R8: + case GL_RED: + case GL_ALPHA8: + case GL_ALPHA: + return PixelFormat::ALPHA_8; + case GL_LUMINANCE8: + case GL_LUMINANCE: + return PixelFormat::GRAY_8; + case GL_RG8: + case GL_RG: + return PixelFormat::RG_88; + } + return PixelFormat::RGBA_8888; +} + +unsigned PixelFormatToGLSizeFormat(PixelFormat pixelFormat) { + switch (pixelFormat) { + case PixelFormat::ALPHA_8: + return GL_ALPHA8; + case PixelFormat::GRAY_8: + return GL_LUMINANCE8; + case PixelFormat::RG_88: + return GL_RG8; + case PixelFormat::BGRA_8888: + return GL_BGRA8; + default: + break; + } + return GL_RGBA8; +} + +GLVersion GetGLVersion(const char* versionString) { + if (versionString == nullptr) { + return {}; + } + int major, minor; + int mesaMajor, mesaMinor; + int n = sscanf(versionString, "%d.%d Mesa %d.%d", &major, &minor, &mesaMajor, &mesaMinor); + if (4 == n) { + return {major, minor}; + } + n = sscanf(versionString, "%d.%d", &major, &minor); + if (2 == n) { + return {major, minor}; + } + int esMajor, esMinor; + n = sscanf(versionString, "OpenGL ES %d.%d (WebGL %d.%d", &esMajor, &esMinor, &major, &minor); + if (4 == n) { + return {major, minor}; + } + char profile[2]; + n = sscanf(versionString, "OpenGL ES-%c%c %d.%d", profile, profile + 1, &major, &minor); + if (4 == n) { + return {major, minor}; + } + n = sscanf(versionString, "OpenGL ES %d.%d", &major, &minor); + if (2 == n) { + return {major, minor}; + } + return {}; +} + +unsigned CreateGLProgram(Context* context, const std::string& vertex, const std::string& fragment) { + auto vertexShader = LoadGLShader(context, GL_VERTEX_SHADER, vertex); + if (vertexShader == 0) { + return 0; + } + auto fragmentShader = LoadGLShader(context, GL_FRAGMENT_SHADER, fragment); + if (fragmentShader == 0) { + return 0; + } + auto gl = GLFunctions::Get(context); + auto programHandle = gl->createProgram(); + gl->attachShader(programHandle, vertexShader); + gl->attachShader(programHandle, fragmentShader); + gl->linkProgram(programHandle); + int success; + gl->getProgramiv(programHandle, GL_LINK_STATUS, &success); + if (!success) { + char infoLog[512]; + gl->getProgramInfoLog(programHandle, 512, nullptr, infoLog); + gl->deleteProgram(programHandle); + } + gl->deleteShader(vertexShader); + gl->deleteShader(fragmentShader); + return programHandle; +} + +unsigned LoadGLShader(Context* context, unsigned shaderType, const std::string& source) { + auto gl = GLFunctions::Get(context); + auto shader = gl->createShader(shaderType); + const char* files[] = {source.c_str()}; + gl->shaderSource(shader, 1, files, nullptr); + gl->compileShader(shader); +#ifndef TGFX_BUILD_FOR_WEB + int success; + gl->getShaderiv(shader, GL_COMPILE_STATUS, &success); + if (!success) { + char infoLog[512]; + gl->getShaderInfoLog(shader, 512, nullptr, infoLog); + LOGE("Could not 
compile shader: %d %s", shaderType, infoLog); + gl->deleteShader(shader); + shader = 0; + } +#endif + return shader; +} + +bool CheckGLErrorImpl(Context* context, std::string file, int line) { +#ifdef TGFX_BUILD_FOR_WEB + USE(context); + USE(file); + USE(line); + return true; +#else + auto gl = GLFunctions::Get(context); + bool success = true; + unsigned errorCode; + while ((errorCode = gl->getError()) != GL_NO_ERROR) { + success = false; + if (file.empty()) { + LOGE("CheckGLError: %d", errorCode); + } else { + LOGE("CheckGLError: %d at %s:%d", errorCode, file.c_str(), line); + } + } + return success; +#endif +} +} // namespace tgfx diff --git a/src/opengl/GLUtil.h b/src/opengl/GLUtil.h new file mode 100644 index 00000000..7c1fde40 --- /dev/null +++ b/src/opengl/GLUtil.h @@ -0,0 +1,60 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include "opengl/GLContext.h" +#include "opengl/GLSampler.h" +#include "tgfx/core/Matrix.h" +#include "tgfx/gpu/ImageOrigin.h" + +namespace tgfx { +struct GLVersion { + int majorVersion = -1; + int minorVersion = -1; + + GLVersion() = default; + + GLVersion(int major, int minor) : majorVersion(major), minorVersion(minor) { + } +}; + +PixelFormat GLSizeFormatToPixelFormat(unsigned sizeFormat); + +unsigned PixelFormatToGLSizeFormat(PixelFormat pixelFormat); + +GLVersion GetGLVersion(const char* versionString); + +unsigned CreateGLProgram(Context* context, const std::string& vertex, const std::string& fragment); + +unsigned LoadGLShader(Context* context, unsigned shaderType, const std::string& source); + +bool CheckGLErrorImpl(Context* context, std::string file, int line); + +#ifdef DEBUG + +#define CheckGLError(context) CheckGLErrorImpl(context, __FILE__, __LINE__) + +#else + +#define CheckGLError(context) CheckGLErrorImpl(context, "", 0) + +#endif +} // namespace tgfx diff --git a/src/opengl/GLVertexShaderBuilder.cpp b/src/opengl/GLVertexShaderBuilder.cpp new file mode 100644 index 00000000..6157bd94 --- /dev/null +++ b/src/opengl/GLVertexShaderBuilder.cpp @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
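// ---- Editorial aside (not part of the patch) ----------------------------------------------
// GetGLVersion() above pattern-matches a handful of GL_VERSION string layouts with sscanf.
// Expected results for a few representative strings, per the parsing order shown (a sketch,
// assuming the in-tree GLUtil.h header):
#include <cassert>
#include "GLUtil.h"

inline void GLVersionExamples() {
  assert(tgfx::GetGLVersion("4.1 Metal - 76.3").majorVersion == 4);           // desktop GL
  assert(tgfx::GetGLVersion("OpenGL ES 3.2").majorVersion == 3);              // OpenGL ES
  assert(tgfx::GetGLVersion("OpenGL ES 3.0 (WebGL 2.0)").majorVersion == 2);  // WebGL version wins
  assert(tgfx::GetGLVersion(nullptr).majorVersion == -1);                     // invalid input
}
// --------------------------------------------------------------------------------------------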
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLVertexShaderBuilder.h" + +namespace tgfx { +GLVertexShaderBuilder::GLVertexShaderBuilder(ProgramBuilder* program) + : VertexShaderBuilder(program) { + setPrecisionQualifier("precision mediump float;"); +} + +void GLVertexShaderBuilder::emitNormalizedPosition(const std::string& devPos) { + codeAppendf("gl_Position = vec4(%s.xy * %s.xz + %s.yw, 0, 1);", devPos.c_str(), + RTAdjustName.c_str(), RTAdjustName.c_str()); +} +} // namespace tgfx diff --git a/src/opengl/GLVertexShaderBuilder.h b/src/opengl/GLVertexShaderBuilder.h new file mode 100644 index 00000000..94ebd6af --- /dev/null +++ b/src/opengl/GLVertexShaderBuilder.h @@ -0,0 +1,30 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/VertexShaderBuilder.h" + +namespace tgfx { +class GLVertexShaderBuilder : public VertexShaderBuilder { + public: + explicit GLVertexShaderBuilder(ProgramBuilder* program); + + void emitNormalizedPosition(const std::string& devPos) override; +}; +} // namespace tgfx diff --git a/src/opengl/cgl/CGLDevice.mm b/src/opengl/cgl/CGLDevice.mm new file mode 100644 index 00000000..d4b45d54 --- /dev/null +++ b/src/opengl/cgl/CGLDevice.mm @@ -0,0 +1,132 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
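// ---- Editorial aside (not part of the patch) ----------------------------------------------
// emitNormalizedPosition() above emits gl_Position = vec4(devPos.xy * rtAdjust.xz + rtAdjust.yw,
// 0, 1), so rtAdjust packs a per-axis scale (components x, z) and translate (components y, w).
// A worked example of that arithmetic for an 800x600 target with a top-left device origin and a
// Y-flip into NDC; the exact values tgfx uploads for RTAdjustName may differ.
#include <cassert>
#include <cmath>

inline void RTAdjustSketch() {
  const float width = 800.0f;
  const float height = 600.0f;
  // x: [0, width] -> [-1, 1]; y: [0, height] -> [1, -1] (flipped).
  const float rtAdjust[4] = {2.0f / width, -1.0f, -2.0f / height, 1.0f};
  const float devX = 400.0f, devY = 0.0f;  // centre of the top edge
  float ndcX = devX * rtAdjust[0] + rtAdjust[1];
  float ndcY = devY * rtAdjust[2] + rtAdjust[3];
  assert(std::abs(ndcX - 0.0f) < 1e-5f);
  assert(std::abs(ndcY - 1.0f) < 1e-5f);
}
// --------------------------------------------------------------------------------------------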
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/cgl/CGLDevice.h" + +namespace tgfx { +void* GLDevice::CurrentNativeHandle() { + return CGLGetCurrentContext(); +} + +std::shared_ptr GLDevice::Current() { + return CGLDevice::Wrap(CGLGetCurrentContext(), true); +} + +std::shared_ptr GLDevice::Make(void* sharedContext) { + CGLPixelFormatObj format = nullptr; + CGLContextObj cglShareContext = reinterpret_cast(sharedContext); + if (cglShareContext == nullptr) { + const CGLPixelFormatAttribute attributes[] = { + kCGLPFAStencilSize, (CGLPixelFormatAttribute)8, + kCGLPFAAccelerated, kCGLPFADoubleBuffer, + kCGLPFAOpenGLProfile, (CGLPixelFormatAttribute)kCGLOGLPVersion_3_2_Core, + (CGLPixelFormatAttribute)0}; + GLint npix = 0; + CGLChoosePixelFormat(attributes, &format, &npix); + } else { + format = CGLGetPixelFormat(cglShareContext); + } + CGLContextObj cglContext = nullptr; + CGLCreateContext(format, cglShareContext, &cglContext); + if (cglShareContext == nullptr) { + CGLDestroyPixelFormat(format); + } + if (cglContext == nullptr) { + return nullptr; + } + GLint opacity = 0; + CGLSetParameter(cglContext, kCGLCPSurfaceOpacity, &opacity); + auto device = CGLDevice::Wrap(cglContext, false); + CGLReleaseContext(cglContext); + return device; +} + +std::shared_ptr CGLDevice::MakeAdopted(CGLContextObj cglContext) { + return CGLDevice::Wrap(cglContext, true); +} + +std::shared_ptr CGLDevice::Wrap(CGLContextObj cglContext, bool isAdopted) { + if (cglContext == nil) { + return nullptr; + } + auto glDevice = GLDevice::Get(cglContext); + if (glDevice) { + return std::static_pointer_cast(glDevice); + } + auto oldCGLContext = CGLGetCurrentContext(); + if (oldCGLContext != cglContext) { + CGLSetCurrentContext(cglContext); + if (CGLGetCurrentContext() != cglContext) { + return nullptr; + } + } + auto device = std::shared_ptr(new CGLDevice(cglContext)); + device->isAdopted = isAdopted; + device->weakThis = device; + if (oldCGLContext != cglContext) { + CGLSetCurrentContext(oldCGLContext); + } + return device; +} + +CGLDevice::CGLDevice(CGLContextObj cglContext) : GLDevice(cglContext) { + glContext = [[NSOpenGLContext alloc] initWithCGLContextObj:cglContext]; +} + +CGLDevice::~CGLDevice() { + releaseAll(); + if (textureCache != nil) { + CFRelease(textureCache); + textureCache = nil; + } + [glContext release]; +} + +bool CGLDevice::sharableWith(void* nativeContext) const { + if (nativeContext == nullptr) { + return false; + } + auto shareContext = static_cast(nativeContext); + return CGLGetShareGroup(shareContext) == CGLGetShareGroup(glContext.CGLContextObj); +} + +CGLContextObj CGLDevice::cglContext() const { + return glContext.CGLContextObj; +} + +CVOpenGLTextureCacheRef CGLDevice::getTextureCache() { + if (!textureCache) { + auto pixelFormatObj = CGLGetPixelFormat(glContext.CGLContextObj); + CVOpenGLTextureCacheCreate(kCFAllocatorDefault, NULL, glContext.CGLContextObj, pixelFormatObj, + NULL, &textureCache); + } + return textureCache; +} + +bool CGLDevice::onMakeCurrent() { + oldContext = CGLGetCurrentContext(); + CGLRetainContext(oldContext); + [glContext makeCurrentContext]; + return [NSOpenGLContext currentContext] == glContext; +} + +void CGLDevice::onClearCurrent() { + CGLSetCurrentContext(oldContext); + CGLReleaseContext(oldContext); +} +} // namespace tgfx diff --git a/src/opengl/cgl/CGLHardwareBuffer.mm b/src/opengl/cgl/CGLHardwareBuffer.mm new file mode 100644 index 00000000..34c4ac11 --- /dev/null +++ 
b/src/opengl/cgl/CGLHardwareBuffer.mm @@ -0,0 +1,39 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "CGLHardwareTexture.h" +#include "tgfx/opengl/cgl/CGLDevice.h" + +namespace tgfx { +bool HardwareBufferAvailable() { + return true; +} + +std::shared_ptr Texture::MakeFrom(Context* context, HardwareBufferRef hardwareBuffer, + YUVColorSpace) { + if (!HardwareBufferCheck(hardwareBuffer)) { + return nullptr; + } + auto cglDevice = static_cast(context->device()); + if (cglDevice == nullptr) { + return nullptr; + } + auto textureCache = cglDevice->getTextureCache(); + return CGLHardwareTexture::MakeFrom(context, hardwareBuffer, textureCache); +} +} // namespace tgfx diff --git a/src/opengl/cgl/CGLHardwareTexture.h b/src/opengl/cgl/CGLHardwareTexture.h new file mode 100644 index 00000000..4c1625d2 --- /dev/null +++ b/src/opengl/cgl/CGLHardwareTexture.h @@ -0,0 +1,55 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
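// ---- Editorial aside (not part of the patch) ----------------------------------------------
// Typical driver code for the CGLDevice defined above: create an offscreen GL device, lock its
// context to do GPU work, then unlock. This sketch assumes the lockContext()/unlock() entry
// points on the device base class; error handling is minimal.
#include "tgfx/opengl/cgl/CGLDevice.h"

inline bool DoGPUWork() {
  auto device = tgfx::GLDevice::Make(nullptr);  // CGL-backed on macOS, per GLDevice::Make() above
  if (device == nullptr) {
    return false;
  }
  auto context = device->lockContext();  // makes the CGL context current on this thread
  if (context == nullptr) {
    return false;
  }
  // ... create resources / issue rendering through the returned Context ...
  device->unlock();  // restores whichever context was current before
  return true;
}
// --------------------------------------------------------------------------------------------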
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/Texture.h" +#include "gpu/TextureSampler.h" + +namespace tgfx { +class CGLHardwareTexture : public Texture { + public: + static std::shared_ptr MakeFrom(Context* context, + CVPixelBufferRef pixelBuffer, + CVOpenGLTextureCacheRef textureCache); + + explicit CGLHardwareTexture(CVPixelBufferRef pixelBuffer); + + ~CGLHardwareTexture() override; + + size_t memoryUsage() const override; + + const TextureSampler* getSampler() const override { + return sampler.get(); + } + + protected: + void computeScratchKey(BytesKey* scratchKey) const override; + + private: + std::unique_ptr sampler = {}; + CVPixelBufferRef pixelBuffer = nullptr; + CVOpenGLTextureRef texture = nil; + CVOpenGLTextureCacheRef textureCache = nil; + + static void ComputeScratchKey(BytesKey* scratchKey, CVPixelBufferRef pixelBuffer); + + void onReleaseGPU() override; +}; +} // namespace tgfx diff --git a/src/opengl/cgl/CGLHardwareTexture.mm b/src/opengl/cgl/CGLHardwareTexture.mm new file mode 100644 index 00000000..ad5702e8 --- /dev/null +++ b/src/opengl/cgl/CGLHardwareTexture.mm @@ -0,0 +1,100 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "CGLHardwareTexture.h" +#include "opengl/GLSampler.h" +#include "utils/UniqueID.h" + +namespace tgfx { +std::shared_ptr CGLHardwareTexture::MakeFrom( + Context* context, CVPixelBufferRef pixelBuffer, CVOpenGLTextureCacheRef textureCache) { + ScratchKey scratchKey = {}; + ComputeScratchKey(&scratchKey, pixelBuffer); + auto glTexture = std::static_pointer_cast( + context->resourceCache()->findScratchResource(scratchKey)); + if (glTexture) { + return glTexture; + } + if (textureCache == nil) { + return nullptr; + } + CVOpenGLTextureRef texture = nil; + CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, + nullptr, &texture); + if (texture == nil) { + return nullptr; + } + auto glSampler = std::make_unique(); + glSampler->target = CVOpenGLTextureGetTarget(texture); + glSampler->id = CVOpenGLTextureGetName(texture); + glSampler->format = + CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_OneComponent8 + ? 
PixelFormat::ALPHA_8
+          : PixelFormat::RGBA_8888;
+  glTexture = Resource::Wrap(context, new CGLHardwareTexture(pixelBuffer));
+  glTexture->sampler = std::move(glSampler);
+  glTexture->texture = texture;
+  glTexture->textureCache = textureCache;
+  CFRetain(textureCache);
+
+  return glTexture;
+}
+
+void CGLHardwareTexture::ComputeScratchKey(BytesKey* scratchKey, CVPixelBufferRef pixelBuffer) {
+  static const uint32_t BGRAType = UniqueID::Next();
+  scratchKey->write(BGRAType);
+  // The pointer can be used directly as the key here because the cache holder retains the
+  // CVPixelBuffer. As long as that cached holder exists, the CVPixelBuffer pointer stays valid
+  // and its address cannot be reused by a newly created object. In other cases, avoid using raw
+  // pointers as keys.
+  scratchKey->write(pixelBuffer);
+}
+
+CGLHardwareTexture::CGLHardwareTexture(CVPixelBufferRef pixelBuffer)
+    : Texture(static_cast<int>(CVPixelBufferGetWidth(pixelBuffer)),
+              static_cast<int>(CVPixelBufferGetHeight(pixelBuffer)), ImageOrigin::TopLeft),
+      pixelBuffer(pixelBuffer) {
+  CFRetain(pixelBuffer);
+}
+
+CGLHardwareTexture::~CGLHardwareTexture() {
+  CFRelease(pixelBuffer);
+  if (texture != nil) {
+    CFRelease(texture);
+    CFRelease(textureCache);
+  }
+}
+
+size_t CGLHardwareTexture::memoryUsage() const {
+  return CVPixelBufferGetDataSize(pixelBuffer);
+}
+
+void CGLHardwareTexture::computeScratchKey(BytesKey* scratchKey) const {
+  ComputeScratchKey(scratchKey, pixelBuffer);
+}
+
+void CGLHardwareTexture::onReleaseGPU() {
+  if (texture == nil) {
+    return;
+  }
+  CFRelease(texture);
+  texture = nil;
+  CVOpenGLTextureCacheFlush(textureCache, 0);
+  CFRelease(textureCache);
+  textureCache = nil;
+}
+} // namespace tgfx
diff --git a/src/opengl/cgl/CGLProcGetter.h b/src/opengl/cgl/CGLProcGetter.h
new file mode 100644
index 00000000..df0954f4
--- /dev/null
+++ b/src/opengl/cgl/CGLProcGetter.h
@@ -0,0 +1,35 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#pragma once
+
+#include "opengl/GLProcGetter.h"
+
+namespace tgfx {
+class CGLProcGetter : public GLProcGetter {
+ public:
+  CGLProcGetter();
+
+  ~CGLProcGetter() override;
+
+  void* getProcAddress(const char name[]) const override;
+
+ private:
+  void* fLibrary;
+};
+} // namespace tgfx
diff --git a/src/opengl/cgl/CGLProcGetter.mm b/src/opengl/cgl/CGLProcGetter.mm
new file mode 100644
index 00000000..5278f724
--- /dev/null
+++ b/src/opengl/cgl/CGLProcGetter.mm
@@ -0,0 +1,42 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "CGLProcGetter.h" +#include + +namespace tgfx { +CGLProcGetter::CGLProcGetter() { + fLibrary = dlopen("/System/Library/Frameworks/OpenGL.framework/Versions/A/Libraries/libGL.dylib", + RTLD_LAZY); +} + +CGLProcGetter::~CGLProcGetter() { + if (fLibrary) { + dlclose(fLibrary); + } +} + +void* CGLProcGetter::getProcAddress(const char* name) const { + auto handle = fLibrary ? fLibrary : RTLD_DEFAULT; + return dlsym(handle, name); +} + +std::unique_ptr GLProcGetter::Make() { + return std::make_unique(); +} +} // namespace tgfx diff --git a/src/opengl/cgl/CGLWindow.mm b/src/opengl/cgl/CGLWindow.mm new file mode 100644 index 00000000..734b4364 --- /dev/null +++ b/src/opengl/cgl/CGLWindow.mm @@ -0,0 +1,72 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/cgl/CGLWindow.h" +#include +#include "tgfx/gpu/Backend.h" +#include "tgfx/opengl/GLDefines.h" + +namespace tgfx { +std::shared_ptr CGLWindow::MakeFrom(NSView* view, CGLContextObj sharedContext) { + if (view == nil) { + return nullptr; + } + auto device = GLDevice::Make(sharedContext); + if (device == nullptr) { + return nullptr; + } + return std::shared_ptr(new CGLWindow(device, view)); +} + +CGLWindow::CGLWindow(std::shared_ptr device, NSView* view) + : Window(std::move(device)), view(view) { + // do not retain view here, otherwise it can cause circular reference. 
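+  // A minimal usage sketch (hypothetical caller code, not part of this change): the caller keeps
+  // the NSView alive and owns the returned window, so no retain cycle can form:
+  //
+  //   auto window = tgfx::CGLWindow::MakeFrom(view, nullptr);  // view must outlive the window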
+} + +CGLWindow::~CGLWindow() { + auto glContext = static_cast(device.get())->glContext; +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + [glContext setView:nil]; +#pragma clang diagnostic pop + view = nil; +} + +std::shared_ptr CGLWindow::onCreateSurface(Context* context) { + auto glContext = static_cast(device.get())->glContext; + [glContext update]; + CGSize size = [view convertSizeToBacking:view.bounds.size]; + if (size.width <= 0 || size.height <= 0) { + return nullptr; + } +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + [glContext setView:view]; +#pragma clang diagnostic pop + GLFrameBufferInfo frameBuffer = {}; + frameBuffer.id = 0; + frameBuffer.format = GL_RGBA8; + BackendRenderTarget renderTarget(frameBuffer, size.width, size.height); + return Surface::MakeFrom(context, renderTarget, ImageOrigin::BottomLeft); +} + +void CGLWindow::onPresent(Context*, int64_t) { + auto glContext = static_cast(device.get())->glContext; + [glContext flushBuffer]; +} +} // namespace tgfx diff --git a/src/opengl/eagl/EAGLDevice.mm b/src/opengl/eagl/EAGLDevice.mm new file mode 100644 index 00000000..8354dc37 --- /dev/null +++ b/src/opengl/eagl/EAGLDevice.mm @@ -0,0 +1,259 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/eagl/EAGLDevice.h" +#import +#import +#include "EAGLProcGetter.h" + +namespace tgfx { +static std::mutex deviceLocker = {}; +static std::vector deviceList = {}; +static std::vector delayPurgeList = {}; +static std::atomic_bool appInBackground = {true}; + +void ApplicationWillResignActive() { + // Set applicationInBackground to true first to ensure that no new GL operation is generated + // during the callback process. 
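+  // The loop below then calls finish() on every live device while holding deviceLocker, so any
+  // pending GL commands are flushed with glFinish() before iOS suspends the process.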
+ appInBackground = true; + std::lock_guard autoLock(deviceLocker); + for (auto& device : deviceList) { + device->finish(); + } +} + +void ApplicationDidBecomeActive() { + appInBackground = false; + std::vector delayList = {}; + deviceLocker.lock(); + std::swap(delayList, delayPurgeList); + deviceLocker.unlock(); + for (auto& device : delayList) { + delete device; + } +} + +void* GLDevice::CurrentNativeHandle() { + return [EAGLContext currentContext]; +} + +std::shared_ptr GLDevice::Current() { + if (appInBackground) { + return nullptr; + } + return EAGLDevice::Wrap([EAGLContext currentContext], true); +} + +std::shared_ptr GLDevice::Make(void* sharedContext) { + if (appInBackground) { + return nullptr; + } + EAGLContext* eaglContext = nil; + EAGLContext* eaglShareContext = reinterpret_cast(sharedContext); + if (eaglShareContext != nil) { + eaglContext = [[EAGLContext alloc] initWithAPI:[eaglShareContext API] + sharegroup:[eaglShareContext sharegroup]]; + } else { + eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3]; + if (eaglContext == nil) { + eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; + } + } + if (eaglContext == nil) { + return nullptr; + } + auto device = EAGLDevice::Wrap(eaglContext, false); + [eaglContext release]; + return device; +} + +std::shared_ptr EAGLDevice::MakeAdopted(EAGLContext* eaglContext) { + if (appInBackground) { + return nullptr; + } + return EAGLDevice::Wrap(eaglContext, true); +} + +std::shared_ptr EAGLDevice::Wrap(EAGLContext* eaglContext, bool isAdopted) { + if (eaglContext == nil) { + return nullptr; + } + auto glDevice = GLDevice::Get(eaglContext); + if (glDevice) { + return std::static_pointer_cast(glDevice); + } + auto oldEAGLContext = [[EAGLContext currentContext] retain]; + if (oldEAGLContext != eaglContext) { + auto result = [EAGLContext setCurrentContext:eaglContext]; + if (!result) { + [oldEAGLContext release]; + return nullptr; + } + } + auto device = std::shared_ptr(new EAGLDevice(eaglContext), + EAGLDevice::NotifyReferenceReachedZero); + device->isAdopted = isAdopted; + device->weakThis = device; + if (oldEAGLContext != eaglContext) { + [EAGLContext setCurrentContext:oldEAGLContext]; + } + [oldEAGLContext release]; + return device; +} + +void EAGLDevice::NotifyReferenceReachedZero(EAGLDevice* device) { + if (!appInBackground) { + delete device; + return; + } + std::lock_guard autoLock(deviceLocker); + delayPurgeList.push_back(device); +} + +EAGLDevice::EAGLDevice(EAGLContext* eaglContext) + : GLDevice(eaglContext), _eaglContext(eaglContext) { + [_eaglContext retain]; + std::lock_guard autoLock(deviceLocker); + auto index = deviceList.size(); + deviceList.push_back(this); + cacheArrayIndex = index; +} + +EAGLDevice::~EAGLDevice() { + { + std::lock_guard autoLock(deviceLocker); + auto tail = *(deviceList.end() - 1); + auto index = cacheArrayIndex; + deviceList[index] = tail; + tail->cacheArrayIndex = index; + deviceList.pop_back(); + } + releaseAll(); + if (textureCache != nil) { + CFRelease(textureCache); + textureCache = nil; + } + [_eaglContext release]; +} + +bool EAGLDevice::sharableWith(void* nativeContext) const { + if (nativeContext == nullptr) { + return false; + } + auto shareContext = static_cast(nativeContext); + return [_eaglContext sharegroup] == [shareContext sharegroup]; +} + +EAGLContext* EAGLDevice::eaglContext() const { + return _eaglContext; +} + +CVOpenGLESTextureCacheRef EAGLDevice::getTextureCache() { + if (!textureCache) { + CFMutableDictionaryRef attrs = 
CFDictionaryCreateMutable(
+        kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
+    CFDictionarySetValue(attrs, kCVOpenGLESTextureCacheMaximumTextureAgeKey,
+                         [NSNumber numberWithFloat:0]);
+    CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, attrs, _eaglContext, NULL, &textureCache);
+    CFRelease(attrs);
+  }
+  return textureCache;
+}
+
+void EAGLDevice::releaseTexture(CVOpenGLESTextureRef texture) {
+  if (texture == nil || textureCache == nil) {
+    return;
+  }
+  CFRelease(texture);
+  CVOpenGLESTextureCacheFlush(textureCache, 0);
+}
+
+bool EAGLDevice::onMakeCurrent() {
+  return makeCurrent();
+}
+
+void EAGLDevice::onClearCurrent() {
+  clearCurrent();
+}
+
+bool EAGLDevice::makeCurrent(bool force) {
+  if (!force && appInBackground) {
+    return false;
+  }
+  oldContext = [[EAGLContext currentContext] retain];
+  if (oldContext == _eaglContext) {
+    return true;
+  }
+  if (![EAGLContext setCurrentContext:_eaglContext]) {
+    [oldContext release];
+    oldContext = nil;
+    return false;
+  }
+  return true;
+}
+
+void EAGLDevice::clearCurrent() {
+  if (oldContext == _eaglContext) {
+    [oldContext release];
+    oldContext = nil;
+    return;
+  }
+  [EAGLContext setCurrentContext:nil];
+  if (oldContext) {
+    // This may fail.
+    [EAGLContext setCurrentContext:oldContext];
+  }
+  [oldContext release];
+  oldContext = nil;
+}
+
+void EAGLDevice::finish() {
+  std::lock_guard autoLock(locker);
+  if (makeCurrent(true)) {
+    glFinish();
+    clearCurrent();
+  }
+}
+} // namespace tgfx
+
+@interface TGFXAppMonitor : NSObject
+@end
+
+@implementation TGFXAppMonitor
++ (void)applicationWillResignActive:(NSNotification*)notification {
+  tgfx::ApplicationWillResignActive();
+}
+
++ (void)applicationDidBecomeActive:(NSNotification*)notification {
+  tgfx::ApplicationDidBecomeActive();
+}
+@end
+
+static bool RegisterNotifications() {
+  [[NSNotificationCenter defaultCenter] addObserver:[TGFXAppMonitor class]
+                                           selector:@selector(applicationWillResignActive:)
+                                               name:UIApplicationWillResignActiveNotification
+                                             object:nil];
+  [[NSNotificationCenter defaultCenter] addObserver:[TGFXAppMonitor class]
+                                           selector:@selector(applicationDidBecomeActive:)
+                                               name:UIApplicationDidBecomeActiveNotification
+                                             object:nil];
+  return true;
+}
+
+static bool registered = RegisterNotifications();
diff --git a/src/opengl/eagl/EAGLHardwareBuffer.mm b/src/opengl/eagl/EAGLHardwareBuffer.mm
new file mode 100644
index 00000000..d649771f
--- /dev/null
+++ b/src/opengl/eagl/EAGLHardwareBuffer.mm
@@ -0,0 +1,45 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#import +#include "EAGLHardwareTexture.h" +#include "EAGLNV12Texture.h" +#include "core/PixelBuffer.h" +#include "platform/apple/NV12HardwareBuffer.h" + +namespace tgfx { +bool HardwareBufferAvailable() { +#if TARGET_IPHONE_SIMULATOR + return false; +#else + return true; +#endif +} + +std::shared_ptr Texture::MakeFrom(Context* context, HardwareBufferRef hardwareBuffer, + YUVColorSpace colorSpace) { + if (!HardwareBufferCheck(hardwareBuffer)) { + return nullptr; + } + auto planeCount = CVPixelBufferGetPlaneCount(hardwareBuffer); + if (planeCount > 1) { + return EAGLNV12Texture::MakeFrom(context, hardwareBuffer, colorSpace); + } + return EAGLHardwareTexture::MakeFrom(context, hardwareBuffer); +} +} // namespace tgfx diff --git a/src/opengl/eagl/EAGLHardwareTexture.h b/src/opengl/eagl/EAGLHardwareTexture.h new file mode 100644 index 00000000..c52e560d --- /dev/null +++ b/src/opengl/eagl/EAGLHardwareTexture.h @@ -0,0 +1,56 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#import +#include "gpu/Texture.h" +#include "gpu/TextureSampler.h" + +namespace tgfx { +class EAGLHardwareTexture : public Texture { + public: + static std::shared_ptr MakeFrom(Context* context, + CVPixelBufferRef pixelBuffer); + + explicit EAGLHardwareTexture(CVPixelBufferRef pixelBuffer); + + ~EAGLHardwareTexture() override; + + size_t memoryUsage() const override; + + const TextureSampler* getSampler() const override { + return sampler.get(); + } + + HardwareBufferRef getHardwareBuffer() const override { + return pixelBuffer; + } + + protected: + void computeScratchKey(BytesKey* scratchKey) const override; + + private: + std::unique_ptr sampler = {}; + CVPixelBufferRef pixelBuffer = nullptr; + CVOpenGLESTextureRef texture = nil; + + static void ComputeScratchKey(BytesKey* scratchKey, CVPixelBufferRef pixelBuffer); + void onReleaseGPU() override; +}; +} // namespace tgfx diff --git a/src/opengl/eagl/EAGLHardwareTexture.mm b/src/opengl/eagl/EAGLHardwareTexture.mm new file mode 100644 index 00000000..8dc4fd3d --- /dev/null +++ b/src/opengl/eagl/EAGLHardwareTexture.mm @@ -0,0 +1,120 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "EAGLHardwareTexture.h"
+#include "opengl/GLCaps.h"
+#include "opengl/GLSampler.h"
+#include "tgfx/opengl/eagl/EAGLDevice.h"
+#include "utils/UniqueID.h"
+
+namespace tgfx {
+static CVOpenGLESTextureRef GetTextureRef(Context* context, CVPixelBufferRef pixelBuffer,
+                                          PixelFormat pixelFormat,
+                                          CVOpenGLESTextureCacheRef textureCache) {
+  if (textureCache == nil) {
+    return nil;
+  }
+  auto width = static_cast<int>(CVPixelBufferGetWidth(pixelBuffer));
+  auto height = static_cast<int>(CVPixelBufferGetHeight(pixelBuffer));
+  CVOpenGLESTextureRef texture = nil;
+  auto caps = GLCaps::Get(context);
+  const auto& format = caps->getTextureFormat(pixelFormat);
+  // The returned texture is a strong reference with a retain count of 1.
+  CVReturn result = CVOpenGLESTextureCacheCreateTextureFromImage(
+      kCFAllocatorDefault, textureCache, pixelBuffer, NULL, /* texture attributes */
+      GL_TEXTURE_2D, format.internalFormatTexImage, /* opengl format */
+      width, height, format.externalFormat, /* native iOS format */
+      GL_UNSIGNED_BYTE, 0, &texture);
+  if (result != kCVReturnSuccess && texture != nil) {
+    CFRelease(texture);
+    texture = nil;
+  }
+  return texture;
+}
+
+std::shared_ptr<EAGLHardwareTexture> EAGLHardwareTexture::MakeFrom(Context* context,
+                                                                   CVPixelBufferRef pixelBuffer) {
+  std::shared_ptr<EAGLHardwareTexture> glTexture = nullptr;
+  ScratchKey scratchKey = {};
+  ComputeScratchKey(&scratchKey, pixelBuffer);
+  glTexture = std::static_pointer_cast<EAGLHardwareTexture>(
+      context->resourceCache()->findScratchResource(scratchKey));
+  if (glTexture) {
+    return glTexture;
+  }
+  auto eaglDevice = static_cast<EAGLDevice*>(context->device());
+  if (eaglDevice == nullptr) {
+    return nullptr;
+  }
+
+  auto format = CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_OneComponent8
+                    ? PixelFormat::ALPHA_8
+                    : PixelFormat::BGRA_8888;
+  auto texture = GetTextureRef(context, pixelBuffer, format, eaglDevice->getTextureCache());
+  if (texture == nil) {
+    return nullptr;
+  }
+  auto sampler = std::make_unique<GLSampler>();
+  sampler->format = format;
+  sampler->target = CVOpenGLESTextureGetTarget(texture);
+  sampler->id = CVOpenGLESTextureGetName(texture);
+  glTexture = Resource::Wrap(context, new EAGLHardwareTexture(pixelBuffer));
+  glTexture->sampler = std::move(sampler);
+  glTexture->texture = texture;
+  return glTexture;
+}
+
+void EAGLHardwareTexture::ComputeScratchKey(BytesKey* scratchKey, CVPixelBufferRef pixelBuffer) {
+  static const uint32_t HardwareType = UniqueID::Next();
+  scratchKey->write(HardwareType);
+  // The pointer can be used directly as the key here because the cache holder retains the
+  // CVPixelBuffer. As long as that cached holder exists, the CVPixelBuffer pointer stays valid
+  // and its address cannot be reused by a newly created object. In other cases, avoid using raw
+  // pointers as keys.
+  scratchKey->write(pixelBuffer);
+}
+
+EAGLHardwareTexture::EAGLHardwareTexture(CVPixelBufferRef pixelBuffer)
+    : Texture(static_cast<int>(CVPixelBufferGetWidth(pixelBuffer)),
+              static_cast<int>(CVPixelBufferGetHeight(pixelBuffer)), ImageOrigin::TopLeft),
+      pixelBuffer(pixelBuffer) {
+  CFRetain(pixelBuffer);
+}
+
+EAGLHardwareTexture::~EAGLHardwareTexture() {
+  CFRelease(pixelBuffer);
+  if (texture) {
+    CFRelease(texture);
+  }
+}
+
+void EAGLHardwareTexture::computeScratchKey(BytesKey* scratchKey) const {
+  ComputeScratchKey(scratchKey, pixelBuffer);
+}
+
+size_t EAGLHardwareTexture::memoryUsage() const {
+  return CVPixelBufferGetDataSize(pixelBuffer);
+}
+
+void EAGLHardwareTexture::onReleaseGPU() {
+  if (texture == nil) {
+    return;
+  }
+  static_cast<EAGLDevice*>(context->device())->releaseTexture(texture);
+  texture = nil;
+}
+} // namespace tgfx
diff --git a/src/opengl/eagl/EAGLNV12Texture.h b/src/opengl/eagl/EAGLNV12Texture.h
new file mode 100644
index 00000000..6bab5c76
--- /dev/null
+++ b/src/opengl/eagl/EAGLNV12Texture.h
@@ -0,0 +1,45 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#pragma once
+
+#import
+#include "gpu/YUVTexture.h"
+
+namespace tgfx {
+class EAGLNV12Texture : public YUVTexture {
+ public:
+  static std::shared_ptr<EAGLNV12Texture> MakeFrom(Context* context, CVPixelBufferRef pixelBuffer,
+                                                   YUVColorSpace colorSpace);
+
+  EAGLNV12Texture(CVPixelBufferRef pixelBuffer, YUVColorSpace colorSpace);
+
+  ~EAGLNV12Texture() override;
+
+  HardwareBufferRef getHardwareBuffer() const override {
+    return pixelBuffer;
+  }
+
+ private:
+  CVPixelBufferRef pixelBuffer = nullptr;
+  CVOpenGLESTextureRef lumaTexture = nullptr;
+  CVOpenGLESTextureRef chromaTexture = nullptr;
+
+  void onReleaseGPU() override;
+};
+} // namespace tgfx
diff --git a/src/opengl/eagl/EAGLNV12Texture.mm b/src/opengl/eagl/EAGLNV12Texture.mm
new file mode 100644
index 00000000..608e6c81
--- /dev/null
+++ b/src/opengl/eagl/EAGLNV12Texture.mm
@@ -0,0 +1,110 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "EAGLNV12Texture.h"
+#include "opengl/GLCaps.h"
+#include "opengl/GLSampler.h"
+#include "tgfx/opengl/eagl/EAGLDevice.h"
+
+namespace tgfx {
+static std::unique_ptr<GLSampler> ToGLSampler(CVOpenGLESTextureRef texture, PixelFormat format) {
+  auto sampler = std::make_unique<GLSampler>();
+  sampler->target = CVOpenGLESTextureGetTarget(texture);
+  sampler->id = CVOpenGLESTextureGetName(texture);
+  sampler->format = format;
+  return sampler;
+}
+
+std::shared_ptr<EAGLNV12Texture> EAGLNV12Texture::MakeFrom(Context* context,
+                                                           CVPixelBufferRef pixelBuffer,
+                                                           YUVColorSpace colorSpace) {
+  auto glDevice = std::static_pointer_cast<EAGLDevice>(GLDevice::Current());
+  if (glDevice == nullptr) {
+    return nullptr;
+  }
+  CVOpenGLESTextureCacheRef textureCache = glDevice->getTextureCache();
+  if (textureCache == nil) {
+    return nullptr;
+  }
+  auto width = static_cast<int>(CVPixelBufferGetWidth(pixelBuffer));
+  auto height = static_cast<int>(CVPixelBufferGetHeight(pixelBuffer));
+  CVOpenGLESTextureRef outputTextureLuma = nil;
+  CVOpenGLESTextureRef outputTextureChroma = nil;
+  auto caps = GLCaps::Get(context);
+  auto lumaComponentFormat = PixelFormat::GRAY_8;
+  const auto& oneComponentFormat = caps->getTextureFormat(lumaComponentFormat);
+  // The returned texture is a strong reference with a retain count of 1.
+  CVOpenGLESTextureCacheCreateTextureFromImage(
+      kCFAllocatorDefault, textureCache, pixelBuffer, NULL, GL_TEXTURE_2D,
+      oneComponentFormat.internalFormatTexImage, width, height, oneComponentFormat.externalFormat,
+      GL_UNSIGNED_BYTE, 0, &outputTextureLuma);
+  auto chromaComponentFormat = PixelFormat::RG_88;
+  const auto& twoComponentFormat = caps->getTextureFormat(chromaComponentFormat);
+  // The returned texture is a strong reference with a retain count of 1.
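+  // For NV12, plane index 1 is the interleaved CbCr (chroma) plane at half the luma resolution,
+  // which is why the call below uses width / 2 and height / 2.
+ 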
CVOpenGLESTextureCacheCreateTextureFromImage( + kCFAllocatorDefault, textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, + twoComponentFormat.internalFormatTexImage, width / 2, height / 2, + twoComponentFormat.externalFormat, GL_UNSIGNED_BYTE, 1, &outputTextureChroma); + if (outputTextureLuma == nil || outputTextureChroma == nil) { + return nullptr; + } + auto texture = Resource::Wrap(context, new EAGLNV12Texture(pixelBuffer, colorSpace)); + texture->lumaTexture = outputTextureLuma; + texture->samplers.push_back(ToGLSampler(outputTextureLuma, lumaComponentFormat)); + texture->chromaTexture = outputTextureChroma; + texture->samplers.push_back(ToGLSampler(outputTextureChroma, chromaComponentFormat)); + return texture; +} + +EAGLNV12Texture::EAGLNV12Texture(CVPixelBufferRef pixelBuffer, YUVColorSpace colorSpace) + : YUVTexture(static_cast(CVPixelBufferGetWidth(pixelBuffer)), + static_cast(CVPixelBufferGetHeight(pixelBuffer)), YUVPixelFormat::NV12, + colorSpace), + pixelBuffer(pixelBuffer) { + CFRetain(pixelBuffer); +} + +EAGLNV12Texture::~EAGLNV12Texture() { + CFRelease(pixelBuffer); + if (lumaTexture) { + CFRelease(lumaTexture); + } + if (chromaTexture) { + CFRelease(chromaTexture); + } +} + +void EAGLNV12Texture::onReleaseGPU() { + if (lumaTexture == nil || chromaTexture == nil) { + return; + } + auto glDevice = std::static_pointer_cast(GLDevice::Current()); + if (glDevice == nullptr) { + return; + } + auto cache = glDevice->getTextureCache(); + if (!cache) { + return; + } + CFRelease(lumaTexture); + lumaTexture = nil; + CFRelease(chromaTexture); + chromaTexture = nil; + CVOpenGLESTextureCacheFlush(cache, 0); +} +} // namespace tgfx diff --git a/src/opengl/eagl/EAGLProcGetter.h b/src/opengl/eagl/EAGLProcGetter.h new file mode 100644 index 00000000..72c7c6d8 --- /dev/null +++ b/src/opengl/eagl/EAGLProcGetter.h @@ -0,0 +1,35 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "opengl/GLProcGetter.h" + +namespace tgfx { +class EAGLProcGetter : public GLProcGetter { + public: + EAGLProcGetter(); + + ~EAGLProcGetter() override; + + void* getProcAddress(const char name[]) const override; + + private: + void* fLibrary; +}; +} // namespace tgfx diff --git a/src/opengl/eagl/EAGLProcGetter.mm b/src/opengl/eagl/EAGLProcGetter.mm new file mode 100644 index 00000000..2f64b5cb --- /dev/null +++ b/src/opengl/eagl/EAGLProcGetter.mm @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "EAGLProcGetter.h" +#include + +namespace tgfx { +EAGLProcGetter::EAGLProcGetter() { + fLibrary = dlopen("/System/Library/Frameworks/OpenGLES.framework/OpenGLES", RTLD_LAZY); +} + +EAGLProcGetter::~EAGLProcGetter() { + if (fLibrary) { + dlclose(fLibrary); + } +} + +void* EAGLProcGetter::getProcAddress(const char* name) const { + auto handle = fLibrary ? fLibrary : RTLD_DEFAULT; + return dlsym(handle, name); +} + +std::unique_ptr GLProcGetter::Make() { + return std::make_unique(); +} +} // namespace tgfx diff --git a/src/opengl/eagl/EAGLWindow.mm b/src/opengl/eagl/EAGLWindow.mm new file mode 100644 index 00000000..21bfa22f --- /dev/null +++ b/src/opengl/eagl/EAGLWindow.mm @@ -0,0 +1,102 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/eagl/EAGLWindow.h" +#include "tgfx/opengl/GLFunctions.h" +#include "utils/Log.h" + +namespace tgfx { +std::shared_ptr EAGLWindow::MakeFrom(CAEAGLLayer* layer, + std::shared_ptr device) { + if (layer == nil) { + return nullptr; + } + if (device == nullptr) { + device = tgfx::GLDevice::MakeWithFallback(); + } + if (device == nullptr) { + return nullptr; + } + return std::shared_ptr(new EAGLWindow(device, layer)); +} + +EAGLWindow::EAGLWindow(std::shared_ptr device, CAEAGLLayer* layer) + : Window(std::move(device)), layer(layer) { + // do not retain layer here, otherwise it can cause circular reference. 
+} + +EAGLWindow::~EAGLWindow() { + auto context = device->lockContext(); + if (context) { + auto gl = GLFunctions::Get(context); + if (frameBufferID > 0) { + gl->deleteFramebuffers(1, &frameBufferID); + frameBufferID = 0; + } + if (colorBuffer) { + gl->deleteRenderbuffers(1, &colorBuffer); + colorBuffer = 0; + } + device->unlock(); + } +} + +std::shared_ptr EAGLWindow::onCreateSurface(Context* context) { + auto gl = GLFunctions::Get(context); + if (frameBufferID > 0) { + gl->deleteFramebuffers(1, &frameBufferID); + frameBufferID = 0; + } + if (colorBuffer) { + gl->deleteRenderbuffers(1, &colorBuffer); + colorBuffer = 0; + } + auto width = layer.bounds.size.width * layer.contentsScale; + auto height = layer.bounds.size.height * layer.contentsScale; + if (width <= 0 || height <= 0) { + return nullptr; + } + gl->genFramebuffers(1, &frameBufferID); + gl->bindFramebuffer(GL_FRAMEBUFFER, frameBufferID); + gl->genRenderbuffers(1, &colorBuffer); + gl->bindRenderbuffer(GL_RENDERBUFFER, colorBuffer); + gl->framebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorBuffer); + auto eaglContext = static_cast(context->device())->eaglContext(); + [eaglContext renderbufferStorage:GL_RENDERBUFFER fromDrawable:layer]; + auto frameBufferStatus = gl->checkFramebufferStatus(GL_FRAMEBUFFER); + gl->bindFramebuffer(GL_FRAMEBUFFER, 0); + gl->bindRenderbuffer(GL_RENDERBUFFER, 0); + if (frameBufferStatus != GL_FRAMEBUFFER_COMPLETE) { + LOGE("EAGLWindow::onCreateSurface() Framebuffer is not complete!"); + return nullptr; + } + GLFrameBufferInfo glInfo = {}; + glInfo.id = frameBufferID; + glInfo.format = GL_RGBA8; + BackendRenderTarget renderTarget = {glInfo, static_cast(width), static_cast(height)}; + return Surface::MakeFrom(context, renderTarget, ImageOrigin::BottomLeft); +} + +void EAGLWindow::onPresent(Context* context, int64_t) { + auto gl = GLFunctions::Get(context); + gl->bindRenderbuffer(GL_RENDERBUFFER, colorBuffer); + auto eaglContext = static_cast(context->device())->eaglContext(); + [eaglContext presentRenderbuffer:GL_RENDERBUFFER]; + gl->bindRenderbuffer(GL_RENDERBUFFER, 0); +} +} // namespace tgfx diff --git a/src/opengl/egl/EGLDevice.cpp b/src/opengl/egl/EGLDevice.cpp new file mode 100644 index 00000000..b4363601 --- /dev/null +++ b/src/opengl/egl/EGLDevice.cpp @@ -0,0 +1,199 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/egl/EGLDevice.h" +#include +#include "opengl/egl/EGLProcGetter.h" +#include "utils/Log.h" + +namespace tgfx { +static EGLContext CreateContext(EGLContext sharedContext, EGLDisplay eglDisplay, + EGLConfig eglConfig) { + static const EGLint context3Attributes[] = {EGL_CONTEXT_CLIENT_VERSION, 3, EGL_NONE}; + auto eglContext = eglCreateContext(eglDisplay, eglConfig, sharedContext, context3Attributes); + if (eglContext != EGL_NO_CONTEXT) { + return eglContext; + } + LOGE("EGLDevice CreateContext() version 3: error=%d", eglGetError()); + static const EGLint context2Attributes[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE}; + eglContext = eglCreateContext(eglDisplay, eglConfig, sharedContext, context2Attributes); + if (eglContext == EGL_NO_CONTEXT) { + LOGE("EGLDevice CreateContext() version 2: error=%d", eglGetError()); + } + return eglContext; +} + +void* GLDevice::CurrentNativeHandle() { + return eglGetCurrentContext(); +} + +std::shared_ptr GLDevice::Current() { + auto eglContext = eglGetCurrentContext(); + auto eglDisplay = eglGetCurrentDisplay(); + auto eglSurface = eglGetCurrentSurface(EGL_DRAW); + return EGLDevice::Wrap(eglDisplay, eglSurface, eglContext, nullptr, true); +} + +std::shared_ptr GLDevice::Make(void* sharedContext) { + static auto eglGlobals = EGLGlobals::Get(); + auto eglContext = EGL_NO_CONTEXT; + auto eglSurface = eglCreatePbufferSurface(eglGlobals->display, eglGlobals->pbufferConfig, + &eglGlobals->pbufferSurfaceAttributes[0]); + if (eglSurface == nullptr) { + LOGE("GLDevice::Make() eglCreatePbufferSurface error=%d", eglGetError()); + return nullptr; + } + auto eglShareContext = reinterpret_cast(sharedContext); + eglContext = CreateContext(eglShareContext, eglGlobals->display, eglGlobals->pbufferConfig); + if (eglContext == nullptr) { + eglDestroySurface(eglGlobals->display, eglSurface); + return nullptr; + } + auto device = + EGLDevice::Wrap(eglGlobals->display, eglSurface, eglContext, eglShareContext, false); + if (device == nullptr) { + eglDestroyContext(eglGlobals->display, eglContext); + eglDestroySurface(eglGlobals->display, eglSurface); + } + return device; +} + +std::shared_ptr EGLDevice::MakeAdopted(EGLDisplay eglDisplay, EGLSurface eglSurface, + EGLContext eglContext) { + return EGLDevice::Wrap(eglDisplay, eglSurface, eglContext, nullptr, true); +} + +std::shared_ptr EGLDevice::MakeFrom(EGLNativeWindowType nativeWindow, + EGLContext sharedContext) { + static auto eglGlobals = EGLGlobals::Get(); + auto eglSurface = eglCreateWindowSurface(eglGlobals->display, eglGlobals->windowConfig, + nativeWindow, &eglGlobals->windowSurfaceAttributes[0]); + if (eglSurface == nullptr) { + LOGE("EGLDevice::MakeFrom() eglCreateWindowSurface error=%d", eglGetError()); + return nullptr; + } + auto eglContext = CreateContext(sharedContext, eglGlobals->display, eglGlobals->windowConfig); + if (eglContext == EGL_NO_CONTEXT) { + eglDestroySurface(eglGlobals->display, eglSurface); + return nullptr; + } + auto device = EGLDevice::Wrap(eglGlobals->display, eglSurface, eglContext, sharedContext, false); + if (device == nullptr) { + eglDestroyContext(eglGlobals->display, eglContext); + eglDestroySurface(eglGlobals->display, eglSurface); + } + return device; +} + +std::shared_ptr EGLDevice::Wrap(EGLDisplay eglDisplay, EGLSurface eglSurface, + EGLContext eglContext, EGLContext shareContext, + bool isAdopted) { + auto glContext = GLDevice::Get(eglContext); + if 
(glContext) {
+    return std::static_pointer_cast<EGLDevice>(glContext);
+  }
+  if (eglDisplay == nullptr || eglContext == nullptr || eglSurface == nullptr) {
+    return nullptr;
+  }
+  EGLContext oldEglContext = eglGetCurrentContext();
+  EGLDisplay oldEglDisplay = eglGetCurrentDisplay();
+  EGLSurface oldEglReadSurface = eglGetCurrentSurface(EGL_READ);
+  EGLSurface oldEglDrawSurface = eglGetCurrentSurface(EGL_DRAW);
+  if (oldEglContext != eglContext) {
+    auto result = eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext);
+    if (!result) {
+      eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
+      if (oldEglDisplay) {
+        eglMakeCurrent(oldEglDisplay, oldEglDrawSurface, oldEglReadSurface, oldEglContext);
+      }
+      return nullptr;
+    }
+  }
+  auto device = std::shared_ptr<EGLDevice>(new EGLDevice(eglContext));
+  device->isAdopted = isAdopted;
+  device->eglDisplay = eglDisplay;
+  device->eglSurface = eglSurface;
+  device->eglContext = eglContext;
+  device->shareContext = shareContext;
+  device->weakThis = device;
+  if (oldEglContext != eglContext) {
+    eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
+    if (oldEglDisplay) {
+      eglMakeCurrent(oldEglDisplay, oldEglDrawSurface, oldEglReadSurface, oldEglContext);
+    }
+  }
+  return device;
+}
+
+EGLDevice::EGLDevice(void* nativeHandle) : GLDevice(nativeHandle) {
+}
+
+EGLDevice::~EGLDevice() {
+  releaseAll();
+  if (isAdopted) {
+    return;
+  }
+  eglDestroyContext(eglDisplay, eglContext);
+  eglDestroySurface(eglDisplay, eglSurface);
+}
+
+bool EGLDevice::sharableWith(void* nativeContext) const {
+  return nativeHandle == nativeContext || shareContext == nativeContext;
+}
+
+bool EGLDevice::onMakeCurrent() {
+  oldEglContext = eglGetCurrentContext();
+  oldEglDisplay = eglGetCurrentDisplay();
+  oldEglReadSurface = eglGetCurrentSurface(EGL_READ);
+  oldEglDrawSurface = eglGetCurrentSurface(EGL_DRAW);
+  if (oldEglContext == eglContext) {
+    // If the caller has already made this context current, keep its binding and do not switch;
+    // the surfaces the caller uses for read/draw may be different from ours.
+    return true;
+  }
+  auto result = eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext);
+  if (!result) {
+    LOGE("EGLDevice::onMakeCurrent() failure result = %d error= %d", result, eglGetError());
+    return false;
+  }
+  return true;
+}
+
+void EGLDevice::onClearCurrent() {
+  if (oldEglContext == eglContext) {
+    return;
+  }
+  eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
+  if (oldEglDisplay) {
+    // This may fail.
+    eglMakeCurrent(oldEglDisplay, oldEglDrawSurface, oldEglReadSurface, oldEglContext);
+  }
+}
+
+void EGLDevice::swapBuffers(int64_t timestamp) {
+  if (timestamp != INT64_MIN) {
+    static auto eglPresentationTimeANDROID = reinterpret_cast<PFNEGLPRESENTATIONTIMEANDROIDPROC>(
+        eglGetProcAddress("eglPresentationTimeANDROID"));
+    if (eglPresentationTimeANDROID) {
+      // EGL expects the timestamp in nanoseconds.
+      eglPresentationTimeANDROID(eglDisplay, eglSurface, timestamp * 1000);
+    }
+  }
+  eglSwapBuffers(eglDisplay, eglSurface);
+}
+} // namespace tgfx
\ No newline at end of file
diff --git a/src/opengl/egl/EGLDisplayWrapper.cpp b/src/opengl/egl/EGLDisplayWrapper.cpp
new file mode 100644
index 00000000..4e8e27f4
--- /dev/null
+++ b/src/opengl/egl/EGLDisplayWrapper.cpp
@@ -0,0 +1,111 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#if defined(_WIN32) + +#include "EGLDisplayWrapper.h" +#include +#include "WinUser.h" + +namespace tgfx { +enum { D3D11, D3D11_FL_9_3, D3D11_WARP, PLAN_END }; + +// These are preferred display attributes and request ANGLE's D3D11 +// renderer. eglInitialize will only succeed with these attributes if the +// hardware supports D3D11 Feature Level 10_0+. +const EGLint d3d11_display_attributes[] = { + EGL_PLATFORM_ANGLE_TYPE_ANGLE, + EGL_PLATFORM_ANGLE_TYPE_D3D11_ANGLE, + + // EGL_PLATFORM_ANGLE_ENABLE_AUTOMATIC_TRIM_ANGLE is an option that will + // enable ANGLE to automatically call the IDXGIDevice3::Trim method on + // behalf of the application when it gets suspended. + EGL_PLATFORM_ANGLE_ENABLE_AUTOMATIC_TRIM_ANGLE, + EGL_TRUE, + + // This extension allows angle to render directly on a D3D swapchain + // in the correct orientation on D3D11. + EGL_EXPERIMENTAL_PRESENT_PATH_ANGLE, + EGL_EXPERIMENTAL_PRESENT_PATH_FAST_ANGLE, + + EGL_NONE, +}; + +// These are used to request ANGLE's D3D11 renderer, with D3D11 Feature +// Level 9_3. +const EGLint d3d11_fl_9_3_display_attributes[] = { + EGL_PLATFORM_ANGLE_TYPE_ANGLE, + EGL_PLATFORM_ANGLE_TYPE_D3D11_ANGLE, + EGL_PLATFORM_ANGLE_MAX_VERSION_MAJOR_ANGLE, + 9, + EGL_PLATFORM_ANGLE_MAX_VERSION_MINOR_ANGLE, + 3, + EGL_PLATFORM_ANGLE_ENABLE_AUTOMATIC_TRIM_ANGLE, + EGL_TRUE, + EGL_NONE, +}; + +// These attributes request D3D11 WARP (software rendering fallback) in case +// hardware-backed D3D11 is unavailable. 
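+// EGLGetPlatformDisplay() below walks these attribute sets in order (D3D11, then D3D11 Feature
+// Level 9_3, then D3D11 WARP) and finally falls back to eglGetDisplay(EGL_DEFAULT_DISPLAY) when
+// none of them yields a usable display.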
+const EGLint d3d11_warp_display_attributes[] = { + EGL_PLATFORM_ANGLE_TYPE_ANGLE, + EGL_PLATFORM_ANGLE_TYPE_D3D11_ANGLE, + EGL_PLATFORM_ANGLE_ENABLE_AUTOMATIC_TRIM_ANGLE, + EGL_TRUE, + EGL_NONE, +}; + +static int Win32EGLDisplayPlan = D3D11; + +EGLDisplay EGLDisplayWrapper::EGLGetPlatformDisplay() { + EGLDisplay display; + + auto hdc = GetDC(nullptr); + + auto eglGetPlatformDisplayEXT = reinterpret_cast( + eglGetProcAddress("eglGetPlatformDisplayEXT")); + if (!eglGetPlatformDisplayEXT) { + return nullptr; + } + + if (Win32EGLDisplayPlan == D3D11) { + display = eglGetPlatformDisplayEXT(EGL_PLATFORM_ANGLE_ANGLE, hdc, d3d11_display_attributes); + } else if (Win32EGLDisplayPlan == D3D11_FL_9_3) { + display = + eglGetPlatformDisplayEXT(EGL_PLATFORM_ANGLE_ANGLE, hdc, d3d11_fl_9_3_display_attributes); + } else if (Win32EGLDisplayPlan == D3D11_WARP) { + display = + eglGetPlatformDisplayEXT(EGL_PLATFORM_ANGLE_ANGLE, hdc, d3d11_warp_display_attributes); + } else { + return eglGetDisplay(EGL_DEFAULT_DISPLAY); + } + + if (display == EGL_NO_DISPLAY) { + Win32EGLDisplayPlan++; + return EGLDisplayWrapper::EGLGetPlatformDisplay(); + } + + return display; +} + +bool EGLDisplayWrapper::HasNext() { + Win32EGLDisplayPlan++; + return Win32EGLDisplayPlan < PLAN_END; +} +} // namespace tgfx +#endif \ No newline at end of file diff --git a/src/opengl/egl/EGLDisplayWrapper.h b/src/opengl/egl/EGLDisplayWrapper.h new file mode 100644 index 00000000..62673430 --- /dev/null +++ b/src/opengl/egl/EGLDisplayWrapper.h @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#if defined(_WIN32) +#include + +namespace tgfx { +class EGLDisplayWrapper { + public: + static EGLDisplay EGLGetPlatformDisplay(); + static bool HasNext(); +}; +} // namespace tgfx +#endif diff --git a/src/opengl/egl/EGLGlobals.cpp b/src/opengl/egl/EGLGlobals.cpp new file mode 100644 index 00000000..433ddc7d --- /dev/null +++ b/src/opengl/egl/EGLGlobals.cpp @@ -0,0 +1,97 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/egl/EGLGlobals.h" +#include +#include +#if defined(_WIN32) +#include "EGLDisplayWrapper.h" +#endif + +namespace tgfx { +EGLGlobals InitializeEGL() { + EGLGlobals globals = {}; + EGLint majorVersion; + EGLint minorVersion; +#if defined(_WIN32) + do { + globals.display = EGLDisplayWrapper::EGLGetPlatformDisplay(); + } while (eglInitialize(globals.display, &majorVersion, &minorVersion) == EGL_FALSE && + EGLDisplayWrapper::HasNext()); + globals.windowSurfaceAttributes = {EGL_DIRECT_COMPOSITION_ANGLE, EGL_TRUE, EGL_NONE}; +#else + globals.display = eglGetDisplay(EGL_DEFAULT_DISPLAY); + eglInitialize(globals.display, &majorVersion, &minorVersion); +#endif + globals.pbufferSurfaceAttributes = {EGL_WIDTH, 1, EGL_HEIGHT, 1, + EGL_LARGEST_PBUFFER, EGL_TRUE, EGL_NONE}; + eglBindAPI(EGL_OPENGL_ES_API); + EGLint numConfigs = 0; + const EGLint configAttribs[] = {EGL_SURFACE_TYPE, + EGL_WINDOW_BIT, + EGL_RENDERABLE_TYPE, + EGL_OPENGL_ES2_BIT, + EGL_RED_SIZE, + 8, + EGL_GREEN_SIZE, + 8, + EGL_BLUE_SIZE, + 8, + EGL_ALPHA_SIZE, + 8, + EGL_STENCIL_SIZE, + 8, + EGL_NONE}; + eglChooseConfig(globals.display, configAttribs, &globals.windowConfig, 1, &numConfigs); + + const EGLint configPbufferAttribs[] = {EGL_SURFACE_TYPE, + EGL_PBUFFER_BIT, + EGL_RENDERABLE_TYPE, + EGL_OPENGL_ES2_BIT, + EGL_RED_SIZE, + 8, + EGL_GREEN_SIZE, + 8, + EGL_BLUE_SIZE, + 8, + EGL_ALPHA_SIZE, + 8, + EGL_STENCIL_SIZE, + 8, + EGL_NONE}; + eglChooseConfig(globals.display, configPbufferAttribs, &globals.pbufferConfig, 1, &numConfigs); + return globals; +} + +static std::mutex eglGlobalsLocker = {}; +static const EGLGlobals* eglGlobals = nullptr; + +void EGLGlobals::Set(const EGLGlobals* globals) { + std::lock_guard lock(eglGlobalsLocker); + eglGlobals = globals; +} + +const EGLGlobals* EGLGlobals::Get() { + std::lock_guard lock(eglGlobalsLocker); + if (eglGlobals) { + return eglGlobals; + } + static const EGLGlobals globals = InitializeEGL(); + return &globals; +} +} // namespace tgfx diff --git a/src/opengl/egl/EGLHardwareBuffer.cpp b/src/opengl/egl/EGLHardwareBuffer.cpp new file mode 100644 index 00000000..223bcd31 --- /dev/null +++ b/src/opengl/egl/EGLHardwareBuffer.cpp @@ -0,0 +1,62 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "gpu/Texture.h" +#include "tgfx/platform/HardwareBuffer.h" + +#if defined(__ANDROID__) || defined(ANDROID) +#include "EGLHardwareTexture.h" +#include "platform/android/AHardwareBufferFunctions.h" +#include "tgfx/platform/HardwareBuffer.h" +#endif + +namespace tgfx { +#if defined(__ANDROID__) || defined(ANDROID) + +bool HardwareBufferAvailable() { + static const bool available = AHardwareBufferFunctions::Get()->allocate != nullptr && + AHardwareBufferFunctions::Get()->release != nullptr && + AHardwareBufferFunctions::Get()->lock != nullptr && + AHardwareBufferFunctions::Get()->unlock != nullptr && + AHardwareBufferFunctions::Get()->describe != nullptr && + AHardwareBufferFunctions::Get()->acquire != nullptr && + AHardwareBufferFunctions::Get()->toHardwareBuffer != nullptr && + AHardwareBufferFunctions::Get()->fromHardwareBuffer != nullptr; + return available; +} + +std::shared_ptr Texture::MakeFrom(Context* context, HardwareBufferRef hardwareBuffer, + YUVColorSpace) { + if (!HardwareBufferCheck(hardwareBuffer)) { + return nullptr; + } + return EGLHardwareTexture::MakeFrom(context, hardwareBuffer); +} + +#else + +bool HardwareBufferAvailable() { + return false; +} + +std::shared_ptr Texture::MakeFrom(Context*, HardwareBufferRef, YUVColorSpace) { + return nullptr; +} + +#endif +} // namespace tgfx \ No newline at end of file diff --git a/src/opengl/egl/EGLHardwareTexture.cpp b/src/opengl/egl/EGLHardwareTexture.cpp new file mode 100644 index 00000000..e09904b0 --- /dev/null +++ b/src/opengl/egl/EGLHardwareTexture.cpp @@ -0,0 +1,128 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#if defined(__ANDROID__) || defined(ANDROID) + +#include "EGLHardwareTexture.h" +#include +#include +#include +#include "gpu/Gpu.h" +#include "opengl/GLSampler.h" +#include "tgfx/core/Pixmap.h" +#include "tgfx/opengl/egl/EGLDevice.h" +#include "utils/PixelFormatUtil.h" +#include "utils/UniqueID.h" + +namespace tgfx { +namespace eglext { +static PFNEGLGETNATIVECLIENTBUFFERANDROIDPROC eglGetNativeClientBufferANDROID = nullptr; +static PFNGLEGLIMAGETARGETTEXTURE2DOESPROC glEGLImageTargetTexture2DOES = nullptr; +static PFNEGLCREATEIMAGEKHRPROC eglCreateImageKHR = nullptr; +static PFNEGLDESTROYIMAGEKHRPROC eglDestroyImageKHR = nullptr; +} // namespace eglext + +static bool InitEGLEXTProc() { + eglext::eglGetNativeClientBufferANDROID = + (PFNEGLGETNATIVECLIENTBUFFERANDROIDPROC)eglGetProcAddress("eglGetNativeClientBufferANDROID"); + eglext::glEGLImageTargetTexture2DOES = + (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglGetProcAddress("glEGLImageTargetTexture2DOES"); + eglext::eglCreateImageKHR = (PFNEGLCREATEIMAGEKHRPROC)eglGetProcAddress("eglCreateImageKHR"); + eglext::eglDestroyImageKHR = (PFNEGLDESTROYIMAGEKHRPROC)eglGetProcAddress("eglDestroyImageKHR"); + return eglext::eglGetNativeClientBufferANDROID && eglext::glEGLImageTargetTexture2DOES && + eglext::eglCreateImageKHR && eglext::eglDestroyImageKHR; +} + +std::shared_ptr EGLHardwareTexture::MakeFrom(Context* context, + AHardwareBuffer* hardwareBuffer) { + static const bool initialized = InitEGLEXTProc(); + if (!initialized) { + return nullptr; + } + auto info = HardwareBufferGetInfo(hardwareBuffer); + if (info.isEmpty()) { + return nullptr; + } + ScratchKey scratchKey = {}; + ComputeScratchKey(&scratchKey, hardwareBuffer); + auto glTexture = std::static_pointer_cast( + context->resourceCache()->findScratchResource(scratchKey)); + if (glTexture != nullptr) { + return glTexture; + } + EGLClientBuffer clientBuffer = eglext::eglGetNativeClientBufferANDROID(hardwareBuffer); + if (!clientBuffer) { + return nullptr; + } + auto display = static_cast(context->device())->getDisplay(); + EGLint attributes[] = {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE}; + EGLImageKHR eglImage = eglext::eglCreateImageKHR( + display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID, clientBuffer, attributes); + if (eglImage == EGL_NO_IMAGE_KHR) { + return nullptr; + } + + auto sampler = std::make_unique(); + sampler->target = GL_TEXTURE_2D; + sampler->format = ColorTypeToPixelFormat(info.colorType()); + glGenTextures(1, &sampler->id); + if (sampler->id == 0) { + eglext::eglDestroyImageKHR(display, eglImage); + return nullptr; + } + glBindTexture(sampler->target, sampler->id); + glTexParameteri(sampler->target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(sampler->target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glTexParameteri(sampler->target, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(sampler->target, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + eglext::glEGLImageTargetTexture2DOES(sampler->target, (GLeglImageOES)eglImage); + glTexture = Resource::Wrap( + context, new EGLHardwareTexture(hardwareBuffer, eglImage, info.width(), info.height())); + glTexture->sampler = std::move(sampler); + return glTexture; +} + +EGLHardwareTexture::EGLHardwareTexture(AHardwareBuffer* hardwareBuffer, EGLImageKHR eglImage, + int width, int height) + : Texture(width, height, ImageOrigin::TopLeft), + hardwareBuffer(HardwareBufferRetain(hardwareBuffer)), eglImage(eglImage) { +} + 
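MakeFrom() above is what the public Texture::MakeFrom() overload in EGLHardwareBuffer.cpp forwards to, and because the scratch key is derived from the buffer pointer, wrapping the same AHardwareBuffer again returns the cached texture instead of creating a second EGLImage. A hypothetical caller-side sketch, assuming an active tgfx Context* and that the YUVColorSpace parameter of Texture::MakeFrom() is defaulted for RGBA buffers:

```cpp
#include <android/hardware_buffer.h>

// Hypothetical usage: allocate a GPU-sampled hardware buffer and wrap it as a tgfx texture.
void WrapHardwareBuffer(tgfx::Context* context) {
  AHardwareBuffer_Desc desc = {};
  desc.width = 512;
  desc.height = 512;
  desc.layers = 1;
  desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
  desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
  AHardwareBuffer* buffer = nullptr;
  if (!tgfx::HardwareBufferAvailable() || AHardwareBuffer_allocate(&desc, &buffer) != 0) {
    return;
  }
  auto texture = tgfx::Texture::MakeFrom(context, buffer);  // reaches EGLHardwareTexture::MakeFrom()
  // ... fill the buffer and draw with the texture ...
  AHardwareBuffer_release(buffer);  // safe: the EGLHardwareTexture constructor retained its own reference
}
```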
+EGLHardwareTexture::~EGLHardwareTexture() { + HardwareBufferRelease(hardwareBuffer); +} + +void EGLHardwareTexture::ComputeScratchKey(BytesKey* scratchKey, void* hardwareBuffer) { + static const uint32_t BGRAType = UniqueID::Next(); + scratchKey->write(BGRAType); + scratchKey->write(hardwareBuffer); +} + +size_t EGLHardwareTexture::memoryUsage() const { + auto info = HardwareBufferGetInfo(hardwareBuffer); + return info.byteSize(); +} + +void EGLHardwareTexture::onReleaseGPU() { + context->gpu()->deleteSampler(sampler.get()); + auto display = static_cast(context->device())->getDisplay(); + eglext::eglDestroyImageKHR(display, eglImage); +} +} // namespace tgfx + +#endif \ No newline at end of file diff --git a/src/opengl/egl/EGLHardwareTexture.h b/src/opengl/egl/EGLHardwareTexture.h new file mode 100644 index 00000000..06665d62 --- /dev/null +++ b/src/opengl/egl/EGLHardwareTexture.h @@ -0,0 +1,59 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#if defined(__ANDROID__) || defined(ANDROID) + +#pragma once + +#include +#include +#include "gpu/Texture.h" +#include "tgfx/platform/HardwareBuffer.h" + +namespace tgfx { +class EGLHardwareTexture : public Texture { + public: + static std::shared_ptr MakeFrom(Context* context, + AHardwareBuffer* hardwareBuffer); + + size_t memoryUsage() const override; + + const TextureSampler* getSampler() const override { + return sampler.get(); + } + + HardwareBufferRef getHardwareBuffer() const override { + return hardwareBuffer; + } + + private: + std::unique_ptr sampler = {}; + AHardwareBuffer* hardwareBuffer = nullptr; + EGLImageKHR eglImage = EGL_NO_IMAGE_KHR; + + static void ComputeScratchKey(BytesKey* scratchKey, void* hardwareBuffer); + + EGLHardwareTexture(AHardwareBuffer* hardwareBuffer, EGLImageKHR eglImage, int width, int height); + + ~EGLHardwareTexture() override; + + void onReleaseGPU() override; +}; +} // namespace tgfx + +#endif \ No newline at end of file diff --git a/src/opengl/egl/EGLProcGetter.cpp b/src/opengl/egl/EGLProcGetter.cpp new file mode 100644 index 00000000..f7adb7d5 --- /dev/null +++ b/src/opengl/egl/EGLProcGetter.cpp @@ -0,0 +1,152 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "EGLProcGetter.h" +#include +#include +#include + +namespace tgfx { + +static void* egl_get_gl_proc(void*, const char name[]) { + // https://www.khronos.org/registry/EGL/extensions/KHR/EGL_KHR_get_all_proc_addresses.txt + // eglGetProcAddress() is not guaranteed to support the querying of non-extension EGL functions. +#define M(X) \ + if (0 == strcmp(#X, name)) { \ + return reinterpret_cast(X); \ + } + M(eglGetCurrentDisplay) + M(eglQueryString) + M(glActiveTexture) + M(glAttachShader) + M(glBindAttribLocation) + M(glBindBuffer) + M(glBindFramebuffer) + M(glBindRenderbuffer) + M(glBindTexture) + M(glBlendColor) + M(glBlendEquation) + M(glBlendFunc) + M(glBufferData) + M(glBufferSubData) + M(glCheckFramebufferStatus) + M(glClear) + M(glClearColor) + M(glClearStencil) + M(glColorMask) + M(glCompileShader) + M(glCompressedTexImage2D) + M(glCompressedTexSubImage2D) + M(glCopyTexSubImage2D) + M(glCreateProgram) + M(glCreateShader) + M(glCullFace) + M(glDeleteBuffers) + M(glDeleteFramebuffers) + M(glDeleteProgram) + M(glDeleteRenderbuffers) + M(glDeleteShader) + M(glDeleteTextures) + M(glDepthMask) + M(glDisable) + M(glDisableVertexAttribArray) + M(glDrawArrays) + M(glDrawElements) + M(glEnable) + M(glEnableVertexAttribArray) + M(glFinish) + M(glFlush) + M(glFramebufferRenderbuffer) + M(glFramebufferTexture2D) + M(glFrontFace) + M(glGenBuffers) + M(glGenFramebuffers) + M(glGenRenderbuffers) + M(glGenTextures) + M(glGenerateMipmap) + M(glGetBufferParameteriv) + M(glGetError) + M(glGetFramebufferAttachmentParameteriv) + M(glGetIntegerv) + M(glGetProgramInfoLog) + M(glGetProgramiv) + M(glGetRenderbufferParameteriv) + M(glGetShaderInfoLog) + M(glGetShaderPrecisionFormat) + M(glGetShaderiv) + M(glGetString) + M(glGetUniformLocation) + M(glIsTexture) + M(glLineWidth) + M(glLinkProgram) + M(glPixelStorei) + M(glReadPixels) + M(glRenderbufferStorage) + M(glScissor) + M(glShaderSource) + M(glStencilFunc) + M(glStencilFuncSeparate) + M(glStencilMask) + M(glStencilMaskSeparate) + M(glStencilOp) + M(glStencilOpSeparate) + M(glTexImage2D) + M(glTexParameterf) + M(glTexParameterfv) + M(glTexParameteri) + M(glTexParameteriv) + M(glTexSubImage2D) + M(glUniform1f) + M(glUniform1fv) + M(glUniform1i) + M(glUniform1iv) + M(glUniform2f) + M(glUniform2fv) + M(glUniform2i) + M(glUniform2iv) + M(glUniform3f) + M(glUniform3fv) + M(glUniform3i) + M(glUniform3iv) + M(glUniform4f) + M(glUniform4fv) + M(glUniform4i) + M(glUniform4iv) + M(glUniformMatrix2fv) + M(glUniformMatrix3fv) + M(glUniformMatrix4fv) + M(glUseProgram) + M(glVertexAttrib1f) + M(glVertexAttrib2fv) + M(glVertexAttrib3fv) + M(glVertexAttrib4fv) + M(glVertexAttribPointer) + M(glViewport) +#undef M + auto fun = eglGetProcAddress(name); + return reinterpret_cast(fun); +} + +void* EGLProcGetter::getProcAddress(const char* name) const { + return egl_get_gl_proc(nullptr, name); +} + +std::unique_ptr GLProcGetter::Make() { + return std::make_unique(); +} +} // namespace tgfx \ No newline at end of file diff --git 
a/src/opengl/egl/EGLProcGetter.h b/src/opengl/egl/EGLProcGetter.h new file mode 100644 index 00000000..499f0483 --- /dev/null +++ b/src/opengl/egl/EGLProcGetter.h @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "opengl/GLProcGetter.h" + +namespace tgfx { +class EGLProcGetter : public GLProcGetter { + public: + void* getProcAddress(const char name[]) const override; +}; +} // namespace tgfx diff --git a/src/opengl/egl/EGLWindow.cpp b/src/opengl/egl/EGLWindow.cpp new file mode 100644 index 00000000..bf7bde98 --- /dev/null +++ b/src/opengl/egl/EGLWindow.cpp @@ -0,0 +1,85 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/egl/EGLWindow.h" + +#if defined(__ANDROID__) || defined(ANDROID) +#include +#endif +#include +#include +#include "utils/USE.h" + +namespace tgfx { +std::shared_ptr EGLWindow::Current() { + auto device = EGLDevice::Current(); + if (device == nullptr) { + return nullptr; + } + return std::shared_ptr(new EGLWindow(device)); +} + +std::shared_ptr EGLWindow::MakeFrom(EGLNativeWindowType nativeWindow, + EGLContext sharedContext) { + if (!nativeWindow) { + return nullptr; + } + auto device = EGLDevice::MakeFrom(nativeWindow, sharedContext); + if (device == nullptr) { + return nullptr; + } + auto eglWindow = std::shared_ptr(new EGLWindow(device)); + eglWindow->nativeWindow = nativeWindow; + return eglWindow; +} + +EGLWindow::EGLWindow(std::shared_ptr device) : Window(std::move(device)) { +} + +std::shared_ptr EGLWindow::onCreateSurface(Context* context) { + EGLint width = 0; + EGLint height = 0; + + // If the rendering size changes,eglQuerySurface based on ANativeWindow may give the wrong size. 
+#if defined(__ANDROID__) || defined(ANDROID) + if (nativeWindow) { + width = ANativeWindow_getWidth(nativeWindow); + height = ANativeWindow_getHeight(nativeWindow); + } +#endif + if (width <= 0 || height <= 0) { + auto eglDevice = static_cast(device.get()); + eglQuerySurface(eglDevice->eglDisplay, eglDevice->eglSurface, EGL_WIDTH, &width); + eglQuerySurface(eglDevice->eglDisplay, eglDevice->eglSurface, EGL_HEIGHT, &height); + } + + if (width <= 0 || height <= 0) { + return nullptr; + } + + tgfx::GLFrameBufferInfo frameBuffer = {}; + frameBuffer.id = 0; + frameBuffer.format = GL_RGBA8; + tgfx::BackendRenderTarget renderTarget = {frameBuffer, width, height}; + return Surface::MakeFrom(context, renderTarget, ImageOrigin::BottomLeft); +} + +void EGLWindow::onPresent(Context*, int64_t presentationTime) { + std::static_pointer_cast(device)->swapBuffers(presentationTime); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/opengl/processors/GLAARectEffect.cpp b/src/opengl/processors/GLAARectEffect.cpp new file mode 100644 index 00000000..97b03584 --- /dev/null +++ b/src/opengl/processors/GLAARectEffect.cpp @@ -0,0 +1,49 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLAARectEffect.h" + +namespace tgfx { +std::unique_ptr AARectEffect::Make(const Rect& rect) { + return std::unique_ptr(new GLAARectEffect(rect)); +} + +GLAARectEffect::GLAARectEffect(const Rect& rect) : AARectEffect(rect) { +} + +void GLAARectEffect::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto* uniformHandler = args.uniformHandler; + + auto rectName = uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "Rect"); + fragBuilder->codeAppendf( + "vec4 dists4 = clamp(vec4(1.0, 1.0, -1.0, -1.0) * vec4(gl_FragCoord.xyxy - %s), 0.0, 1.0);", + rectName.c_str()); + fragBuilder->codeAppend("vec2 dists2 = dists4.xy + dists4.zw - 1.0;"); + fragBuilder->codeAppend("float coverage = dists2.x * dists2.y;"); + fragBuilder->codeAppendf("%s = %s * coverage;", args.outputColor.c_str(), + args.inputColor.c_str()); +} + +void GLAARectEffect::onSetData(UniformBuffer* uniformBuffer) const { + // The AA math in the shader evaluates to 0 at the uploaded coordinates, so outset by 0.5 + // to interpolate from 0 at a half pixel inset and 1 at a half pixel outset of rect. 
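+  // For example, a fragment whose center lies exactly on an edge of the original rect is 0.5 away from the matching edge of the outset rect, so dists2 comes out near (0.5, 1.0) there and the coverage is about 0.5: a half-covered, antialiased edge pixel.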
+ auto outRect = rect.makeOutset(0.5f, 0.5f); + uniformBuffer->setData("Rect", outRect); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLAARectEffect.h b/src/opengl/processors/GLAARectEffect.h new file mode 100644 index 00000000..e7111521 --- /dev/null +++ b/src/opengl/processors/GLAARectEffect.h @@ -0,0 +1,34 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/AARectEffect.h" + +namespace tgfx { +class GLAARectEffect : public AARectEffect { + public: + explicit GLAARectEffect(const Rect& rect); + + void emitCode(EmitArgs& args) const override; + + private: + void onSetData(UniformBuffer* uniformBuffer) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLClampedGradientEffect.cpp b/src/opengl/processors/GLClampedGradientEffect.cpp new file mode 100644 index 00000000..2e481502 --- /dev/null +++ b/src/opengl/processors/GLClampedGradientEffect.cpp @@ -0,0 +1,70 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLClampedGradientEffect.h" + +namespace tgfx { +std::unique_ptr ClampedGradientEffect::Make( + std::unique_ptr colorizer, std::unique_ptr gradLayout, + Color leftBorderColor, Color rightBorderColor, bool makePremultiply) { + return std::unique_ptr( + new GLClampedGradientEffect(std::move(colorizer), std::move(gradLayout), leftBorderColor, + rightBorderColor, makePremultiply)); +} + +GLClampedGradientEffect::GLClampedGradientEffect(std::unique_ptr colorizer, + std::unique_ptr gradLayout, + tgfx::Color leftBorderColor, + tgfx::Color rightBorderColor, bool makePremultiply) + : ClampedGradientEffect(std::move(colorizer), std::move(gradLayout), leftBorderColor, + rightBorderColor, makePremultiply) { +} + +void GLClampedGradientEffect::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto leftBorderColorName = + args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "leftBorderColor"); + auto rightBorderColorName = + args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "rightBorderColor"); + std::string _child1 = "_child1"; + emitChild(gradLayoutIndex, &_child1, args); + fragBuilder->codeAppendf("vec4 t = %s;", _child1.c_str()); + fragBuilder->codeAppend("if (t.y < 0.0) {"); + fragBuilder->codeAppendf("%s = vec4(0.0);", args.outputColor.c_str()); + fragBuilder->codeAppend("} else if (t.x <= 0.0) {"); + fragBuilder->codeAppendf("%s = %s;", args.outputColor.c_str(), leftBorderColorName.c_str()); + fragBuilder->codeAppend("} else if (t.x >= 1.0) {"); + fragBuilder->codeAppendf("%s = %s;", args.outputColor.c_str(), rightBorderColorName.c_str()); + fragBuilder->codeAppend("} else {"); + std::string _input0 = "t"; + std::string _child0 = "_child0"; + emitChild(colorizerIndex, _input0, &_child0, args); + fragBuilder->codeAppendf("%s = %s;", args.outputColor.c_str(), _child0.c_str()); + fragBuilder->codeAppend("}"); + if (makePremultiply) { + fragBuilder->codeAppend("{"); + fragBuilder->codeAppendf("%s.rgb *= %s.a;", args.outputColor.c_str(), args.outputColor.c_str()); + fragBuilder->codeAppend("}"); + } +} + +void GLClampedGradientEffect::onSetData(UniformBuffer* uniformBuffer) const { + uniformBuffer->setData("leftBorderColor", leftBorderColor); + uniformBuffer->setData("rightBorderColor", rightBorderColor); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLClampedGradientEffect.h b/src/opengl/processors/GLClampedGradientEffect.h new file mode 100644 index 00000000..560242a9 --- /dev/null +++ b/src/opengl/processors/GLClampedGradientEffect.h @@ -0,0 +1,37 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/ClampedGradientEffect.h" +#include "tgfx/core/Color.h" + +namespace tgfx { +class GLClampedGradientEffect : public ClampedGradientEffect { + public: + GLClampedGradientEffect(std::unique_ptr colorizer, + std::unique_ptr gradLayout, Color leftBorderColor, + Color rightBorderColor, bool makePremultiply); + + void emitCode(EmitArgs& args) const override; + + private: + void onSetData(UniformBuffer* uniformBuffer) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLColorMatrixFragmentProcessor.cpp b/src/opengl/processors/GLColorMatrixFragmentProcessor.cpp new file mode 100644 index 00000000..d2ad3e0f --- /dev/null +++ b/src/opengl/processors/GLColorMatrixFragmentProcessor.cpp @@ -0,0 +1,64 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLColorMatrixFragmentProcessor.h" + +namespace tgfx { +std::unique_ptr ColorMatrixFragmentProcessor::Make( + const std::array& matrix) { + return std::unique_ptr(new GLColorMatrixFragmentProcessor(matrix)); +} + +GLColorMatrixFragmentProcessor::GLColorMatrixFragmentProcessor(const std::array& matrix) + : ColorMatrixFragmentProcessor(matrix) { +} + +void GLColorMatrixFragmentProcessor::emitCode(EmitArgs& args) const { + auto* uniformHandler = args.uniformHandler; + auto matrixUniformName = + uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4x4, "Matrix"); + auto vectorUniformName = + uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "Vector"); + + auto* fragBuilder = args.fragBuilder; + fragBuilder->codeAppendf("%s = vec4(%s.rgb / max(%s.a, 9.9999997473787516e-05), %s.a);", + args.outputColor.c_str(), args.inputColor.c_str(), + args.inputColor.c_str(), args.inputColor.c_str()); + fragBuilder->codeAppendf("%s = %s * %s + %s;", args.outputColor.c_str(), + matrixUniformName.c_str(), args.outputColor.c_str(), + vectorUniformName.c_str()); + fragBuilder->codeAppendf("%s = clamp(%s, 0.0, 1.0);", args.outputColor.c_str(), + args.outputColor.c_str()); + fragBuilder->codeAppendf("%s.rgb *= %s.a;", args.outputColor.c_str(), args.outputColor.c_str()); +} + +void GLColorMatrixFragmentProcessor::onSetData(UniformBuffer* uniformBuffer) const { + float m[] = { + matrix[0], matrix[5], matrix[10], matrix[15], matrix[1], matrix[6], matrix[11], matrix[16], + matrix[2], matrix[7], matrix[12], matrix[17], matrix[3], matrix[8], matrix[13], matrix[18], + }; + float vec[] = { + matrix[4], + matrix[9], + matrix[14], + matrix[19], + }; + uniformBuffer->setData("Matrix", m); + 
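+  // Entries 4, 9, 14 and 19 of the row-major 4x5 color matrix are the constant offsets. The 4x4 linear part above was re-packed column-major to match the GLSL mat4 layout; the offsets are uploaded separately as the Vector uniform.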
uniformBuffer->setData("Vector", vec); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLColorMatrixFragmentProcessor.h b/src/opengl/processors/GLColorMatrixFragmentProcessor.h new file mode 100644 index 00000000..bf9c5c41 --- /dev/null +++ b/src/opengl/processors/GLColorMatrixFragmentProcessor.h @@ -0,0 +1,35 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include "gpu/processors/ColorMatrixFragmentProcessor.h" + +namespace tgfx { +class GLColorMatrixFragmentProcessor : public ColorMatrixFragmentProcessor { + public: + explicit GLColorMatrixFragmentProcessor(const std::array& matrix); + + void emitCode(EmitArgs& args) const override; + + private: + void onSetData(UniformBuffer* uniformBuffer) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLConstColorProcessor.cpp b/src/opengl/processors/GLConstColorProcessor.cpp new file mode 100644 index 00000000..7f98f837 --- /dev/null +++ b/src/opengl/processors/GLConstColorProcessor.cpp @@ -0,0 +1,49 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
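As a usage note for the color matrix processor defined above (assuming the stripped template argument is std::array<float, 20>, which matches the 20 matrix entries consumed by onSetData()), a grayscale filter could be expressed like this hypothetical example:

```cpp
#include <array>

// Rec. 709 luma grayscale: the 4x5 row-major matrix maps [R' G' B' A'] = M * [R G B A 1],
// so every color channel becomes the luma of the input while alpha passes through unchanged.
const std::array<float, 20> grayscale = {
    0.2126f, 0.7152f, 0.0722f, 0.0f, 0.0f,   // R'
    0.2126f, 0.7152f, 0.0722f, 0.0f, 0.0f,   // G'
    0.2126f, 0.7152f, 0.0722f, 0.0f, 0.0f,   // B'
    0.0f,    0.0f,    0.0f,    1.0f, 0.0f};  // A'
auto processor = tgfx::ColorMatrixFragmentProcessor::Make(grayscale);
```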
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLConstColorProcessor.h" + +namespace tgfx { +std::unique_ptr ConstColorProcessor::Make(Color color, InputMode mode) { + return std::unique_ptr(new GLConstColorProcessor(color, mode)); +} + +GLConstColorProcessor::GLConstColorProcessor(Color color, InputMode mode) + : ConstColorProcessor(color, mode) { +} + +void GLConstColorProcessor::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto colorName = args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "Color"); + fragBuilder->codeAppendf("%s = %s;", args.outputColor.c_str(), colorName.c_str()); + switch (inputMode) { + case InputMode::Ignore: + break; + case InputMode::ModulateRGBA: + fragBuilder->codeAppendf("%s *= %s;", args.outputColor.c_str(), args.inputColor.c_str()); + break; + case InputMode::ModulateA: + fragBuilder->codeAppendf("%s *= %s.a;", args.outputColor.c_str(), args.inputColor.c_str()); + break; + } +} + +void GLConstColorProcessor::onSetData(UniformBuffer* uniformBuffer) const { + uniformBuffer->setData("Color", color); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLConstColorProcessor.h b/src/opengl/processors/GLConstColorProcessor.h new file mode 100644 index 00000000..22757ff0 --- /dev/null +++ b/src/opengl/processors/GLConstColorProcessor.h @@ -0,0 +1,35 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/ConstColorProcessor.h" +#include "tgfx/core/Color.h" + +namespace tgfx { +class GLConstColorProcessor : public ConstColorProcessor { + public: + GLConstColorProcessor(Color color, InputMode mode); + + void emitCode(EmitArgs& args) const override; + + private: + void onSetData(UniformBuffer* uniformBuffer) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLDefaultGeometryProcessor.cpp b/src/opengl/processors/GLDefaultGeometryProcessor.cpp new file mode 100644 index 00000000..1c6bf2ee --- /dev/null +++ b/src/opengl/processors/GLDefaultGeometryProcessor.cpp @@ -0,0 +1,69 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLDefaultGeometryProcessor.h" + +namespace tgfx { +std::unique_ptr DefaultGeometryProcessor::Make( + Color color, int width, int height, const Matrix& viewMatrix, const Matrix& localMatrix) { + return std::unique_ptr( + new GLDefaultGeometryProcessor(color, width, height, viewMatrix, localMatrix)); +} + +GLDefaultGeometryProcessor::GLDefaultGeometryProcessor(Color color, int width, int height, + const Matrix& viewMatrix, + const Matrix& localMatrix) + : DefaultGeometryProcessor(color, width, height, viewMatrix, localMatrix) { +} + +void GLDefaultGeometryProcessor::emitCode(EmitArgs& args) const { + auto* vertBuilder = args.vertBuilder; + auto* fragBuilder = args.fragBuilder; + auto* varyingHandler = args.varyingHandler; + auto* uniformHandler = args.uniformHandler; + + varyingHandler->emitAttributes(*this); + + auto matrixName = + args.uniformHandler->addUniform(ShaderFlags::Vertex, SLType::Float3x3, "Matrix"); + std::string positionName = "position"; + vertBuilder->codeAppendf("vec2 %s = (%s * vec3(%s, 1.0)).xy;", positionName.c_str(), + matrixName.c_str(), position.name().c_str()); + + emitTransforms(vertBuilder, varyingHandler, uniformHandler, position.asShaderVar(), + args.fpCoordTransformHandler); + + auto coverageVar = varyingHandler->addVarying("Coverage", SLType::Float); + vertBuilder->codeAppendf("%s = %s;", coverageVar.vsOut().c_str(), coverage.name().c_str()); + fragBuilder->codeAppendf("%s = vec4(%s);", args.outputCoverage.c_str(), + coverageVar.fsIn().c_str()); + + auto colorName = args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "Color"); + fragBuilder->codeAppendf("%s = %s;", args.outputColor.c_str(), colorName.c_str()); + + // Emit the vertex position to the hardware in the normalized window coordinates it expects. + args.vertBuilder->emitNormalizedPosition(positionName); +} + +void GLDefaultGeometryProcessor::setData(UniformBuffer* uniformBuffer, + FPCoordTransformIter* transformIter) const { + setTransformDataHelper(localMatrix, uniformBuffer, transformIter); + uniformBuffer->setData("Color", color); + uniformBuffer->setData("Matrix", viewMatrix); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLDefaultGeometryProcessor.h b/src/opengl/processors/GLDefaultGeometryProcessor.h new file mode 100644 index 00000000..867dc895 --- /dev/null +++ b/src/opengl/processors/GLDefaultGeometryProcessor.h @@ -0,0 +1,34 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/DefaultGeometryProcessor.h" + +namespace tgfx { +class GLDefaultGeometryProcessor : public DefaultGeometryProcessor { + public: + GLDefaultGeometryProcessor(Color color, int width, int height, const Matrix& viewMatrix, + const Matrix& localMatrix); + + void emitCode(EmitArgs& args) const override; + + void setData(UniformBuffer* uniformBuffer, FPCoordTransformIter* transformIter) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLDeviceSpaceTextureEffect.cpp b/src/opengl/processors/GLDeviceSpaceTextureEffect.cpp new file mode 100644 index 00000000..2b4e1bbc --- /dev/null +++ b/src/opengl/processors/GLDeviceSpaceTextureEffect.cpp @@ -0,0 +1,56 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLDeviceSpaceTextureEffect.h" + +namespace tgfx { +std::unique_ptr DeviceSpaceTextureEffect::Make( + std::shared_ptr texture, ImageOrigin deviceOrigin) { + if (texture == nullptr) { + return nullptr; + } + return std::unique_ptr( + new GLDeviceSpaceTextureEffect(std::move(texture), deviceOrigin)); +} + +GLDeviceSpaceTextureEffect::GLDeviceSpaceTextureEffect(std::shared_ptr texture, + ImageOrigin deviceOrigin) + : DeviceSpaceTextureEffect(std::move(texture), deviceOrigin) { +} + +void GLDeviceSpaceTextureEffect::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto* uniformHandler = args.uniformHandler; + auto deviceCoordMatrixName = + uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float3x3, "DeviceCoordMatrix"); + auto scaleName = uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float2, "CoordScale"); + fragBuilder->codeAppendf("vec3 deviceCoord = %s * vec3(gl_FragCoord.xy * %s, 1.0);", + deviceCoordMatrixName.c_str(), scaleName.c_str()); + std::string coordName = "deviceCoord.xy"; + fragBuilder->codeAppendf("%s = ", args.outputColor.c_str()); + fragBuilder->appendTextureLookup((*args.textureSamplers)[0], coordName); + fragBuilder->codeAppend(";"); +} + +void GLDeviceSpaceTextureEffect::onSetData(UniformBuffer* uniformBuffer) const { + float scales[] = {1.f / static_cast(texture->width()), + 1.f / static_cast(texture->height())}; + uniformBuffer->setData("CoordScale", scales); + uniformBuffer->setData("DeviceCoordMatrix", deviceCoordMatrix); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLDeviceSpaceTextureEffect.h b/src/opengl/processors/GLDeviceSpaceTextureEffect.h new file mode 100644 index 00000000..fd2cc705 --- /dev/null +++ b/src/opengl/processors/GLDeviceSpaceTextureEffect.h @@ -0,0 +1,34 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
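A note on the CoordScale uniform set above: gl_FragCoord arrives in window pixels while the texture lookup expects normalized coordinates, so onSetData() uploads (1/width, 1/height). For a 512x256 texture, a fragment at gl_FragCoord (256, 128) is first scaled to (0.5, 0.5) and only then transformed by DeviceCoordMatrix.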
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/DeviceSpaceTextureEffect.h" + +namespace tgfx { +class GLDeviceSpaceTextureEffect : public DeviceSpaceTextureEffect { + public: + GLDeviceSpaceTextureEffect(std::shared_ptr texture, ImageOrigin deviceOrigin); + + void emitCode(EmitArgs& args) const override; + + private: + void onSetData(UniformBuffer* uniformBuffer) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLDualBlurFragmentProcessor.cpp b/src/opengl/processors/GLDualBlurFragmentProcessor.cpp new file mode 100644 index 00000000..b31d1009 --- /dev/null +++ b/src/opengl/processors/GLDualBlurFragmentProcessor.cpp @@ -0,0 +1,103 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLDualBlurFragmentProcessor.h" + +namespace tgfx { +std::unique_ptr DualBlurFragmentProcessor::Make( + DualBlurPassMode passMode, std::unique_ptr processor, Point blurOffset, + Point texelSize) { + if (processor == nullptr) { + return nullptr; + } + return std::unique_ptr( + new GLDualBlurFragmentProcessor(passMode, std::move(processor), blurOffset, texelSize)); +} + +GLDualBlurFragmentProcessor::GLDualBlurFragmentProcessor( + DualBlurPassMode passMode, std::unique_ptr processor, Point blurOffset, + Point texelSize) + : DualBlurFragmentProcessor(passMode, std::move(processor), blurOffset, texelSize) { +} + +void GLDualBlurFragmentProcessor::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto blurOffsetName = + args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float2, "Blur"); + auto texelSizeName = + args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float2, "TexelSize"); + std::string tempColor = "tempColor"; + if (passMode == DualBlurPassMode::Down) { + fragBuilder->codeAppend("const int size = 5;"); + fragBuilder->codeAppendf("vec2 coords[size];"); + fragBuilder->codeAppend("coords[0] = vec2(0.0, 0.0);"); + fragBuilder->codeAppendf("coords[1] = -%s * %s;", texelSizeName.c_str(), + blurOffsetName.c_str()); + fragBuilder->codeAppendf("coords[2] = %s * %s;", texelSizeName.c_str(), blurOffsetName.c_str()); + fragBuilder->codeAppendf("coords[3] = vec2(%s.x, -%s.y) * %s;", texelSizeName.c_str(), + texelSizeName.c_str(), blurOffsetName.c_str()); + fragBuilder->codeAppendf("coords[4] = -vec2(%s.x, -%s.y) * %s;", texelSizeName.c_str(), + texelSizeName.c_str(), blurOffsetName.c_str()); + fragBuilder->codeAppendf("vec4 sum;"); + fragBuilder->codeAppend("for (int i = 0; i < size; i++) {"); + emitChild(0, &tempColor, args, + [](std::string_view coord) { return 
std::string(coord) + " + coords[i]"; }); + fragBuilder->codeAppend("if (i == 0) {"); + fragBuilder->codeAppendf("sum = %s * 4.0;", tempColor.c_str()); + fragBuilder->codeAppend("} else {"); + fragBuilder->codeAppendf("sum += %s;", tempColor.c_str()); + fragBuilder->codeAppend("}"); + fragBuilder->codeAppend("}"); + fragBuilder->codeAppendf("%s = sum / 8.0;", args.outputColor.c_str()); + } else { + fragBuilder->codeAppend("const int size = 8;"); + fragBuilder->codeAppend("vec2 coords[size];"); + fragBuilder->codeAppendf("coords[0] = vec2(-%s.x * 2.0, 0.0) * %s;", texelSizeName.c_str(), + blurOffsetName.c_str()); + fragBuilder->codeAppendf("coords[1] = vec2(-%s.x, %s.y) * %s;", texelSizeName.c_str(), + texelSizeName.c_str(), blurOffsetName.c_str()); + fragBuilder->codeAppendf("coords[2] = vec2(0.0, %s.y * 2.0) * %s;", texelSizeName.c_str(), + blurOffsetName.c_str()); + fragBuilder->codeAppendf("coords[3] = %s * %s;", texelSizeName.c_str(), blurOffsetName.c_str()); + fragBuilder->codeAppendf("coords[4] = vec2(%s.x * 2.0, 0.0) * %s;", texelSizeName.c_str(), + blurOffsetName.c_str()); + fragBuilder->codeAppendf("coords[5] = vec2(%s.x, -%s.y) * %s;", texelSizeName.c_str(), + texelSizeName.c_str(), blurOffsetName.c_str()); + fragBuilder->codeAppendf("coords[6] = vec2(0.0, -%s.y * 2.0) * %s;", texelSizeName.c_str(), + blurOffsetName.c_str()); + fragBuilder->codeAppendf("coords[7] = vec2(-%s.x, -%s.y) * %s;", texelSizeName.c_str(), + texelSizeName.c_str(), blurOffsetName.c_str()); + fragBuilder->codeAppend("vec4 sum = vec4(0.0);"); + fragBuilder->codeAppend("for (int i = 0; i < size; i++) {"); + emitChild(0, &tempColor, args, + [](std::string_view coord) { return std::string(coord) + " + coords[i]"; }); + fragBuilder->codeAppend("if (mod(float(i), 2.0) == 0.0) {"); + fragBuilder->codeAppendf("sum += %s;", tempColor.c_str()); + fragBuilder->codeAppend("} else {"); + fragBuilder->codeAppendf("sum += %s * 2.0;", tempColor.c_str()); + fragBuilder->codeAppend("}"); + fragBuilder->codeAppend("}"); + fragBuilder->codeAppendf("%s = sum / 12.0;", args.outputColor.c_str()); + } +} + +void GLDualBlurFragmentProcessor::onSetData(UniformBuffer* uniformBuffer) const { + uniformBuffer->setData("Blur", blurOffset); + uniformBuffer->setData("TexelSize", texelSize); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLDualBlurFragmentProcessor.h b/src/opengl/processors/GLDualBlurFragmentProcessor.h new file mode 100644 index 00000000..8864ed48 --- /dev/null +++ b/src/opengl/processors/GLDualBlurFragmentProcessor.h @@ -0,0 +1,36 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
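In the Down branch above, the centre tap carries weight 4 and the four diagonal taps weight 1 each (hence sum / 8.0); in the Up branch the eight taps alternate between weight 1 and 2 (hence sum / 12.0). These are the standard dual-filter (Kawase) kernels. How the two pass modes get chained is outside this file; under the usual scheme each Down pass renders at half the previous resolution and each Up pass doubles it back, roughly like this sketch (the target type and draw helper are hypothetical, not tgfx API):

```cpp
#include <vector>

// Hypothetical outline (not tgfx API) of chaining the two pass modes across a chain of
// progressively smaller render targets: Down passes walk toward the smallest level,
// Up passes walk back up to full resolution.
struct BlurTarget;  // stand-in for whatever render target type the caller uses
enum class PassMode { Down, Up };
void DrawBlurPass(PassMode mode, BlurTarget* source, BlurTarget* destination);  // hypothetical

void DualBlur(const std::vector<BlurTarget*>& targets, int levels) {
  for (int i = 0; i < levels; i++) {
    DrawBlurPass(PassMode::Down, targets[i], targets[i + 1]);  // each level is half the size
  }
  for (int i = levels; i > 0; i--) {
    DrawBlurPass(PassMode::Up, targets[i], targets[i - 1]);
  }
}
```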
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/DualBlurFragmentProcessor.h" + +namespace tgfx { +class GLDualBlurFragmentProcessor : public DualBlurFragmentProcessor { + public: + GLDualBlurFragmentProcessor(DualBlurPassMode passMode, + std::unique_ptr processor, Point blurOffset, + Point texelSize); + + void emitCode(EmitArgs& args) const override; + + private: + void onSetData(UniformBuffer* uniformBuffer) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLDualIntervalGradientColorizer.cpp b/src/opengl/processors/GLDualIntervalGradientColorizer.cpp new file mode 100644 index 00000000..dc94d320 --- /dev/null +++ b/src/opengl/processors/GLDualIntervalGradientColorizer.cpp @@ -0,0 +1,83 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLDualIntervalGradientColorizer.h" + +namespace tgfx { +std::unique_ptr DualIntervalGradientColorizer::Make( + Color c0, Color c1, Color c2, Color c3, float threshold) { + Color scale01; + // Derive scale and biases from the 4 colors and threshold + for (int i = 0; i < 4; ++i) { + auto vc0 = c0[i]; + auto vc1 = c1[i]; + scale01[i] = (vc1 - vc0) / threshold; + } + // bias01 = c0 + + Color scale23; + Color bias23; + for (int i = 0; i < 4; ++i) { + auto vc2 = c2[i]; + auto vc3 = c3[i]; + scale23[i] = (vc3 - vc2) / (1 - threshold); + bias23[i] = vc2 - threshold * scale23[i]; + } + + return std::unique_ptr( + new GLDualIntervalGradientColorizer(scale01, c0, scale23, bias23, threshold)); +} + +GLDualIntervalGradientColorizer::GLDualIntervalGradientColorizer(Color scale01, Color bias01, + Color scale23, Color bias23, + float threshold) + : DualIntervalGradientColorizer(scale01, bias01, scale23, bias23, threshold) { +} + +void GLDualIntervalGradientColorizer::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto scale01Name = + args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "scale01"); + auto bias01Name = + args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "bias01"); + auto scale23Name = + args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "scale23"); + auto bias23Name = + args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "bias23"); + auto thresholdName = + args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float, "threshold"); + fragBuilder->codeAppendf("float t = %s.x;", args.inputColor.c_str()); + fragBuilder->codeAppend("vec4 scale, bias;"); + fragBuilder->codeAppendf("if (t < %s) {", thresholdName.c_str()); + 
fragBuilder->codeAppendf("scale = %s;", scale01Name.c_str()); + fragBuilder->codeAppendf("bias = %s;", bias01Name.c_str()); + fragBuilder->codeAppend("} else {"); + fragBuilder->codeAppendf("scale = %s;", scale23Name.c_str()); + fragBuilder->codeAppendf("bias = %s;", bias23Name.c_str()); + fragBuilder->codeAppend("}"); + fragBuilder->codeAppendf("%s = vec4(t * scale + bias);", args.outputColor.c_str()); +} + +void GLDualIntervalGradientColorizer::onSetData(UniformBuffer* uniformBuffer) const { + uniformBuffer->setData("scale01", scale01); + uniformBuffer->setData("bias01", bias01); + uniformBuffer->setData("scale23", scale23); + uniformBuffer->setData("bias23", bias23); + uniformBuffer->setData("threshold", threshold); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLDualIntervalGradientColorizer.h b/src/opengl/processors/GLDualIntervalGradientColorizer.h new file mode 100644 index 00000000..f7fa6b84 --- /dev/null +++ b/src/opengl/processors/GLDualIntervalGradientColorizer.h @@ -0,0 +1,36 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/DualIntervalGradientColorizer.h" +#include "tgfx/core/Color.h" + +namespace tgfx { +class GLDualIntervalGradientColorizer : public DualIntervalGradientColorizer { + public: + GLDualIntervalGradientColorizer(Color scale01, Color bias01, Color scale23, Color bias23, + float threshold); + + void emitCode(EmitArgs& args) const override; + + private: + void onSetData(UniformBuffer*) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLEllipseGeometryProcessor.cpp b/src/opengl/processors/GLEllipseGeometryProcessor.cpp new file mode 100644 index 00000000..5046989e --- /dev/null +++ b/src/opengl/processors/GLEllipseGeometryProcessor.cpp @@ -0,0 +1,130 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
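Plugging the endpoints into the uniforms derived in Make() confirms the construction: at t = 0 the first interval evaluates to bias01 = c0, at t = threshold it gives (c1 - c0) + c0 = c1, while the second interval gives threshold * scale23 + (c2 - threshold * scale23) = c2, and at t = 1 the second interval reaches c3.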
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLEllipseGeometryProcessor.h" + +namespace tgfx { +std::unique_ptr EllipseGeometryProcessor::Make( + int width, int height, bool stroke, bool useScale, const Matrix& localMatrix) { + return std::unique_ptr( + new GLEllipseGeometryProcessor(width, height, stroke, useScale, localMatrix)); +} + +GLEllipseGeometryProcessor::GLEllipseGeometryProcessor(int width, int height, bool stroke, + bool useScale, const Matrix& localMatrix) + : EllipseGeometryProcessor(width, height, stroke, useScale, localMatrix) { +} + +void GLEllipseGeometryProcessor::emitCode(EmitArgs& args) const { + auto* vertBuilder = args.vertBuilder; + auto* varyingHandler = args.varyingHandler; + auto* uniformHandler = args.uniformHandler; + + // emit attributes + varyingHandler->emitAttributes(*this); + + auto offsetType = useScale ? SLType::Float3 : SLType::Float2; + auto ellipseOffsets = varyingHandler->addVarying("EllipseOffsets", offsetType); + vertBuilder->codeAppendf("%s = %s;", ellipseOffsets.vsOut().c_str(), + inEllipseOffset.name().c_str()); + + auto ellipseRadii = varyingHandler->addVarying("EllipseRadii", SLType::Float4); + vertBuilder->codeAppendf("%s = %s;", ellipseRadii.vsOut().c_str(), inEllipseRadii.name().c_str()); + + auto* fragBuilder = args.fragBuilder; + // setup pass through color + auto color = varyingHandler->addVarying("Color", SLType::Float4); + vertBuilder->codeAppendf("%s = %s;", color.vsOut().c_str(), inColor.name().c_str()); + fragBuilder->codeAppendf("%s = %s;", args.outputColor.c_str(), color.fsIn().c_str()); + + // Setup position + args.vertBuilder->emitNormalizedPosition(inPosition.name()); + // emit transforms + emitTransforms(vertBuilder, varyingHandler, uniformHandler, inPosition.asShaderVar(), + args.fpCoordTransformHandler); + // For stroked ellipses, we use the full ellipse equation (x^2/a^2 + y^2/b^2 = 1) + // to compute both the edges because we need two separate test equations for + // the single offset. + // For filled ellipses we can use a unit circle equation (x^2 + y^2 = 1), and warp + // the distance by the gradient, non-uniformly scaled by the inverse of the + // ellipse size. + + // On medium precision devices, we scale the denominator of the distance equation + // before taking the inverse square root to minimize the chance that we're dividing + // by zero, then we scale the result back. + + // for outer curve + fragBuilder->codeAppendf("vec2 offset = %s.xy;", ellipseOffsets.fsIn().c_str()); + if (stroke) { + fragBuilder->codeAppendf("offset *= %s.xy;", ellipseRadii.fsIn().c_str()); + } + fragBuilder->codeAppend("float test = dot(offset, offset) - 1.0;"); + if (useScale) { + fragBuilder->codeAppendf("vec2 grad = 2.0*offset*(%s.z*%s.xy);", ellipseOffsets.fsIn().c_str(), + ellipseRadii.fsIn().c_str()); + } else { + fragBuilder->codeAppendf("vec2 grad = 2.0*offset*%s.xy;", ellipseRadii.fsIn().c_str()); + } + fragBuilder->codeAppend("float grad_dot = dot(grad, grad);"); + + // avoid calling inversesqrt on zero. 
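+  // 1.1755e-38 is roughly the smallest normal 32-bit float and 6.1036e-5 (2^-14) the smallest normal half float, so the clamp below only alters values that would otherwise overflow inversesqrt on mediump hardware.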
+ if (args.caps->floatIs32Bits) { + fragBuilder->codeAppend("grad_dot = max(grad_dot, 1.1755e-38);"); + } else { + fragBuilder->codeAppend("grad_dot = max(grad_dot, 6.1036e-5);"); + } + if (useScale) { + fragBuilder->codeAppendf("float invlen = %s.z*inversesqrt(grad_dot);", + ellipseOffsets.fsIn().c_str()); + } else { + fragBuilder->codeAppend("float invlen = inversesqrt(grad_dot);"); + } + fragBuilder->codeAppend("float edgeAlpha = clamp(0.5-test*invlen, 0.0, 1.0);"); + + // for inner curve + if (stroke) { + fragBuilder->codeAppendf("offset = %s.xy*%s.zw;", ellipseOffsets.fsIn().c_str(), + ellipseRadii.fsIn().c_str()); + fragBuilder->codeAppend("test = dot(offset, offset) - 1.0;"); + if (useScale) { + fragBuilder->codeAppendf("grad = 2.0*offset*(%s.z*%s.zw);", ellipseOffsets.fsIn().c_str(), + ellipseRadii.fsIn().c_str()); + } else { + fragBuilder->codeAppendf("grad = 2.0*offset*%s.zw;", ellipseRadii.fsIn().c_str()); + } + fragBuilder->codeAppend("grad_dot = dot(grad, grad);"); + if (!args.caps->floatIs32Bits) { + fragBuilder->codeAppend("grad_dot = max(grad_dot, 6.1036e-5);"); + } + if (useScale) { + fragBuilder->codeAppendf("invlen = %s.z*inversesqrt(grad_dot);", + ellipseOffsets.fsIn().c_str()); + } else { + fragBuilder->codeAppend("invlen = inversesqrt(grad_dot);"); + } + fragBuilder->codeAppend("edgeAlpha *= saturate(0.5+test*invlen);"); + } + + fragBuilder->codeAppendf("%s = vec4(edgeAlpha);", args.outputCoverage.c_str()); +} + +void GLEllipseGeometryProcessor::setData(UniformBuffer* uniformBuffer, + FPCoordTransformIter* transformIter) const { + setTransformDataHelper(localMatrix, uniformBuffer, transformIter); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLEllipseGeometryProcessor.h b/src/opengl/processors/GLEllipseGeometryProcessor.h new file mode 100644 index 00000000..206f8120 --- /dev/null +++ b/src/opengl/processors/GLEllipseGeometryProcessor.h @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/EllipseGeometryProcessor.h" + +namespace tgfx { +class GLEllipseGeometryProcessor : public EllipseGeometryProcessor { + public: + GLEllipseGeometryProcessor(int width, int height, bool stroke, bool useScale, + const Matrix& localMatrix); + + void emitCode(EmitArgs& args) const override; + + void setData(UniformBuffer* uniformBuffer, FPCoordTransformIter* transformIter) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLEmptyXferProcessor.cpp b/src/opengl/processors/GLEmptyXferProcessor.cpp new file mode 100644 index 00000000..83c15dc6 --- /dev/null +++ b/src/opengl/processors/GLEmptyXferProcessor.cpp @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLEmptyXferProcessor.h" + +namespace tgfx { +const EmptyXferProcessor* EmptyXferProcessor::GetInstance() { + static auto& xferProcessor = *new GLEmptyXferProcessor(); + return &xferProcessor; +} + +void GLEmptyXferProcessor::emitCode(const EmitArgs& args) const { + args.fragBuilder->codeAppendf("%s = %s * %s;", args.outputColor.c_str(), args.inputColor.c_str(), + args.inputCoverage.c_str()); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLEmptyXferProcessor.h b/src/opengl/processors/GLEmptyXferProcessor.h new file mode 100644 index 00000000..42d3f930 --- /dev/null +++ b/src/opengl/processors/GLEmptyXferProcessor.h @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/EmptyXferProcessor.h" + +namespace tgfx { +class GLEmptyXferProcessor : public EmptyXferProcessor { + public: + void emitCode(const EmitArgs& args) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLLinearGradientLayout.cpp b/src/opengl/processors/GLLinearGradientLayout.cpp new file mode 100644 index 00000000..e73938ee --- /dev/null +++ b/src/opengl/processors/GLLinearGradientLayout.cpp @@ -0,0 +1,35 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLLinearGradientLayout.h" + +namespace tgfx { +std::unique_ptr LinearGradientLayout::Make(Matrix matrix) { + return std::unique_ptr(new GLLinearGradientLayout(matrix)); +} + +GLLinearGradientLayout::GLLinearGradientLayout(Matrix matrix) : LinearGradientLayout(matrix) { +} + +void GLLinearGradientLayout::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + fragBuilder->codeAppendf("float t = %s.x + 1.0000000000000001e-05;", + (*args.transformedCoords)[0].name().c_str()); + fragBuilder->codeAppendf("%s = vec4(t, 1.0, 0.0, 0.0);", args.outputColor.c_str()); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLLinearGradientLayout.h b/src/opengl/processors/GLLinearGradientLayout.h new file mode 100644 index 00000000..09d97749 --- /dev/null +++ b/src/opengl/processors/GLLinearGradientLayout.h @@ -0,0 +1,30 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/LinearGradientLayout.h" + +namespace tgfx { +class GLLinearGradientLayout : public LinearGradientLayout { + public: + explicit GLLinearGradientLayout(Matrix matrix); + + void emitCode(EmitArgs& args) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLLumaColorFilterEffect.cpp b/src/opengl/processors/GLLumaColorFilterEffect.cpp new file mode 100644 index 00000000..49302a09 --- /dev/null +++ b/src/opengl/processors/GLLumaColorFilterEffect.cpp @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLLumaColorFilterEffect.h" + +namespace tgfx { +std::unique_ptr LumaColorFilterEffect::Make() { + return std::make_unique(); +} + +void GLLumaColorFilterEffect::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + fragBuilder->codeAppendf( + "%s = vec4(0.0, 0.0, 0.0, clamp(dot(vec3(0.21260000000000001, 0.71519999999999995, 0.0722), " + "%s.rgb), 0.0, 1.0));", + args.outputColor.c_str(), args.inputColor.c_str()); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLLumaColorFilterEffect.h b/src/opengl/processors/GLLumaColorFilterEffect.h new file mode 100644 index 00000000..e27f85c9 --- /dev/null +++ b/src/opengl/processors/GLLumaColorFilterEffect.h @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#pragma once
+
+#include "gpu/processors/LumaColorFilterEffect.h"
+
+namespace tgfx {
+class GLLumaColorFilterEffect : public LumaColorFilterEffect {
+ public:
+  void emitCode(EmitArgs& args) const override;
+};
+} // namespace tgfx
diff --git a/src/opengl/processors/GLPorterDuffXferProcessor.cpp b/src/opengl/processors/GLPorterDuffXferProcessor.cpp
new file mode 100644
index 00000000..00061fc4
--- /dev/null
+++ b/src/opengl/processors/GLPorterDuffXferProcessor.cpp
@@ -0,0 +1,92 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "GLPorterDuffXferProcessor.h"
+#include "opengl/GLBlend.h"
+
+namespace tgfx {
+std::unique_ptr PorterDuffXferProcessor::Make(BlendMode blend) {
+  return std::unique_ptr(new GLPorterDuffXferProcessor(blend));
+}
+
+GLPorterDuffXferProcessor::GLPorterDuffXferProcessor(tgfx::BlendMode blend)
+    : PorterDuffXferProcessor(blend) {
+}
+
+void GLPorterDuffXferProcessor::emitCode(const EmitArgs& args) const {
+  auto* fragBuilder = args.fragBuilder;
+  auto* uniformHandler = args.uniformHandler;
+  const auto& dstColor = fragBuilder->dstColor();
+
+  if (args.dstTextureSamplerHandle.isValid()) {
+    // We don't think any shaders actually output negative coverage, but just as a safety
+    // check for floating point precision errors we compare with <= here. We only check the
+    // rgb values of the coverage, since the alpha may not have been set when using LCD. If
+    // we are using single-channel coverage, alpha will equal rgb anyway.
+    //
+    // The discard here also helps when batching text draws together that need to read from
+    // a dst copy for blends. Though this only helps the case where the outer bounding boxes
+    // of the letters overlap while the actual parts of the text do not.
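+    // Roughly, the snippet emitted below reads as (identifier names are illustrative):
+    //   if (coverage.r <= 0.0 && coverage.g <= 0.0 && coverage.b <= 0.0) { discard; }
+    //   vec2 _dstTexCoord = (gl_FragCoord.xy - DstTextureUpperLeft) * DstTextureCoordScale;
+    //   vec4 dstColor = <texture lookup of the dst copy at _dstTexCoord>;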
+ fragBuilder->codeAppendf("if (%s.r <= 0.0 && %s.g <= 0.0 && %s.b <= 0.0) {", + args.inputCoverage.c_str(), args.inputCoverage.c_str(), + args.inputCoverage.c_str()); + fragBuilder->codeAppend("discard;"); + fragBuilder->codeAppend("}"); + + auto dstTopLeftName = + uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float2, "DstTextureUpperLeft"); + auto dstCoordScaleName = + uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float2, "DstTextureCoordScale"); + + fragBuilder->codeAppend("// Read color from copy of the destination.\n"); + std::string dstTexCoord = "_dstTexCoord"; + fragBuilder->codeAppendf("vec2 %s = (gl_FragCoord.xy - %s) * %s;", dstTexCoord.c_str(), + dstTopLeftName.c_str(), dstCoordScaleName.c_str()); + + fragBuilder->codeAppendf("vec4 %s = ", dstColor.c_str()); + fragBuilder->appendTextureLookup(args.dstTextureSamplerHandle, dstTexCoord); + fragBuilder->codeAppend(";"); + } + + const char* outColor = "localOutputColor"; + fragBuilder->codeAppendf("vec4 %s;", outColor); + AppendMode(fragBuilder, args.inputColor, dstColor, outColor, blend); + fragBuilder->codeAppendf("%s = %s * %s + (vec4(1.0) - %s) * %s;", outColor, + args.inputCoverage.c_str(), outColor, args.inputCoverage.c_str(), + dstColor.c_str()); + fragBuilder->codeAppendf("%s = %s;", args.outputColor.c_str(), outColor); +} + +void GLPorterDuffXferProcessor::setData(UniformBuffer* uniformBuffer, const Texture* dstTexture, + const Point& dstTextureOffset) const { + if (dstTexture) { + uniformBuffer->setData("DstTextureUpperLeft", dstTextureOffset); + int width; + int height; + if (dstTexture->getSampler()->type() == TextureType::Rectangle) { + width = 1; + height = 1; + } else { + width = dstTexture->width(); + height = dstTexture->height(); + } + float scales[] = {1.f / static_cast(width), 1.f / static_cast(height)}; + uniformBuffer->setData("DstTextureCoordScale", scales); + } +} +} // namespace tgfx diff --git a/src/opengl/processors/GLPorterDuffXferProcessor.h b/src/opengl/processors/GLPorterDuffXferProcessor.h new file mode 100644 index 00000000..69049cd3 --- /dev/null +++ b/src/opengl/processors/GLPorterDuffXferProcessor.h @@ -0,0 +1,34 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/PorterDuffXferProcessor.h" + +namespace tgfx { +class GLPorterDuffXferProcessor : public PorterDuffXferProcessor { + public: + explicit GLPorterDuffXferProcessor(BlendMode blend); + + void emitCode(const EmitArgs& args) const override; + + void setData(UniformBuffer* uniformBuffer, const Texture* dstTexture, + const Point& dstTextureOffset) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLQuadPerEdgeAAGeometryProcessor.cpp b/src/opengl/processors/GLQuadPerEdgeAAGeometryProcessor.cpp new file mode 100644 index 00000000..08709a3f --- /dev/null +++ b/src/opengl/processors/GLQuadPerEdgeAAGeometryProcessor.cpp @@ -0,0 +1,69 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLQuadPerEdgeAAGeometryProcessor.h" + +namespace tgfx { +std::unique_ptr QuadPerEdgeAAGeometryProcessor::Make( + int width, int height, AAType aa, bool hasColor) { + return std::unique_ptr( + new GLQuadPerEdgeAAGeometryProcessor(width, height, aa, hasColor)); +} + +GLQuadPerEdgeAAGeometryProcessor::GLQuadPerEdgeAAGeometryProcessor(int width, int height, AAType aa, + bool hasColor) + : QuadPerEdgeAAGeometryProcessor(width, height, aa, hasColor) { +} + +void GLQuadPerEdgeAAGeometryProcessor::emitCode(EmitArgs& args) const { + auto* vertBuilder = args.vertBuilder; + auto* fragBuilder = args.fragBuilder; + auto* varyingHandler = args.varyingHandler; + auto* uniformHandler = args.uniformHandler; + + varyingHandler->emitAttributes(*this); + + emitTransforms(vertBuilder, varyingHandler, uniformHandler, localCoord.asShaderVar(), + args.fpCoordTransformHandler); + + if (aa == AAType::Coverage) { + auto coverage = varyingHandler->addVarying("coverage", SLType::Float); + vertBuilder->codeAppendf("%s = %s.z;", coverage.vsOut().c_str(), position.name().c_str()); + fragBuilder->codeAppendf("%s = vec4(%s);", args.outputCoverage.c_str(), + coverage.fsIn().c_str()); + } else { + fragBuilder->codeAppendf("%s = vec4(1.0);", args.outputCoverage.c_str()); + } + + if (color.isInitialized()) { + auto colorVar = varyingHandler->addVarying("Color", SLType::Float4); + vertBuilder->codeAppendf("%s = %s;", colorVar.vsOut().c_str(), color.name().c_str()); + fragBuilder->codeAppendf("%s = %s;", args.outputColor.c_str(), colorVar.fsIn().c_str()); + } else { + fragBuilder->codeAppendf("%s = vec4(1.0);", args.outputColor.c_str()); + } + + // Emit the vertex position to the hardware in the normalized window coordinates it expects. 
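+  // In pixel space, a position (x, y) on a width x height render target corresponds to normalized
+  // device coordinates of roughly (2.0 * x / width - 1.0, 2.0 * y / height - 1.0), with the y axis
+  // flipped when the render target origin is at the top-left.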
+ args.vertBuilder->emitNormalizedPosition(position.name()); +} + +void GLQuadPerEdgeAAGeometryProcessor::setData(UniformBuffer* uniformBuffer, + FPCoordTransformIter* transformIter) const { + setTransformDataHelper(Matrix::I(), uniformBuffer, transformIter); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLQuadPerEdgeAAGeometryProcessor.h b/src/opengl/processors/GLQuadPerEdgeAAGeometryProcessor.h new file mode 100644 index 00000000..ff49a6a2 --- /dev/null +++ b/src/opengl/processors/GLQuadPerEdgeAAGeometryProcessor.h @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/QuadPerEdgeAAGeometryProcessor.h" + +namespace tgfx { +class GLQuadPerEdgeAAGeometryProcessor : public QuadPerEdgeAAGeometryProcessor { + public: + GLQuadPerEdgeAAGeometryProcessor(int width, int height, AAType aa, bool hasColor); + + void emitCode(EmitArgs& args) const override; + + void setData(UniformBuffer* uniformBuffer, FPCoordTransformIter* transformIter) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLRadialGradientLayout.cpp b/src/opengl/processors/GLRadialGradientLayout.cpp new file mode 100644 index 00000000..de8600d5 --- /dev/null +++ b/src/opengl/processors/GLRadialGradientLayout.cpp @@ -0,0 +1,34 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLRadialGradientLayout.h" + +namespace tgfx { +std::unique_ptr RadialGradientLayout::Make(Matrix matrix) { + return std::unique_ptr(new GLRadialGradientLayout(matrix)); +} + +GLRadialGradientLayout::GLRadialGradientLayout(Matrix matrix) : RadialGradientLayout(matrix) { +} + +void GLRadialGradientLayout::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + fragBuilder->codeAppendf("float t = length(%s);", (*args.transformedCoords)[0].name().c_str()); + fragBuilder->codeAppendf("%s = vec4(t, 1.0, 0.0, 0.0);", args.outputColor.c_str()); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLRadialGradientLayout.h b/src/opengl/processors/GLRadialGradientLayout.h new file mode 100644 index 00000000..9dbfe115 --- /dev/null +++ b/src/opengl/processors/GLRadialGradientLayout.h @@ -0,0 +1,30 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/RadialGradientLayout.h" + +namespace tgfx { +class GLRadialGradientLayout : public RadialGradientLayout { + public: + explicit GLRadialGradientLayout(Matrix matrix); + + void emitCode(EmitArgs& args) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLSeriesFragmentProcessor.cpp b/src/opengl/processors/GLSeriesFragmentProcessor.cpp new file mode 100644 index 00000000..31266dcb --- /dev/null +++ b/src/opengl/processors/GLSeriesFragmentProcessor.cpp @@ -0,0 +1,51 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLSeriesFragmentProcessor.h" + +namespace tgfx { +std::unique_ptr SeriesFragmentProcessor::Make( + std::unique_ptr* children, int count) { + if (!count) { + return nullptr; + } + if (1 == count) { + return std::move(children[0]); + } + return std::unique_ptr(new GLSeriesFragmentProcessor(children, count)); +} + +GLSeriesFragmentProcessor::GLSeriesFragmentProcessor(std::unique_ptr* children, + int count) + : SeriesFragmentProcessor(children, count) { +} + +void GLSeriesFragmentProcessor::emitCode(EmitArgs& args) const { + // First guy's input might be nil. + std::string temp = "out0"; + emitChild(0, args.inputColor, &temp, args); + std::string input = temp; + for (size_t i = 1; i < numChildProcessors() - 1; ++i) { + temp = "out" + std::to_string(i); + emitChild(i, input, &temp, args); + input = temp; + } + // Last guy writes to our output variable. + emitChild(numChildProcessors() - 1, input, args); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLSeriesFragmentProcessor.h b/src/opengl/processors/GLSeriesFragmentProcessor.h new file mode 100644 index 00000000..f9d66220 --- /dev/null +++ b/src/opengl/processors/GLSeriesFragmentProcessor.h @@ -0,0 +1,30 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/SeriesFragmentProcessor.h" + +namespace tgfx { +class GLSeriesFragmentProcessor : public SeriesFragmentProcessor { + public: + GLSeriesFragmentProcessor(std::unique_ptr* children, int count); + + void emitCode(EmitArgs& args) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLSingleIntervalGradientColorizer.cpp b/src/opengl/processors/GLSingleIntervalGradientColorizer.cpp new file mode 100644 index 00000000..85865b27 --- /dev/null +++ b/src/opengl/processors/GLSingleIntervalGradientColorizer.cpp @@ -0,0 +1,45 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLSingleIntervalGradientColorizer.h" + +namespace tgfx { +std::unique_ptr SingleIntervalGradientColorizer::Make(Color start, + Color end) { + return std::unique_ptr( + new GLSingleIntervalGradientColorizer(start, end)); +} + +GLSingleIntervalGradientColorizer::GLSingleIntervalGradientColorizer(Color start, Color end) + : SingleIntervalGradientColorizer(start, end) { +} + +void GLSingleIntervalGradientColorizer::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto startName = args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "start"); + auto endName = args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "end"); + fragBuilder->codeAppendf("float t = %s.x;", args.inputColor.c_str()); + fragBuilder->codeAppendf("%s = (1.0 - t) * %s + t * %s;", args.outputColor.c_str(), + startName.c_str(), endName.c_str()); +} + +void GLSingleIntervalGradientColorizer::onSetData(UniformBuffer* uniformBuffer) const { + uniformBuffer->setData("start", start); + uniformBuffer->setData("end", end); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLSingleIntervalGradientColorizer.h b/src/opengl/processors/GLSingleIntervalGradientColorizer.h new file mode 100644 index 00000000..3cdf8ea3 --- /dev/null +++ b/src/opengl/processors/GLSingleIntervalGradientColorizer.h @@ -0,0 +1,35 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/SingleIntervalGradientColorizer.h" +#include "tgfx/core/Color.h" + +namespace tgfx { +class GLSingleIntervalGradientColorizer : public SingleIntervalGradientColorizer { + public: + GLSingleIntervalGradientColorizer(Color start, Color end); + + void emitCode(EmitArgs& args) const override; + + private: + void onSetData(UniformBuffer*) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLSweepGradientLayout.cpp b/src/opengl/processors/GLSweepGradientLayout.cpp new file mode 100644 index 00000000..e1d5e1aa --- /dev/null +++ b/src/opengl/processors/GLSweepGradientLayout.cpp @@ -0,0 +1,48 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLSweepGradientLayout.h" + +namespace tgfx { +std::unique_ptr SweepGradientLayout::Make(Matrix matrix, float bias, + float scale) { + return std::unique_ptr(new GLSweepGradientLayout(matrix, bias, scale)); +} + +GLSweepGradientLayout::GLSweepGradientLayout(Matrix matrix, float bias, float scale) + : SweepGradientLayout(matrix, bias, scale) { +} + +void GLSweepGradientLayout::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto* uniformHandler = args.uniformHandler; + auto biasName = uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float, "Bias"); + auto scaleName = uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float, "Scale"); + fragBuilder->codeAppendf("float angle = atan(-%s.y, -%s.x);", + (*args.transformedCoords)[0].name().c_str(), + (*args.transformedCoords)[0].name().c_str()); + fragBuilder->codeAppendf("float t = ((angle * 0.15915494309180001 + 0.5) + %s) * %s;", + biasName.c_str(), scaleName.c_str()); + fragBuilder->codeAppendf("%s = vec4(t, 1.0, 0.0, 0.0);", args.outputColor.c_str()); +} + +void GLSweepGradientLayout::onSetData(UniformBuffer* uniformBuffer) const { + uniformBuffer->setData("Bias", bias); + uniformBuffer->setData("Scale", scale); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLSweepGradientLayout.h b/src/opengl/processors/GLSweepGradientLayout.h new file mode 100644 index 00000000..e028fc66 --- /dev/null +++ b/src/opengl/processors/GLSweepGradientLayout.h @@ -0,0 +1,34 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/SweepGradientLayout.h" + +namespace tgfx { +class GLSweepGradientLayout : public SweepGradientLayout { + public: + GLSweepGradientLayout(Matrix matrix, float bias, float scale); + + void emitCode(EmitArgs& args) const override; + + private: + void onSetData(UniformBuffer*) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLTextureEffect.cpp b/src/opengl/processors/GLTextureEffect.cpp new file mode 100644 index 00000000..ac37c673 --- /dev/null +++ b/src/opengl/processors/GLTextureEffect.cpp @@ -0,0 +1,195 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLTextureEffect.h" +#include + +namespace tgfx { +static const float ColorConversion601LimitRange[] = { + 1.164384f, 1.164384f, 1.164384f, 0.0f, -0.391762f, 2.017232f, 1.596027f, -0.812968f, 0.0f, +}; + +static const float ColorConversion601FullRange[] = { + 1.0f, 1.0f, 1.0f, 0.0f, -0.344136f, 1.772f, 1.402f, -0.714136f, 0.0f, +}; + +static const float ColorConversion709LimitRange[] = { + 1.164384f, 1.164384f, 1.164384f, 0.0f, -0.213249f, 2.112402f, 1.792741f, -0.532909f, 0.0f, +}; + +static const float ColorConversion709FullRange[] = { + 1.0f, 1.0f, 1.0f, 0.0f, -0.187324f, 1.8556f, 1.5748f, -0.468124f, 0.0f, +}; + +static const float ColorConversion2020LimitRange[] = { + 1.164384f, 1.164384f, 1.164384f, 0.0f, -0.187326f, 2.141772f, 1.678674f, -0.650424f, 0.0f, +}; + +static const float ColorConversion2020FullRange[] = { + 1.0f, 1.0f, 1.0f, 0.0f, -0.164553f, 1.8814f, 1.4746f, -0.571353f, 0.0f, +}; + +static const float ColorConversionJPEGFullRange[] = { + 1.0f, 1.0f, 1.0f, 0.0f, -0.344136f, 1.772000f, 1.402f, -0.714136f, 0.0f, +}; + +std::unique_ptr TextureEffect::MakeRGBAAA(std::shared_ptr proxy, + const SamplingOptions& sampling, + const Point& alphaStart, + const Matrix* localMatrix) { + if (proxy == nullptr) { + return nullptr; + } + auto matrix = localMatrix ? *localMatrix : Matrix::I(); + return std::make_unique(std::move(proxy), sampling, alphaStart, matrix); +} + +GLTextureEffect::GLTextureEffect(std::shared_ptr proxy, SamplingOptions sampling, + const Point& alphaStart, const Matrix& localMatrix) + : TextureEffect(std::move(proxy), sampling, alphaStart, localMatrix) { +} + +void GLTextureEffect::emitCode(EmitArgs& args) const { + auto texture = getTexture(); + if (texture == nullptr) { + // emit a transparent color as the output color. 
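+    // vec4(0.0) is premultiplied transparent black, so a missing texture simply renders as fully
+    // transparent output.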
+ auto* fragBuilder = args.fragBuilder; + fragBuilder->codeAppendf("%s = vec4(0.0);", args.outputColor.c_str()); + return; + } + if (texture->isYUV()) { + emitYUVTextureCode(args); + } else { + emitPlainTextureCode(args); + } +} +void GLTextureEffect::emitPlainTextureCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto* uniformHandler = args.uniformHandler; + auto& textureSampler = (*args.textureSamplers)[0]; + auto vertexColor = (*args.transformedCoords)[0].name(); + if (args.coordFunc) { + vertexColor = args.coordFunc(vertexColor); + } + fragBuilder->codeAppend("vec4 color = "); + fragBuilder->appendTextureLookup(textureSampler, vertexColor); + fragBuilder->codeAppend(";"); + if (alphaStart != Point::Zero()) { + fragBuilder->codeAppend("color = clamp(color, 0.0, 1.0);"); + auto alphaStartName = + uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float2, "AlphaStart"); + std::string alphaVertexColor = "alphaVertexColor"; + fragBuilder->codeAppendf("vec2 %s = %s + %s;", alphaVertexColor.c_str(), vertexColor.c_str(), + alphaStartName.c_str()); + fragBuilder->codeAppend("vec4 alpha = "); + fragBuilder->appendTextureLookup(textureSampler, alphaVertexColor); + fragBuilder->codeAppend(";"); + fragBuilder->codeAppend("alpha = clamp(alpha, 0.0, 1.0);"); + fragBuilder->codeAppend("color = vec4(color.rgb * alpha.r, alpha.r);"); + } + fragBuilder->codeAppendf("%s = color * %s;", args.outputColor.c_str(), args.inputColor.c_str()); +} + +void GLTextureEffect::emitYUVTextureCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto* uniformHandler = args.uniformHandler; + auto yuvTexture = getYUVTexture(); + auto& textureSamplers = *args.textureSamplers; + auto vertexColor = (*args.transformedCoords)[0].name(); + fragBuilder->codeAppend("vec3 yuv;"); + fragBuilder->codeAppend("yuv.x = "); + fragBuilder->appendTextureLookup(textureSamplers[0], vertexColor); + fragBuilder->codeAppend(".r;"); + if (yuvTexture->pixelFormat() == YUVPixelFormat::I420) { + fragBuilder->codeAppend("yuv.y = "); + fragBuilder->appendTextureLookup(textureSamplers[1], vertexColor); + fragBuilder->codeAppend(".r;"); + fragBuilder->codeAppend("yuv.z = "); + fragBuilder->appendTextureLookup(textureSamplers[2], vertexColor); + fragBuilder->codeAppend(".r;"); + } else if (yuvTexture->pixelFormat() == YUVPixelFormat::NV12) { + fragBuilder->codeAppend("yuv.yz = "); + fragBuilder->appendTextureLookup(textureSamplers[1], vertexColor); + fragBuilder->codeAppend(".ra;"); + } + if (IsLimitedYUVColorRange(yuvTexture->colorSpace())) { + fragBuilder->codeAppend("yuv.x -= (16.0 / 255.0);"); + } + fragBuilder->codeAppend("yuv.yz -= vec2(0.5, 0.5);"); + auto mat3Name = + uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float3x3, "Mat3ColorConversion"); + fragBuilder->codeAppendf("vec3 rgb = clamp(%s * yuv, 0.0, 1.0);", mat3Name.c_str()); + if (alphaStart == Point::Zero()) { + fragBuilder->codeAppendf("%s = vec4(rgb, 1.0) * %s;", args.outputColor.c_str(), + args.inputColor.c_str()); + } else { + auto alphaStartName = + uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float2, "AlphaStart"); + std::string alphaVertexColor = "alphaVertexColor"; + fragBuilder->codeAppendf("vec2 %s = %s + %s;", alphaVertexColor.c_str(), vertexColor.c_str(), + alphaStartName.c_str()); + fragBuilder->codeAppend("float yuv_a = "); + fragBuilder->appendTextureLookup(textureSamplers[0], alphaVertexColor); + fragBuilder->codeAppend(".r;"); + fragBuilder->codeAppend("yuv_a = (yuv_a - 16.0/255.0) / (219.0/255.0 - 
1.0/255.0);"); + fragBuilder->codeAppend("yuv_a = clamp(yuv_a, 0.0, 1.0);"); + fragBuilder->codeAppendf("%s = vec4(rgb * yuv_a, yuv_a) * %s;", args.outputColor.c_str(), + args.inputColor.c_str()); + } +} + +void GLTextureEffect::onSetData(UniformBuffer* uniformBuffer) const { + auto texture = getTexture(); + if (texture == nullptr) { + return; + } + if (alphaStart != Point::Zero()) { + auto alphaStartValue = texture->getTextureCoord(alphaStart.x, alphaStart.y); + uniformBuffer->setData("AlphaStart", alphaStartValue); + } + auto yuvTexture = getYUVTexture(); + if (yuvTexture) { + std::string mat3ColorConversion = "Mat3ColorConversion"; + switch (yuvTexture->colorSpace()) { + case YUVColorSpace::BT601_LIMITED: + uniformBuffer->setData(mat3ColorConversion, ColorConversion601LimitRange); + break; + case YUVColorSpace::BT601_FULL: + uniformBuffer->setData(mat3ColorConversion, ColorConversion601FullRange); + break; + case YUVColorSpace::BT709_LIMITED: + uniformBuffer->setData(mat3ColorConversion, ColorConversion709LimitRange); + break; + case YUVColorSpace::BT709_FULL: + uniformBuffer->setData(mat3ColorConversion, ColorConversion709FullRange); + break; + case YUVColorSpace::BT2020_LIMITED: + uniformBuffer->setData(mat3ColorConversion, ColorConversion2020LimitRange); + break; + case YUVColorSpace::BT2020_FULL: + uniformBuffer->setData(mat3ColorConversion, ColorConversion2020FullRange); + break; + case YUVColorSpace::JPEG_FULL: + uniformBuffer->setData(mat3ColorConversion, ColorConversionJPEGFullRange); + break; + default: + break; + } + } +} +} // namespace tgfx diff --git a/src/opengl/processors/GLTextureEffect.h b/src/opengl/processors/GLTextureEffect.h new file mode 100644 index 00000000..526d2a23 --- /dev/null +++ b/src/opengl/processors/GLTextureEffect.h @@ -0,0 +1,37 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/TextureEffect.h" + +namespace tgfx { +class GLTextureEffect : public TextureEffect { + public: + GLTextureEffect(std::shared_ptr proxy, SamplingOptions sampling, + const Point& alphaStart, const Matrix& localMatrix); + + void emitCode(EmitArgs& args) const override; + + private: + void emitPlainTextureCode(EmitArgs& args) const; + void emitYUVTextureCode(EmitArgs& args) const; + void onSetData(UniformBuffer* uniformBuffer) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLTextureGradientColorizer.cpp b/src/opengl/processors/GLTextureGradientColorizer.cpp new file mode 100644 index 00000000..514c40a1 --- /dev/null +++ b/src/opengl/processors/GLTextureGradientColorizer.cpp @@ -0,0 +1,39 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLTextureGradientColorizer.h" + +namespace tgfx { +std::unique_ptr TextureGradientColorizer::Make( + std::shared_ptr gradient) { + return std::unique_ptr( + new GLTextureGradientColorizer(std::move(gradient))); +} + +GLTextureGradientColorizer::GLTextureGradientColorizer(std::shared_ptr gradient) + : TextureGradientColorizer(std::move(gradient)) { +} + +void GLTextureGradientColorizer::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + fragBuilder->codeAppendf("vec2 coord = vec2(%s.x, 0.5);", args.inputColor.c_str()); + fragBuilder->codeAppendf("%s = ", args.outputColor.c_str()); + fragBuilder->appendTextureLookup((*args.textureSamplers)[0], "coord"); + fragBuilder->codeAppend(";"); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLTextureGradientColorizer.h b/src/opengl/processors/GLTextureGradientColorizer.h new file mode 100644 index 00000000..356a854e --- /dev/null +++ b/src/opengl/processors/GLTextureGradientColorizer.h @@ -0,0 +1,30 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/TextureGradientColorizer.h" + +namespace tgfx { +class GLTextureGradientColorizer : public TextureGradientColorizer { + public: + explicit GLTextureGradientColorizer(std::shared_ptr gradient); + + void emitCode(EmitArgs& args) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLTiledTextureEffect.cpp b/src/opengl/processors/GLTiledTextureEffect.cpp new file mode 100644 index 00000000..193cf1cb --- /dev/null +++ b/src/opengl/processors/GLTiledTextureEffect.cpp @@ -0,0 +1,431 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLTiledTextureEffect.h" +#include "gpu/SamplerState.h" +#include "gpu/processors/TextureEffect.h" + +namespace tgfx { +std::unique_ptr TiledTextureEffect::Make(std::shared_ptr proxy, + TileMode tileModeX, TileMode tileModeY, + const SamplingOptions& options, + const Matrix* localMatrix) { + if (proxy == nullptr) { + return nullptr; + } + if (tileModeX == TileMode::Clamp && tileModeY == TileMode::Clamp) { + return TextureEffect::Make(std::move(proxy), options, localMatrix); + } + auto matrix = localMatrix ? 
*localMatrix : Matrix::I(); + SamplerState samplerState(tileModeX, tileModeY, options); + return std::make_unique(std::move(proxy), samplerState, matrix); +} + +GLTiledTextureEffect::GLTiledTextureEffect(std::shared_ptr proxy, + const SamplerState& samplerState, + const Matrix& localMatrix) + : TiledTextureEffect(std::move(proxy), samplerState, localMatrix) { +} + +bool GLTiledTextureEffect::ShaderModeRequiresUnormCoord(TiledTextureEffect::ShaderMode mode) { + switch (mode) { + case TiledTextureEffect::ShaderMode::None: + case TiledTextureEffect::ShaderMode::Clamp: + case TiledTextureEffect::ShaderMode::RepeatNearestNone: + case TiledTextureEffect::ShaderMode::MirrorRepeat: + return false; + case TiledTextureEffect::ShaderMode::RepeatLinearNone: + case TiledTextureEffect::ShaderMode::RepeatNearestMipmap: + case TiledTextureEffect::ShaderMode::RepeatLinearMipmap: + case TiledTextureEffect::ShaderMode::ClampToBorderNearest: + case TiledTextureEffect::ShaderMode::ClampToBorderLinear: + return true; + } +} + +bool GLTiledTextureEffect::ShaderModeUsesSubset(TiledTextureEffect::ShaderMode m) { + switch (m) { + case TiledTextureEffect::ShaderMode::None: + case TiledTextureEffect::ShaderMode::Clamp: + return false; + case TiledTextureEffect::ShaderMode::RepeatNearestNone: + case TiledTextureEffect::ShaderMode::RepeatLinearNone: + case TiledTextureEffect::ShaderMode::RepeatNearestMipmap: + case TiledTextureEffect::ShaderMode::RepeatLinearMipmap: + case TiledTextureEffect::ShaderMode::MirrorRepeat: + case TiledTextureEffect::ShaderMode::ClampToBorderNearest: + case TiledTextureEffect::ShaderMode::ClampToBorderLinear: + return true; + } +} + +bool GLTiledTextureEffect::ShaderModeUsesClamp(TiledTextureEffect::ShaderMode m) { + switch (m) { + case TiledTextureEffect::ShaderMode::None: + case TiledTextureEffect::ShaderMode::ClampToBorderNearest: + return false; + case TiledTextureEffect::ShaderMode::Clamp: + case TiledTextureEffect::ShaderMode::RepeatNearestNone: + case TiledTextureEffect::ShaderMode::RepeatLinearNone: + case TiledTextureEffect::ShaderMode::RepeatNearestMipmap: + case TiledTextureEffect::ShaderMode::RepeatLinearMipmap: + case TiledTextureEffect::ShaderMode::MirrorRepeat: + case TiledTextureEffect::ShaderMode::ClampToBorderLinear: + return true; + } +} + +void GLTiledTextureEffect::readColor(EmitArgs& args, const std::string& dimensionsName, + const std::string& coord, const char* out) const { + std::string normCoord; + if (!dimensionsName.empty()) { + normCoord = "(" + coord + ") * " + dimensionsName + ""; + } else { + normCoord = coord; + } + auto* fragBuilder = args.fragBuilder; + fragBuilder->codeAppendf("vec4 %s = ", out); + fragBuilder->appendTextureLookup((*args.textureSamplers)[0], normCoord); + fragBuilder->codeAppend(";"); +} + +void GLTiledTextureEffect::subsetCoord(EmitArgs& args, TiledTextureEffect::ShaderMode mode, + const std::string& subsetName, const char* coordSwizzle, + const char* subsetStartSwizzle, + const char* subsetStopSwizzle, const char* extraCoord, + const char* coordWeight) const { + auto* fragBuilder = args.fragBuilder; + switch (mode) { + case TiledTextureEffect::ShaderMode::None: + case TiledTextureEffect::ShaderMode::ClampToBorderNearest: + case TiledTextureEffect::ShaderMode::ClampToBorderLinear: + case TiledTextureEffect::ShaderMode::Clamp: + fragBuilder->codeAppendf("subsetCoord.%s = inCoord.%s;", coordSwizzle, coordSwizzle); + break; + case TiledTextureEffect::ShaderMode::RepeatNearestNone: + case TiledTextureEffect::ShaderMode::RepeatLinearNone: + 
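+      // Repeat: wrap inCoord into the subset range by emitting
+      // mod(inCoord - subsetStart, subsetStop - subsetStart) + subsetStart, where the start/stop
+      // components are selected per axis via the swizzle arguments.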
fragBuilder->codeAppendf("subsetCoord.%s = mod(inCoord.%s - %s.%s, %s.%s - %s.%s) + %s.%s;", + coordSwizzle, coordSwizzle, subsetName.c_str(), subsetStartSwizzle, + subsetName.c_str(), subsetStopSwizzle, subsetName.c_str(), + subsetStartSwizzle, subsetName.c_str(), subsetStartSwizzle); + break; + case TiledTextureEffect::ShaderMode::RepeatNearestMipmap: + case TiledTextureEffect::ShaderMode::RepeatLinearMipmap: + fragBuilder->codeAppend("{"); + fragBuilder->codeAppendf("float w = %s.%s - %s.%s;", subsetName.c_str(), subsetStopSwizzle, + subsetName.c_str(), subsetStartSwizzle); + fragBuilder->codeAppendf("float w2 = 2.0 * w;"); + fragBuilder->codeAppendf("float d = inCoord.%s - %s.%s;", coordSwizzle, subsetName.c_str(), + subsetStartSwizzle); + fragBuilder->codeAppend("float m = mod(d, w2);"); + fragBuilder->codeAppend("float o = mix(m, w2 - m, step(w, m));"); + fragBuilder->codeAppendf("subsetCoord.%s = o + %s.%s;", coordSwizzle, subsetName.c_str(), + subsetStartSwizzle); + fragBuilder->codeAppendf("%s = w - o + %s.%s;", extraCoord, subsetName.c_str(), + subsetStartSwizzle); + // coordWeight is used as the third param of mix() to blend between a sample taken using + // subsetCoord and a sample at extraCoord. + fragBuilder->codeAppend("float hw = w / 2.0;"); + fragBuilder->codeAppend("float n = mod(d - hw, w2);"); + fragBuilder->codeAppendf("%s = clamp(mix(n, w2 - n, step(w, n)) - hw + 0.5, 0.0, 1.0);", + coordWeight); + fragBuilder->codeAppend("}"); + break; + case TiledTextureEffect::ShaderMode::MirrorRepeat: + fragBuilder->codeAppend("{"); + fragBuilder->codeAppendf("float w = %s.%s - %s.%s;", subsetName.c_str(), subsetStopSwizzle, + subsetName.c_str(), subsetStartSwizzle); + fragBuilder->codeAppendf("float w2 = 2.0 * w;"); + fragBuilder->codeAppendf("float m = mod(inCoord.%s - %s.%s, w2);", coordSwizzle, + subsetName.c_str(), subsetStartSwizzle); + fragBuilder->codeAppendf("subsetCoord.%s = mix(m, w2 - m, step(w, m)) + %s.%s;", coordSwizzle, + subsetName.c_str(), subsetStartSwizzle); + fragBuilder->codeAppend("}"); + break; + } +} + +void GLTiledTextureEffect::clampCoord(EmitArgs& args, bool clamp, const std::string& clampName, + const char* coordSwizzle, const char* clampStartSwizzle, + const char* clampStopSwizzle) const { + auto* fragBuilder = args.fragBuilder; + if (clamp) { + fragBuilder->codeAppendf("clampedCoord%s = clamp(subsetCoord%s, %s%s, %s%s);", coordSwizzle, + coordSwizzle, clampName.c_str(), clampStartSwizzle, clampName.c_str(), + clampStopSwizzle); + } else { + fragBuilder->codeAppendf("clampedCoord%s = subsetCoord%s;", coordSwizzle, coordSwizzle); + } +} + +void GLTiledTextureEffect::clampCoord(EmitArgs& args, const bool useClamp[2], + const std::string& clampName) const { + if (useClamp[0] == useClamp[1]) { + clampCoord(args, useClamp[0], clampName, "", ".xy", ".zw"); + } else { + clampCoord(args, useClamp[0], clampName, ".x", ".x", ".z"); + clampCoord(args, useClamp[1], clampName, ".y", ".y", ".w"); + } +} + +GLTiledTextureEffect::UniformNames GLTiledTextureEffect::initUniform(EmitArgs& args, + const Texture* texture, + const Sampling& sampling, + const bool useClamp[2]) const { + UniformNames names = {}; + auto* uniformHandler = args.uniformHandler; + if (ShaderModeUsesSubset(sampling.shaderModeX) || ShaderModeUsesSubset(sampling.shaderModeY)) { + names.subsetName = uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "Subset"); + } + if (useClamp[0] || useClamp[1]) { + names.clampName = uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, 
"Clamp"); + } + bool unormCoordsRequiredForShaderMode = ShaderModeRequiresUnormCoord(sampling.shaderModeX) || + ShaderModeRequiresUnormCoord(sampling.shaderModeY); + bool sampleCoordsMustBeNormalized = texture->getSampler()->type() != TextureType::Rectangle; + if (unormCoordsRequiredForShaderMode && sampleCoordsMustBeNormalized) { + names.dimensionsName = + uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float2, "Dimension"); + } + return names; +} + +void GLTiledTextureEffect::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto texture = getTexture(); + if (texture == nullptr) { + // emit a transparent color as the output color. + fragBuilder->codeAppendf("%s = vec4(0.0);", args.outputColor.c_str()); + return; + } + auto vertexColor = (*args.transformedCoords)[0].name(); + if (args.coordFunc) { + vertexColor = args.coordFunc(vertexColor); + } + auto subset = Rect::MakeWH(texture->width(), texture->height()); + Sampling sampling(texture, samplerState, subset); + if (sampling.shaderModeX == TiledTextureEffect::ShaderMode::None && + sampling.shaderModeY == TiledTextureEffect::ShaderMode::None) { + fragBuilder->codeAppendf("%s = ", args.outputColor.c_str()); + fragBuilder->appendTextureLookup((*args.textureSamplers)[0], vertexColor); + fragBuilder->codeAppendf(" * %s;", args.inputColor.c_str()); + } else { + fragBuilder->codeAppendf("vec2 inCoord = %s;", vertexColor.c_str()); + bool useClamp[2] = {ShaderModeUsesClamp(sampling.shaderModeX), + ShaderModeUsesClamp(sampling.shaderModeY)}; + auto names = initUniform(args, texture, sampling, useClamp); + if (!names.dimensionsName.empty()) { + fragBuilder->codeAppendf("inCoord /= %s;", names.dimensionsName.c_str()); + } + + const char* extraRepeatCoordX = nullptr; + const char* repeatCoordWeightX = nullptr; + const char* extraRepeatCoordY = nullptr; + const char* repeatCoordWeightY = nullptr; + + bool mipMapRepeatX = + sampling.shaderModeX == TiledTextureEffect::ShaderMode::RepeatNearestMipmap || + sampling.shaderModeX == TiledTextureEffect::ShaderMode::RepeatLinearMipmap; + bool mipMapRepeatY = + sampling.shaderModeY == TiledTextureEffect::ShaderMode::RepeatNearestMipmap || + sampling.shaderModeY == TiledTextureEffect::ShaderMode::RepeatLinearMipmap; + + if (mipMapRepeatX || mipMapRepeatY) { + fragBuilder->codeAppend("vec2 extraRepeatCoord;"); + } + if (mipMapRepeatX) { + fragBuilder->codeAppend("float repeatCoordWeightX;"); + extraRepeatCoordX = "extraRepeatCoord.x"; + repeatCoordWeightX = "repeatCoordWeightX"; + } + if (mipMapRepeatY) { + fragBuilder->codeAppend("float repeatCoordWeightY;"); + extraRepeatCoordY = "extraRepeatCoord.y"; + repeatCoordWeightY = "repeatCoordWeightY"; + } + + fragBuilder->codeAppend("highp vec2 subsetCoord;"); + subsetCoord(args, sampling.shaderModeX, names.subsetName, "x", "x", "z", extraRepeatCoordX, + repeatCoordWeightX); + subsetCoord(args, sampling.shaderModeY, names.subsetName, "y", "y", "w", extraRepeatCoordY, + repeatCoordWeightY); + + fragBuilder->codeAppend("vec2 clampedCoord;"); + clampCoord(args, useClamp, names.clampName); + + if (mipMapRepeatX && mipMapRepeatY) { + fragBuilder->codeAppendf("extraRepeatCoord = clamp(extraRepeatCoord, %s.xy, %s.zw);", + names.clampName.c_str(), names.clampName.c_str()); + } else if (mipMapRepeatX) { + fragBuilder->codeAppendf("extraRepeatCoord.x = clamp(extraRepeatCoord.x, %s.x, %s.z);", + names.clampName.c_str(), names.clampName.c_str()); + } else if (mipMapRepeatY) { + fragBuilder->codeAppendf("extraRepeatCoord.y = 
clamp(extraRepeatCoord.y, %s.y, %s.w);", + names.clampName.c_str(), names.clampName.c_str()); + } + + if (mipMapRepeatX && mipMapRepeatY) { + const char* textureColor1 = "textureColor1"; + readColor(args, names.dimensionsName, "clampedCoord", textureColor1); + const char* textureColor2 = "textureColor2"; + readColor(args, names.dimensionsName, "vec2(extraRepeatCoord.x, clampedCoord.y)", + textureColor2); + const char* textureColor3 = "textureColor3"; + readColor(args, names.dimensionsName, "vec2(clampedCoord.x, extraRepeatCoord.y)", + textureColor3); + const char* textureColor4 = "textureColor4"; + readColor(args, names.dimensionsName, "vec2(extraRepeatCoord.x, extraRepeatCoord.y)", + textureColor4); + fragBuilder->codeAppendf( + "vec4 textureColor = mix(mix(%s, %s, repeatCoordWeightX), mix(%s, %s, " + "repeatCoordWeightX), repeatCoordWeightY);", + textureColor1, textureColor2, textureColor3, textureColor4); + } else if (mipMapRepeatX) { + const char* textureColor1 = "textureColor1"; + readColor(args, names.dimensionsName, "clampedCoord", textureColor1); + const char* textureColor2 = "textureColor2"; + readColor(args, names.dimensionsName, "vec2(extraRepeatCoord.x, clampedCoord.y)", + textureColor2); + fragBuilder->codeAppendf("vec4 textureColor = mix(%s, %s, repeatCoordWeightX);", + textureColor1, textureColor2); + } else if (mipMapRepeatY) { + const char* textureColor1 = "textureColor1"; + readColor(args, names.dimensionsName, "clampedCoord", textureColor1); + const char* textureColor2 = "textureColor2"; + readColor(args, names.dimensionsName, "vec2(clampedCoord.x, extraRepeatCoord.y)", + textureColor2); + fragBuilder->codeAppendf("vec4 textureColor = mix(%s, %s, repeatCoordWeightY);", + textureColor1, textureColor2); + } else { + readColor(args, names.dimensionsName, "clampedCoord", "textureColor"); + } + + static const char* repeatReadX = "repeatReadX"; + static const char* repeatReadY = "repeatReadY"; + bool repeatX = sampling.shaderModeX == TiledTextureEffect::ShaderMode::RepeatLinearNone || + sampling.shaderModeX == TiledTextureEffect::ShaderMode::RepeatLinearMipmap; + bool repeatY = sampling.shaderModeY == TiledTextureEffect::ShaderMode::RepeatLinearNone || + sampling.shaderModeY == TiledTextureEffect::ShaderMode::RepeatLinearMipmap; + if (repeatX || sampling.shaderModeX == TiledTextureEffect::ShaderMode::ClampToBorderLinear) { + fragBuilder->codeAppend("float errX = subsetCoord.x - clampedCoord.x;"); + if (repeatX) { + fragBuilder->codeAppendf("float repeatCoordX = errX > 0.0 ? %s.x : %s.z;", + names.clampName.c_str(), names.clampName.c_str()); + } + } + if (repeatY || sampling.shaderModeY == TiledTextureEffect::ShaderMode::ClampToBorderLinear) { + fragBuilder->codeAppend("float errY = subsetCoord.y - clampedCoord.y;"); + if (repeatY) { + fragBuilder->codeAppendf("float repeatCoordY = errY > 0.0 ? 
%s.y : %s.w;", + names.clampName.c_str(), names.clampName.c_str()); + } + } + + const char* ifStr = "if"; + if (repeatX && repeatY) { + readColor(args, names.dimensionsName, "vec2(repeatCoordX, clampedCoord.y)", repeatReadX); + readColor(args, names.dimensionsName, "vec2(clampedCoord.x, repeatCoordY)", repeatReadY); + static const char* repeatReadXY = "repeatReadXY"; + readColor(args, names.dimensionsName, "vec2(repeatCoordX, repeatCoordY)", repeatReadXY); + fragBuilder->codeAppend("if (errX != 0.0 && errY != 0.0) {"); + fragBuilder->codeAppend("errX = abs(errX);"); + fragBuilder->codeAppendf( + "textureColor = mix(mix(textureColor, %s, errX), mix(%s, %s, errX), abs(errY));", + repeatReadX, repeatReadY, repeatReadXY); + fragBuilder->codeAppend("}"); + ifStr = "else if"; + } + if (repeatX) { + fragBuilder->codeAppendf("%s (errX != 0.0) {", ifStr); + readColor(args, names.dimensionsName, "vec2(repeatCoordX, clampedCoord.y)", repeatReadX); + fragBuilder->codeAppendf("textureColor = mix(textureColor, %s, errX);", repeatReadX); + fragBuilder->codeAppend("}"); + } + if (repeatY) { + fragBuilder->codeAppendf("%s (errY != 0.0) {", ifStr); + readColor(args, names.dimensionsName, "vec2(clampedCoord.x, repeatCoordY)", repeatReadY); + fragBuilder->codeAppendf("textureColor = mix(textureColor, %s, errY);", repeatReadY); + fragBuilder->codeAppend("}"); + } + + if (sampling.shaderModeX == TiledTextureEffect::ShaderMode::ClampToBorderLinear) { + fragBuilder->codeAppend("textureColor = mix(textureColor, vec4(0.0), min(abs(errX), 1.0));"); + } + if (sampling.shaderModeY == TiledTextureEffect::ShaderMode::ClampToBorderLinear) { + fragBuilder->codeAppendf("textureColor = mix(textureColor, vec4(0.0), min(abs(errY), 1.0));"); + } + if (sampling.shaderModeX == TiledTextureEffect::ShaderMode::ClampToBorderNearest) { + fragBuilder->codeAppend("float snappedX = floor(inCoord.x + 0.001) + 0.5;"); + fragBuilder->codeAppendf("if (snappedX < %s.x || snappedX > %s.z) {", + names.subsetName.c_str(), names.subsetName.c_str()); + fragBuilder->codeAppend("textureColor = vec4(0.0);"); // border color + fragBuilder->codeAppend("}"); + } + if (sampling.shaderModeY == TiledTextureEffect::ShaderMode::ClampToBorderNearest) { + fragBuilder->codeAppend("float snappedY = floor(inCoord.y + 0.001) + 0.5;"); + fragBuilder->codeAppendf("if (snappedY < %s.y || snappedY > %s.w) {", + names.subsetName.c_str(), names.subsetName.c_str()); + fragBuilder->codeAppend("textureColor = vec4(0.0);"); // border color + fragBuilder->codeAppend("}"); + } + fragBuilder->codeAppendf("%s = textureColor * %s;", args.outputColor.c_str(), + args.inputColor.c_str()); + } +} + +void GLTiledTextureEffect::onSetData(UniformBuffer* uniformBuffer) const { + auto texture = getTexture(); + if (texture == nullptr) { + return; + } + auto subset = Rect::MakeWH(texture->width(), texture->height()); + Sampling sampling(texture, samplerState, subset); + auto hasDimensionUniform = (ShaderModeRequiresUnormCoord(sampling.shaderModeX) || + ShaderModeRequiresUnormCoord(sampling.shaderModeY)) && + texture->getSampler()->type() != TextureType::Rectangle; + if (hasDimensionUniform) { + auto dimensions = texture->getTextureCoord(1.f, 1.f); + uniformBuffer->setData("Dimension", dimensions); + } + auto pushRect = [&](Rect subset, const std::string& uni) { + float rect[4] = {subset.left, subset.top, subset.right, subset.bottom}; + if (texture->origin() == ImageOrigin::BottomLeft) { + auto h = static_cast(texture->height()); + rect[1] = h - rect[1]; + rect[3] = h - rect[3]; + 
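// Mirroring both Y edges around the texture height leaves rect[1] greater than rect[3], so the swap below restores the expected top/bottom ordering before the rect is uploaded. +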
std::swap(rect[1], rect[3]); + } + auto type = texture->getSampler()->type(); + if (!hasDimensionUniform && type != TextureType::Rectangle) { + auto lt = texture->getTextureCoord(static_cast(rect[0]), static_cast(rect[1])); + auto rb = texture->getTextureCoord(static_cast(rect[2]), static_cast(rect[3])); + rect[0] = lt.x; + rect[1] = lt.y; + rect[2] = rb.x; + rect[3] = rb.y; + } + uniformBuffer->setData(uni, rect); + }; + if (ShaderModeUsesSubset(sampling.shaderModeX) || ShaderModeUsesSubset(sampling.shaderModeY)) { + pushRect(sampling.shaderSubset, "Subset"); + } + if (ShaderModeUsesClamp(sampling.shaderModeX) || ShaderModeUsesClamp(sampling.shaderModeY)) { + pushRect(sampling.shaderClamp, "Clamp"); + } +} +} // namespace tgfx diff --git a/src/opengl/processors/GLTiledTextureEffect.h b/src/opengl/processors/GLTiledTextureEffect.h new file mode 100644 index 00000000..fc9914b0 --- /dev/null +++ b/src/opengl/processors/GLTiledTextureEffect.h @@ -0,0 +1,64 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/TiledTextureEffect.h" + +namespace tgfx { +class GLTiledTextureEffect : public TiledTextureEffect { + public: + GLTiledTextureEffect(std::shared_ptr proxy, const SamplerState& samplerState, + const Matrix& localMatrix); + + void emitCode(EmitArgs& args) const override; + + private: + struct UniformNames { + std::string subsetName; + std::string clampName; + std::string dimensionsName; + }; + + void onSetData(UniformBuffer* uniformBuffer) const override; + + static bool ShaderModeRequiresUnormCoord(TiledTextureEffect::ShaderMode m); + + static bool ShaderModeUsesSubset(TiledTextureEffect::ShaderMode m); + + static bool ShaderModeUsesClamp(TiledTextureEffect::ShaderMode m); + + void readColor(EmitArgs& args, const std::string& dimensionsName, const std::string& coord, + const char* out) const; + + void subsetCoord(EmitArgs& args, TiledTextureEffect::ShaderMode mode, + const std::string& subsetName, const char* coordSwizzle, + const char* subsetStartSwizzle, const char* subsetStopSwizzle, + const char* extraCoord, const char* coordWeight) const; + + void clampCoord(EmitArgs& args, bool clamp, const std::string& clampName, + const char* coordSwizzle, const char* clampStartSwizzle, + const char* clampStopSwizzle) const; + + void clampCoord(EmitArgs& args, const bool useClamp[2], const std::string& clampName) const; + + UniformNames initUniform(EmitArgs& args, const Texture* texture, const Sampling& sampling, + const bool useClamp[2]) const; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLUnrolledBinaryGradientColorizer.cpp b/src/opengl/processors/GLUnrolledBinaryGradientColorizer.cpp new file mode 100644 index 00000000..a59ea5bb --- /dev/null +++ b/src/opengl/processors/GLUnrolledBinaryGradientColorizer.cpp @@ -0,0 +1,292 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLUnrolledBinaryGradientColorizer.h" +#include "utils/MathExtra.h" + +namespace tgfx { +struct UnrolledBinaryUniformName { + std::string scale0_1; + std::string scale2_3; + std::string scale4_5; + std::string scale6_7; + std::string scale8_9; + std::string scale10_11; + std::string scale12_13; + std::string scale14_15; + std::string bias0_1; + std::string bias2_3; + std::string bias4_5; + std::string bias6_7; + std::string bias8_9; + std::string bias10_11; + std::string bias12_13; + std::string bias14_15; + std::string thresholds1_7; + std::string thresholds9_13; +}; + +static constexpr int kMaxIntervals = 8; + +std::unique_ptr UnrolledBinaryGradientColorizer::Make( + const Color* colors, const float* positions, int count) { + // Depending on how the positions resolve into hard stops or regular stops, the number of + // intervals specified by the number of colors/positions can change. For instance, a plain + // 3 color gradient is two intervals, but a 4 color gradient with a hard stop is also + // two intervals. At the most extreme end, an 8 interval gradient made entirely of hard + // stops has 16 colors. + + if (count > kMaxColorCount) { + // Definitely cannot represent this gradient configuration + return nullptr; + } + + // The raster implementation also uses scales and biases, but since they must be calculated + // after the dst color space is applied, it limits our ability to cache their values. + Color scales[kMaxIntervals]; + Color biases[kMaxIntervals]; + float thresholds[kMaxIntervals]; + + int intervalCount = 0; + + for (int i = 0; i < count - 1; i++) { + if (intervalCount >= kMaxIntervals) { + // Already reached kMaxIntervals, and haven't run out of color stops so this + // gradient cannot be represented by this shader. + return nullptr; + } + + auto t0 = positions[i]; + auto t1 = positions[i + 1]; + auto dt = t1 - t0; + // If the interval is empty, skip to the next interval. This will automatically create + // distinct hard stop intervals as needed. It also protects against malformed gradients + // that have repeated hard stops at the very beginning that are effectively unreachable. 
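+ // For example, colors {A, B, B, C} at positions {0.0, 0.5, 0.5, 1.0} contain an empty middle interval (dt == 0); it is skipped below, so the hard stop at 0.5 still resolves to two intervals.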
+ if (FloatNearlyZero(dt)) { + continue; + } + + for (int j = 0; j < 4; ++j) { + auto c0 = colors[i][j]; + auto c1 = colors[i + 1][j]; + auto scale = (c1 - c0) / dt; + auto bias = c0 - t0 * scale; + scales[intervalCount][j] = scale; + biases[intervalCount][j] = bias; + } + thresholds[intervalCount] = t1; + intervalCount++; + } + + // set the unused values to something consistent + for (int i = intervalCount; i < kMaxIntervals; i++) { + scales[i] = Color::Transparent(); + biases[i] = Color::Transparent(); + thresholds[i] = 0.0; + } + + return std::unique_ptr(new GLUnrolledBinaryGradientColorizer( + intervalCount, scales, biases, + Rect::MakeLTRB(thresholds[0], thresholds[1], thresholds[2], thresholds[3]), + Rect::MakeLTRB(thresholds[4], thresholds[5], thresholds[6], 0.0))); +} + +GLUnrolledBinaryGradientColorizer::GLUnrolledBinaryGradientColorizer(int intervalCount, + Color* scales, Color* biases, + Rect thresholds1_7, + Rect thresholds9_13) + : UnrolledBinaryGradientColorizer(intervalCount, scales, biases, thresholds1_7, + thresholds9_13) { +} + +std::string AddUniform(UniformHandler* uniformHandler, const std::string& name, int intervalCount, + int limit) { + if (intervalCount > limit) { + return uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, name); + } + return ""; +} + +void AppendCode1(FragmentShaderBuilder* fragBuilder, int intervalCount, + const UnrolledBinaryUniformName& name) { + if (intervalCount >= 2) { + fragBuilder->codeAppend("// thresholds1_7.y is mid-point for intervals (0,3) and (4,7)\n"); + fragBuilder->codeAppendf("if (t < %s.y) {", name.thresholds1_7.c_str()); + } + fragBuilder->codeAppend("// thresholds1_7.x is mid-point for intervals (0,1) and (2,3)\n"); + fragBuilder->codeAppendf("if (t < %s.x) {", name.thresholds1_7.c_str()); + fragBuilder->codeAppendf("scale = %s;", name.scale0_1.c_str()); + fragBuilder->codeAppendf("bias = %s;", name.bias0_1.c_str()); + if (intervalCount > 1) { + fragBuilder->codeAppend("} else {"); + fragBuilder->codeAppendf("scale = %s;", + name.scale2_3.empty() ? "vec4(0.0)" : name.scale2_3.c_str()); + fragBuilder->codeAppendf("bias = %s;", + name.bias2_3.empty() ? "vec4(0.0)" : name.bias2_3.c_str()); + } + fragBuilder->codeAppend("}"); + if (intervalCount > 2) { + fragBuilder->codeAppend("} else {"); + } + if (intervalCount >= 3) { + fragBuilder->codeAppend("// thresholds1_7.z is mid-point for intervals (4,5) and (6,7)\n"); + fragBuilder->codeAppendf("if (t < %s.z) {", name.thresholds1_7.c_str()); + fragBuilder->codeAppendf("scale = %s;", + name.scale4_5.empty() ? "vec4(0.0)" : name.scale4_5.c_str()); + fragBuilder->codeAppendf("bias = %s;", + name.bias4_5.empty() ? "vec4(0.0)" : name.bias4_5.c_str()); + } + if (intervalCount > 3) { + fragBuilder->codeAppend("} else {"); + fragBuilder->codeAppendf("scale = %s;", + name.scale6_7.empty() ? "vec4(0.0)" : name.scale6_7.c_str()); + fragBuilder->codeAppendf("bias = %s;", + name.bias6_7.empty() ? 
"vec4(0.0)" : name.bias6_7.c_str()); + } + if (intervalCount >= 3) { + fragBuilder->codeAppend("}"); + } + if (intervalCount >= 2) { + fragBuilder->codeAppend("}"); + } +} + +void AppendCode2(FragmentShaderBuilder* fragBuilder, int intervalCount, + const UnrolledBinaryUniformName& name) { + if (intervalCount >= 6) { + fragBuilder->codeAppend("// thresholds9_13.y is mid-point for intervals (8,11) and (12,15)\n"); + fragBuilder->codeAppendf("if (t < %s.y) {", name.thresholds9_13.c_str()); + } + if (intervalCount >= 5) { + fragBuilder->codeAppend("// thresholds9_13.x is mid-point for intervals (8,9) and (10,11)\n"); + fragBuilder->codeAppendf("if (t < %s.x) {", name.thresholds9_13.c_str()); + fragBuilder->codeAppendf("scale = %s;", + name.scale8_9.empty() ? "vec4(0.0)" : name.scale8_9.c_str()); + fragBuilder->codeAppendf("bias = %s;", + name.bias8_9.empty() ? "vec4(0.0)" : name.bias8_9.c_str()); + } + if (intervalCount > 5) { + fragBuilder->codeAppend("} else {"); + fragBuilder->codeAppendf("scale = %s;", + name.scale10_11.empty() ? "vec4(0.0)" : name.scale10_11.c_str()); + fragBuilder->codeAppendf("bias = %s;", + name.bias10_11.empty() ? "vec4(0.0)" : name.bias10_11.c_str()); + } + + if (intervalCount >= 5) { + fragBuilder->codeAppend("}"); + } + if (intervalCount > 6) { + fragBuilder->codeAppend("} else {"); + } + if (intervalCount >= 7) { + fragBuilder->codeAppend("// thresholds9_13.z is mid-point for intervals (12,13) and (14,15)\n"); + fragBuilder->codeAppendf("if (t < %s.z) {", name.thresholds9_13.c_str()); + fragBuilder->codeAppendf("scale = %s;", + name.scale12_13.empty() ? "vec4(0.0)" : name.scale12_13.c_str()); + fragBuilder->codeAppendf("bias = %s;", + name.bias12_13.empty() ? "vec4(0.0)" : name.bias12_13.c_str()); + } + if (intervalCount > 7) { + fragBuilder->codeAppend("} else {"); + fragBuilder->codeAppendf("scale = %s;", + name.scale14_15.empty() ? "vec4(0.0)" : name.scale14_15.c_str()); + fragBuilder->codeAppendf("bias = %s;", + name.bias14_15.empty() ? 
"vec4(0.0)" : name.bias14_15.c_str()); + } + if (intervalCount >= 7) { + fragBuilder->codeAppend("}"); + } + if (intervalCount >= 6) { + fragBuilder->codeAppend("}"); + } +} + +void GLUnrolledBinaryGradientColorizer::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + auto* uniformHandler = args.uniformHandler; + UnrolledBinaryUniformName uniformNames = {}; + uniformNames.scale0_1 = AddUniform(uniformHandler, "scale0_1", intervalCount, 0); + uniformNames.scale2_3 = AddUniform(uniformHandler, "scale2_3", intervalCount, 1); + uniformNames.scale4_5 = AddUniform(uniformHandler, "scale4_5", intervalCount, 2); + uniformNames.scale6_7 = AddUniform(uniformHandler, "scale6_7", intervalCount, 3); + uniformNames.scale8_9 = AddUniform(uniformHandler, "scale8_9", intervalCount, 4); + uniformNames.scale10_11 = AddUniform(uniformHandler, "scale10_11", intervalCount, 5); + uniformNames.scale12_13 = AddUniform(uniformHandler, "scale12_13", intervalCount, 6); + uniformNames.scale14_15 = AddUniform(uniformHandler, "scale14_15", intervalCount, 7); + uniformNames.bias0_1 = AddUniform(uniformHandler, "bias0_1", intervalCount, 0); + uniformNames.bias2_3 = AddUniform(uniformHandler, "bias2_3", intervalCount, 1); + uniformNames.bias4_5 = AddUniform(uniformHandler, "bias4_5", intervalCount, 2); + uniformNames.bias6_7 = AddUniform(uniformHandler, "bias6_7", intervalCount, 3); + uniformNames.bias8_9 = AddUniform(uniformHandler, "bias8_9", intervalCount, 4); + uniformNames.bias10_11 = AddUniform(uniformHandler, "bias10_11", intervalCount, 5); + uniformNames.bias12_13 = AddUniform(uniformHandler, "bias12_13", intervalCount, 6); + uniformNames.bias14_15 = AddUniform(uniformHandler, "bias14_15", intervalCount, 7); + uniformNames.thresholds1_7 = + args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "thresholds1_7"); + uniformNames.thresholds9_13 = + args.uniformHandler->addUniform(ShaderFlags::Fragment, SLType::Float4, "thresholds9_13"); + + fragBuilder->codeAppendf("float t = %s.x;", args.inputColor.c_str()); + fragBuilder->codeAppend("vec4 scale, bias;"); + fragBuilder->codeAppendf("// interval count: %d\n", intervalCount); + + if (intervalCount >= 4) { + fragBuilder->codeAppend("// thresholds1_7.w is mid-point for intervals (0,7) and (8,15)\n"); + fragBuilder->codeAppendf("if (t < %s.w) {", uniformNames.thresholds1_7.c_str()); + } + AppendCode1(fragBuilder, intervalCount, uniformNames); + if (intervalCount > 4) { + fragBuilder->codeAppend("} else {"); + } + AppendCode2(fragBuilder, intervalCount, uniformNames); + if (intervalCount >= 4) { + fragBuilder->codeAppend("}"); + } + + fragBuilder->codeAppendf("%s = vec4(t * scale + bias);", args.outputColor.c_str()); +} + +void SetUniformData(UniformBuffer* uniformBuffer, const std::string& name, int intervalCount, + int limit, const Color& value) { + if (intervalCount > limit) { + uniformBuffer->setData(name, value); + } +} + +void GLUnrolledBinaryGradientColorizer::onSetData(UniformBuffer* uniformBuffer) const { + SetUniformData(uniformBuffer, "scale0_1", intervalCount, 0, scale0_1); + SetUniformData(uniformBuffer, "scale2_3", intervalCount, 1, scale2_3); + SetUniformData(uniformBuffer, "scale4_5", intervalCount, 2, scale4_5); + SetUniformData(uniformBuffer, "scale6_7", intervalCount, 3, scale6_7); + SetUniformData(uniformBuffer, "scale8_9", intervalCount, 4, scale8_9); + SetUniformData(uniformBuffer, "scale10_11", intervalCount, 5, scale10_11); + SetUniformData(uniformBuffer, "scale12_13", intervalCount, 6, scale12_13); + 
SetUniformData(uniformBuffer, "scale14_15", intervalCount, 7, scale14_15); + SetUniformData(uniformBuffer, "bias0_1", intervalCount, 0, bias0_1); + SetUniformData(uniformBuffer, "bias2_3", intervalCount, 1, bias2_3); + SetUniformData(uniformBuffer, "bias4_5", intervalCount, 2, bias4_5); + SetUniformData(uniformBuffer, "bias6_7", intervalCount, 3, bias6_7); + SetUniformData(uniformBuffer, "bias8_9", intervalCount, 4, bias8_9); + SetUniformData(uniformBuffer, "bias10_11", intervalCount, 5, bias10_11); + SetUniformData(uniformBuffer, "bias12_13", intervalCount, 6, bias12_13); + SetUniformData(uniformBuffer, "bias14_15", intervalCount, 7, bias14_15); + uniformBuffer->setData("thresholds1_7", thresholds1_7); + uniformBuffer->setData("thresholds9_13", thresholds9_13); +} +} // namespace tgfx diff --git a/src/opengl/processors/GLUnrolledBinaryGradientColorizer.h b/src/opengl/processors/GLUnrolledBinaryGradientColorizer.h new file mode 100644 index 00000000..2405a2c9 --- /dev/null +++ b/src/opengl/processors/GLUnrolledBinaryGradientColorizer.h @@ -0,0 +1,49 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "gpu/processors/UnrolledBinaryGradientColorizer.h" + +namespace tgfx { +/** + * The 7 threshold positions that define the boundaries of the 8 intervals (excluding t = 0, and t = + * 1) are packed into two vec4's instead of having up to 7 separate scalar uniforms. For low + * interval counts, the extra components are ignored in the shader, but the uniform simplification + * is worth it. 
It is assumed thresholds are provided in increasing value, mapped as: + * - thresholds1_7.x = boundary between (0,1) and (2,3) -> 1_2 + * - .y = boundary between (2,3) and (4,5) -> 3_4 + * - .z = boundary between (4,5) and (6,7) -> 5_6 + * - .w = boundary between (6,7) and (8,9) -> 7_8 + * - thresholds9_13.x = boundary between (8,9) and (10,11) -> 9_10 + * - .y = boundary between (10,11) and (12,13) -> 11_12 + * - .z = boundary between (12,13) and (14,15) -> 13_14 + * - .w = unused + */ +class GLUnrolledBinaryGradientColorizer : public UnrolledBinaryGradientColorizer { + public: + GLUnrolledBinaryGradientColorizer(int intervalCount, Color* scales, Color* biases, + Rect thresholds1_7, Rect thresholds9_13); + + void emitCode(EmitArgs& args) const override; + + private: + void onSetData(UniformBuffer*) const override; +}; +} // namespace tgfx diff --git a/src/opengl/processors/GLXfermodeFragmentProcessor.cpp b/src/opengl/processors/GLXfermodeFragmentProcessor.cpp new file mode 100644 index 00000000..7fd160ec --- /dev/null +++ b/src/opengl/processors/GLXfermodeFragmentProcessor.cpp @@ -0,0 +1,79 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLXfermodeFragmentProcessor.h" +#include "gpu/processors/ConstColorProcessor.h" +#include "opengl/GLBlend.h" + +namespace tgfx { +std::unique_ptr XfermodeFragmentProcessor::MakeFromTwoProcessors( + std::unique_ptr src, std::unique_ptr dst, + BlendMode mode) { + if (src == nullptr && dst == nullptr) { + return nullptr; + } + switch (mode) { + case BlendMode::Clear: + return ConstColorProcessor::Make(Color::Transparent(), InputMode::Ignore); + case BlendMode::Src: + return src; + case BlendMode::Dst: + return dst; + default: + return std::unique_ptr( + new GLXfermodeFragmentProcessor(std::move(src), std::move(dst), mode)); + } +} + +GLXfermodeFragmentProcessor::GLXfermodeFragmentProcessor(std::unique_ptr src, + std::unique_ptr dst, + BlendMode mode) + : XfermodeFragmentProcessor(std::move(src), std::move(dst), mode) { +} + +void GLXfermodeFragmentProcessor::emitCode(EmitArgs& args) const { + auto* fragBuilder = args.fragBuilder; + if (child == XfermodeFragmentProcessor::Child::TwoChild) { + std::string inputColor; + if (!args.inputColor.empty()) { + inputColor = "inputColor"; + fragBuilder->codeAppendf("vec4 inputColor = vec4(%s.rgb, 1.0);", args.inputColor.c_str()); + } + std::string srcColor = "xfer_src"; + emitChild(0, inputColor, &srcColor, args); + std::string dstColor = "xfer_dst"; + emitChild(1, inputColor, &dstColor, args); + fragBuilder->codeAppendf("// Compose Xfer Mode: %s\n", BlendModeName(mode)); + AppendMode(fragBuilder, srcColor, dstColor, args.outputColor, mode); + // re-multiply the output color by the input color's alpha + if (!args.inputColor.empty()) { + fragBuilder->codeAppendf("%s *= %s.a;", args.outputColor.c_str(), args.inputColor.c_str()); + } + } else { + std::string childColor = "child"; + emitChild(0, &childColor, args); + // emit blend code + fragBuilder->codeAppendf("// Compose Xfer Mode: %s\n", BlendModeName(mode)); + if (child == XfermodeFragmentProcessor::Child::DstChild) { + AppendMode(fragBuilder, args.inputColor, childColor, args.outputColor, mode); + } else { + AppendMode(fragBuilder, childColor, args.inputColor, args.outputColor, mode); + } + } +} +} // namespace tgfx diff --git a/src/opengl/processors/GLXfermodeFragmentProcessor.h b/src/opengl/processors/GLXfermodeFragmentProcessor.h new file mode 100644 index 00000000..c6127a0b --- /dev/null +++ b/src/opengl/processors/GLXfermodeFragmentProcessor.h @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/processors/XfermodeFragmentProcessor.h" + +namespace tgfx { +class GLXfermodeFragmentProcessor : public XfermodeFragmentProcessor { + public: + GLXfermodeFragmentProcessor(std::unique_ptr src, + std::unique_ptr dst, BlendMode mode); + + void emitCode(EmitArgs& args) const override; +}; +} // namespace tgfx diff --git a/src/opengl/qt/QGLDevice.cpp b/src/opengl/qt/QGLDevice.cpp new file mode 100644 index 00000000..3d69af72 --- /dev/null +++ b/src/opengl/qt/QGLDevice.cpp @@ -0,0 +1,184 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/qt/QGLDevice.h" +#include +#include +#include "QGLProcGetter.h" +#include "utils/Log.h" +#ifdef __APPLE__ +#include +#include +#endif + +namespace tgfx { +void* GLDevice::CurrentNativeHandle() { + return QOpenGLContext::currentContext(); +} + +std::shared_ptr GLDevice::Current() { + auto context = QOpenGLContext::currentContext(); + auto surface = context != nullptr ? context->surface() : nullptr; + return QGLDevice::Wrap(context, surface, true); +} + +std::shared_ptr GLDevice::Make(void*) { + return QGLDevice::Make(); +} + +std::shared_ptr QGLDevice::Make(QOpenGLContext* sharedContext, QSurfaceFormat* format) { + if (QThread::currentThread() != QApplication::instance()->thread()) { + LOGE("QGLDevice::Make() can be called on the main (GUI) thread only."); + return nullptr; + } + auto context = new QOpenGLContext(); + if (format) { + context->setFormat(*format); + } + if (sharedContext) { + context->setShareContext(sharedContext); + } + context->create(); + auto surface = new QOffscreenSurface(); + surface->setFormat(context->format()); + surface->create(); + auto device = QGLDevice::Wrap(context, surface, false); + if (device == nullptr) { + delete surface; + delete context; + } + return device; +} + +std::shared_ptr QGLDevice::MakeAdopted(QOpenGLContext* context, QSurface* surface) { + return QGLDevice::Wrap(context, surface, true); +} + +std::shared_ptr QGLDevice::Wrap(QOpenGLContext* qtContext, QSurface* qtSurface, + bool isAdopted) { + auto glContext = GLDevice::Get(qtContext); + if (glContext) { + return std::static_pointer_cast(glContext); + } + if (qtContext == nullptr || qtSurface == nullptr) { + return nullptr; + } + auto oldContext = QOpenGLContext::currentContext(); + auto oldSurface = oldContext != nullptr ? 
oldContext->surface() : nullptr; + if (oldContext != qtContext) { + if (!qtContext->makeCurrent(qtSurface)) { + return nullptr; + } + } + auto device = std::shared_ptr(new QGLDevice(qtContext)); + device->isAdopted = isAdopted; + device->qtContext = qtContext; + device->qtSurface = qtSurface; + device->weakThis = device; + if (oldContext != qtContext) { + qtContext->doneCurrent(); + if (oldContext != nullptr) { + oldContext->makeCurrent(oldSurface); + } + } + return device; +} + +QGLDevice::QGLDevice(void* nativeHandle) : GLDevice(nativeHandle) { +} + +QGLDevice::~QGLDevice() { + releaseAll(); +#ifdef __APPLE__ + if (textureCache != nil) { + CFRelease(textureCache); + textureCache = nil; + } +#endif + if (isAdopted) { + return; + } + delete qtContext; + delete qtSurface; +} + +bool QGLDevice::sharableWith(void* nativeContext) const { + auto shareContext = reinterpret_cast(nativeContext); + return QOpenGLContext::areSharing(shareContext, qtContext); +} + +void QGLDevice::moveToThread(QThread* targetThread) { + ownerThread = targetThread; + qtContext->moveToThread(targetThread); + if (qtSurface->surfaceClass() == QSurface::SurfaceClass::Offscreen) { + static_cast(qtSurface)->moveToThread(targetThread); + } +} + +bool QGLDevice::onMakeCurrent() { + if (ownerThread != nullptr && QThread::currentThread() != ownerThread) { + LOGE("QGLDevice can not be locked in a different thread."); + return false; + } + oldContext = QOpenGLContext::currentContext(); + if (oldContext) { + oldSurface = oldContext->surface(); + } + if (oldContext == qtContext) { + return true; + } + if (!qtContext->makeCurrent(qtSurface)) { + LOGE("QGLDevice::makeCurrent() failed!"); + return false; + } + return true; +} + +void QGLDevice::onClearCurrent() { + if (oldContext == qtContext) { + oldContext = nullptr; + oldSurface = nullptr; + return; + } + qtContext->doneCurrent(); + if (oldContext != nullptr) { + oldContext->makeCurrent(oldSurface); + oldContext = nullptr; + oldSurface = nullptr; + } +} + +#ifdef __APPLE__ +CVOpenGLTextureCacheRef QGLDevice::getTextureCache() { + if (!textureCache) { + // The toolchain of QT does not allow us to access the native OpenGL interface directly. + typedef CGLContextObj (*GetCurrentContext)(); + typedef CGLPixelFormatObj (*GetPixelFormat)(CGLContextObj); + auto getCurrentContext = + reinterpret_cast(dlsym(RTLD_DEFAULT, "CGLGetCurrentContext")); + auto getPixelFormat = + reinterpret_cast(dlsym(RTLD_DEFAULT, "CGLGetPixelFormat")); + auto cglContext = getCurrentContext(); + auto pixelFormatObj = getPixelFormat(cglContext); + CVOpenGLTextureCacheCreate(kCFAllocatorDefault, NULL, cglContext, pixelFormatObj, NULL, + &textureCache); + } + return textureCache; +} +#endif +} // namespace tgfx \ No newline at end of file diff --git a/src/opengl/qt/QGLHardwareBuffer.cpp b/src/opengl/qt/QGLHardwareBuffer.cpp new file mode 100644 index 00000000..66e18ac2 --- /dev/null +++ b/src/opengl/qt/QGLHardwareBuffer.cpp @@ -0,0 +1,57 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "gpu/Texture.h" +#include "tgfx/opengl/qt/QGLDevice.h" +#ifdef __APPLE__ +#include "opengl/cgl/CGLHardwareTexture.h" +#endif +#include "tgfx/platform/HardwareBuffer.h" + +namespace tgfx { +#ifdef __APPLE__ + +bool HardwareBufferAvailable() { + return true; +} + +std::shared_ptr Texture::MakeFrom(Context* context, HardwareBufferRef hardwareBuffer, + YUVColorSpace) { + if (!HardwareBufferCheck(hardwareBuffer)) { + return nullptr; + } + auto qglDevice = static_cast(context->device()); + if (qglDevice == nullptr) { + return nullptr; + } + auto textureCache = qglDevice->getTextureCache(); + return CGLHardwareTexture::MakeFrom(context, hardwareBuffer, textureCache); +} + +#else + +bool HardwareBufferAvailable() { + return false; +} + +std::shared_ptr Texture::MakeFrom(Context*, HardwareBufferRef, YUVColorSpace) { + return nullptr; +} + +#endif +} // namespace tgfx \ No newline at end of file diff --git a/src/opengl/qt/QGLProcGetter.cpp b/src/opengl/qt/QGLProcGetter.cpp new file mode 100644 index 00000000..35130b00 --- /dev/null +++ b/src/opengl/qt/QGLProcGetter.cpp @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "QGLProcGetter.h" + +namespace tgfx { +void* QGLProcGetter::getProcAddress(const char* name) const { + return reinterpret_cast(glContext->getProcAddress(name)); +} + +std::unique_ptr GLProcGetter::Make() { + auto context = QOpenGLContext::currentContext(); + if (context == nullptr) { + return nullptr; + } + return std::make_unique(context); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/opengl/qt/QGLProcGetter.h b/src/opengl/qt/QGLProcGetter.h new file mode 100644 index 00000000..5a3d14c8 --- /dev/null +++ b/src/opengl/qt/QGLProcGetter.h @@ -0,0 +1,35 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "opengl/GLProcGetter.h" + +namespace tgfx { +class QGLProcGetter : public GLProcGetter { + public: + explicit QGLProcGetter(QOpenGLContext* glContext) : glContext(glContext) { + } + + void* getProcAddress(const char name[]) const override; + + private: + QOpenGLContext* glContext = nullptr; +}; +} // namespace tgfx diff --git a/src/opengl/qt/QGLWindow.cpp b/src/opengl/qt/QGLWindow.cpp new file mode 100644 index 00000000..9bb914a0 --- /dev/null +++ b/src/opengl/qt/QGLWindow.cpp @@ -0,0 +1,114 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/qt/QGLWindow.h" +#include +#include +#include +#include "gpu/Texture.h" +#include "opengl/GLRenderTarget.h" +#include "opengl/GLSampler.h" +#include "tgfx/opengl/GLFunctions.h" + +namespace tgfx { + +std::shared_ptr QGLWindow::MakeFrom(QQuickItem* quickItem, + QOpenGLContext* sharedContext) { + if (quickItem == nullptr) { + return nullptr; + } + auto device = QGLDevice::Make(sharedContext); + if (device == nullptr) { + return nullptr; + } + return std::shared_ptr(new QGLWindow(device, quickItem)); +} + +QGLWindow::QGLWindow(std::shared_ptr device, QQuickItem* quickItem) + : DoubleBufferedWindow(std::move(device)), quickItem(quickItem) { +} + +QGLWindow::~QGLWindow() { + frontTexture = nullptr; + backTexture = nullptr; + delete outTexture; +} + +void QGLWindow::moveToThread(QThread* targetThread) { + std::lock_guard autoLock(locker); + static_cast(device.get())->moveToThread(targetThread); +} + +QSGTexture* QGLWindow::getTexture() { + std::lock_guard autoLock(locker); + auto nativeWindow = quickItem->window(); + if (nativeWindow == nullptr) { + return nullptr; + } + if (textureInvalid && frontTexture) { + textureInvalid = false; + if (outTexture) { + delete outTexture; + outTexture = nullptr; + } + auto textureID = static_cast(frontTexture->getSampler())->id; + auto width = static_cast(ceil(quickItem->width())); + auto height = static_cast(ceil(quickItem->height())); +#if (QT_VERSION >= QT_VERSION_CHECK(6, 0, 0)) + outTexture = QNativeInterface::QSGOpenGLTexture::fromNative( + textureID, nativeWindow, QSize(width, height), QQuickWindow::TextureHasAlphaChannel); + +#elif (QT_VERSION >= QT_VERSION_CHECK(5, 15, 0)) + outTexture = nativeWindow->createTextureFromNativeObject(QQuickWindow::NativeObjectTexture, + &textureID, 0, QSize(width, height), + QQuickWindow::TextureHasAlphaChannel); +#else + outTexture = nativeWindow->createTextureFromId(textureID, QSize(width, height), + QQuickWindow::TextureHasAlphaChannel); +#endif + } + return outTexture; +} + +void QGLWindow::invalidateTexture() { + std::lock_guard autoLock(locker); + textureInvalid = true; +} + +std::shared_ptr QGLWindow::onCreateSurface(Context* context) { + frontTexture = nullptr; + backTexture = nullptr; + auto nativeWindow = quickItem->window(); + auto pixelRatio = nativeWindow ? nativeWindow->devicePixelRatio() : 1.0f; + auto width = static_cast(ceil(quickItem->width() * pixelRatio)); + auto height = static_cast(ceil(quickItem->height() * pixelRatio)); + if (width <= 0 || height <= 0) { + return nullptr; + } + frontTexture = Texture::MakeRGBA(context, width, height); + backTexture = Texture::MakeRGBA(context, width, height); + frontSurface = Surface::MakeFrom(frontTexture); + backSurface = Surface::MakeFrom(backTexture); + return backSurface; +} + +void QGLWindow::onSwapSurfaces(Context*) { + std::swap(frontTexture, backTexture); + invalidateTexture(); +} +} // namespace tgfx diff --git a/src/opengl/webgl/WebGLDevice.cpp b/src/opengl/webgl/WebGLDevice.cpp new file mode 100644 index 00000000..2a838370 --- /dev/null +++ b/src/opengl/webgl/WebGLDevice.cpp @@ -0,0 +1,135 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/webgl/WebGLDevice.h" +#include "WebGLProcGetter.h" +#include "utils/Log.h" + +namespace tgfx { + +void* GLDevice::CurrentNativeHandle() { + return reinterpret_cast<void*>(emscripten_webgl_get_current_context()); +} + +std::shared_ptr<GLDevice> GLDevice::Current() { + auto context = emscripten_webgl_get_current_context(); + auto glDevice = GLDevice::Get(reinterpret_cast<void*>(context)); + if (glDevice) { + return std::static_pointer_cast<WebGLDevice>(glDevice); + } + return WebGLDevice::Wrap(context, true); +} + +std::shared_ptr<GLDevice> GLDevice::Make(void*) { + return nullptr; +} + +std::shared_ptr<WebGLDevice> WebGLDevice::MakeFrom(const std::string& canvasID) { + auto oldContext = emscripten_webgl_get_current_context(); + + EmscriptenWebGLContextAttributes attrs; + emscripten_webgl_init_context_attributes(&attrs); + attrs.depth = EM_FALSE; + attrs.stencil = EM_FALSE; + attrs.antialias = EM_FALSE; + auto context = emscripten_webgl_create_context(canvasID.c_str(), &attrs); + if (context == 0) { + LOGE("WebGLDevice::MakeFrom emscripten_webgl_create_context error"); + return nullptr; + } + auto result = emscripten_webgl_make_context_current(context); + if (result != EMSCRIPTEN_RESULT_SUCCESS) { + emscripten_webgl_destroy_context(context); + if (oldContext) { + emscripten_webgl_make_context_current(oldContext); + } + return nullptr; + } + emscripten_webgl_make_context_current(0); + if (oldContext) { + emscripten_webgl_make_context_current(oldContext); + } + return WebGLDevice::Wrap(context); +} + +std::shared_ptr<WebGLDevice> WebGLDevice::Wrap(EMSCRIPTEN_WEBGL_CONTEXT_HANDLE webglContext, + bool isAdopted) { + if (webglContext == 0) { + return nullptr; + } + auto oldContext = emscripten_webgl_get_current_context(); + if (oldContext != webglContext) { + auto result = emscripten_webgl_make_context_current(webglContext); + if (result != EMSCRIPTEN_RESULT_SUCCESS) { + emscripten_webgl_make_context_current(0); + if (oldContext) { + emscripten_webgl_make_context_current(oldContext); + } + return nullptr; + } + } + auto device = std::shared_ptr<WebGLDevice>(new WebGLDevice(webglContext)); + device->isAdopted = isAdopted; + device->context = webglContext; + device->weakThis = device; + if (oldContext != webglContext) { + emscripten_webgl_make_context_current(0); + if (oldContext) { + emscripten_webgl_make_context_current(oldContext); + } + } + return device; +} + +WebGLDevice::WebGLDevice(EMSCRIPTEN_WEBGL_CONTEXT_HANDLE nativeHandle) + : GLDevice(reinterpret_cast<void*>(nativeHandle)) { +} + +WebGLDevice::~WebGLDevice() { + releaseAll(); + if (isAdopted) { + return; + } + emscripten_webgl_destroy_context(context); +} + +bool WebGLDevice::onMakeCurrent() { + oldContext = emscripten_webgl_get_current_context(); + if (oldContext == context) { + // If the caller has already made a context current, keep that context and do not switch. + return true; + } + auto result = emscripten_webgl_make_context_current(context); + if (result != EMSCRIPTEN_RESULT_SUCCESS) { + 
LOGE("WebGLDevice::onMakeCurrent failure result = %d", result); + return false; + } + return true; +} + +void WebGLDevice::onClearCurrent() { + emscripten_webgl_make_context_current(0); + if (oldContext) { + emscripten_webgl_make_context_current(oldContext); + } +} + +bool WebGLDevice::sharableWith(void* nativeContext) const { + return nativeHandle == nativeContext; +} +} // namespace tgfx diff --git a/src/opengl/webgl/WebGLHardwareBuffer.cpp b/src/opengl/webgl/WebGLHardwareBuffer.cpp new file mode 100644 index 00000000..604219c2 --- /dev/null +++ b/src/opengl/webgl/WebGLHardwareBuffer.cpp @@ -0,0 +1,30 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "gpu/Texture.h" +#include "tgfx/platform/HardwareBuffer.h" + +namespace tgfx { +bool HardwareBufferAvailable() { + return false; +} + +std::shared_ptr Texture::MakeFrom(Context*, HardwareBufferRef, YUVColorSpace) { + return nullptr; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/opengl/webgl/WebGLProcGetter.cpp b/src/opengl/webgl/WebGLProcGetter.cpp new file mode 100644 index 00000000..06712228 --- /dev/null +++ b/src/opengl/webgl/WebGLProcGetter.cpp @@ -0,0 +1,156 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "WebGLProcGetter.h" +#include +#include +#include + +namespace tgfx { +static void emscripten_glWaitSync(GLsync sync, GLbitfield flags, GLuint64 timeout) { + uint32_t timeoutLo = timeout; + uint32_t timeoutHi = timeout >> 32; + emscripten_glWaitSync(sync, flags, timeoutLo, timeoutHi); +} + +void* WebGLProcGetter::getProcAddress(const char* name) const { +#define N(X) \ + if (0 == strcmp(#X, name)) { \ + return reinterpret_cast(emscripten_##X); \ + } + N(glActiveTexture) + N(glAttachShader) + N(glIsEnabled) + N(glBindAttribLocation) + N(glBindBuffer) + N(glBindFramebuffer) + N(glBindRenderbuffer) + N(glBindTexture) + N(glBlendColor) + N(glBlendEquation) + N(glBlendFunc) + N(glBufferData) + N(glCheckFramebufferStatus) + N(glClear) + N(glClearColor) + N(glClearStencil) + N(glColorMask) + N(glCompileShader) + N(glCopyTexSubImage2D) + N(glCreateProgram) + N(glCreateShader) + N(glDeleteBuffers) + N(glDeleteFramebuffers) + N(glDeleteProgram) + N(glDeleteRenderbuffers) + N(glDeleteShader) + N(glDeleteTextures) + N(glDepthMask) + N(glDisable) + N(glDisableVertexAttribArray) + N(glDrawArrays) + N(glDrawElements) + N(glEnable) + N(glEnableVertexAttribArray) + N(glFinish) + N(glFlush) + N(glFramebufferRenderbuffer) + N(glFramebufferTexture2D) + N(glGenBuffers) + N(glGenFramebuffers) + N(glGenRenderbuffers) + N(glGenTextures) + N(glGetBufferParameteriv) + N(glGetError) + N(glGetFramebufferAttachmentParameteriv) + N(glGetIntegerv) + N(glGetProgramInfoLog) + N(glGetProgramiv) + N(glGetRenderbufferParameteriv) + N(glGetShaderInfoLog) + N(glGetShaderPrecisionFormat) + N(glGetShaderiv) + N(glGetString) + N(glGetStringi) + N(glGetUniformLocation) + N(glIsTexture) + N(glLineWidth) + N(glLinkProgram) + N(glPixelStorei) + N(glReadPixels) + N(glRenderbufferStorage) + N(glScissor) + N(glShaderSource) + N(glTexImage2D) + N(glTexParameterf) + N(glTexParameterfv) + N(glTexParameteri) + N(glTexParameteriv) + N(glTexSubImage2D) + N(glUniform1f) + N(glUniform1fv) + N(glUniform1i) + N(glUniform1iv) + N(glUniform2f) + N(glUniform2fv) + N(glUniform2i) + N(glUniform2iv) + N(glUniform3f) + N(glUniform3fv) + N(glUniform3i) + N(glUniform3iv) + N(glUniform4f) + N(glUniform4fv) + N(glUniform4i) + N(glUniform4iv) + N(glUniformMatrix2fv) + N(glUniformMatrix3fv) + N(glUniformMatrix4fv) + N(glUseProgram) + N(glVertexAttrib1f) + N(glVertexAttrib2fv) + N(glVertexAttrib3fv) + N(glVertexAttrib4fv) + N(glVertexAttribPointer) + N(glViewport) + N(glGetAttribLocation) + N(glBlendEquationSeparate) + N(glBindVertexArray) + N(glDeleteVertexArrays) + N(glGenVertexArrays) + N(glBindVertexArrayOES) + N(glDeleteVertexArraysOES) + N(glGenVertexArraysOES) + N(glFenceSync) + N(glWaitSync) + N(glDeleteSync) + N(glBlitFramebuffer) + N(glRenderbufferStorageMultisample) +#undef N + + // We explicitly do not use GetProcAddress or something similar because its code size is quite + // large. We shouldn't need GetProcAddress because emscripten provides us all the valid function + // pointers for WebGL via the included headers. 
+ // https://github.com/emscripten-core/emscripten/blob/7ba7700902c46734987585409502f3c63beb650f/system/include/emscripten/html5_webgl.h#L93 + return nullptr; +} + +std::unique_ptr GLProcGetter::Make() { + return std::make_unique(); +} +} // namespace tgfx diff --git a/src/opengl/webgl/WebGLProcGetter.h b/src/opengl/webgl/WebGLProcGetter.h new file mode 100644 index 00000000..dbfe164f --- /dev/null +++ b/src/opengl/webgl/WebGLProcGetter.h @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "opengl/GLProcGetter.h" + +namespace tgfx { +class WebGLProcGetter : public GLProcGetter { + public: + void* getProcAddress(const char name[]) const override; +}; +} // namespace tgfx diff --git a/src/opengl/webgl/WebGLWindow.cpp b/src/opengl/webgl/WebGLWindow.cpp new file mode 100644 index 00000000..57c4b25b --- /dev/null +++ b/src/opengl/webgl/WebGLWindow.cpp @@ -0,0 +1,53 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
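// Usage sketch (illustrative, not part of the patch): resolving a GL entry point through the
// proc getter above. GLProcGetter::Make() returns a WebGLProcGetter on the web platform; the
// glClearColor prototype below follows the standard GL signature and is an assumption of this
// example, as is the internal header path.
#include "opengl/GLProcGetter.h"

static void ClearToBlack() {
  auto getter = tgfx::GLProcGetter::Make();
  if (getter == nullptr) {
    return;
  }
  using ClearColorProc = void (*)(float, float, float, float);
  auto clearColor = reinterpret_cast<ClearColorProc>(getter->getProcAddress("glClearColor"));
  if (clearColor != nullptr) {
    clearColor(0.0f, 0.0f, 0.0f, 1.0f);
  }
}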
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/opengl/webgl/WebGLWindow.h" +#include "tgfx/opengl/GLDefines.h" +#include "utils/Log.h" + +namespace tgfx { +std::shared_ptr WebGLWindow::MakeFrom(const std::string& canvasID) { + if (canvasID.empty()) { + return nullptr; + } + auto device = WebGLDevice::MakeFrom(canvasID); + if (device == nullptr) { + return nullptr; + } + auto window = std::shared_ptr(new WebGLWindow(device)); + window->canvasID = canvasID; + return window; +} + +WebGLWindow::WebGLWindow(std::shared_ptr device) : Window(std::move(device)) { +} + +std::shared_ptr WebGLWindow::onCreateSurface(Context* context) { + int width = 0; + int height = 0; + emscripten_get_canvas_element_size(canvasID.c_str(), &width, &height); + if (width <= 0 || height <= 0) { + LOGE("WebGLWindow::onCreateSurface() Can not create a Surface with zero size."); + return nullptr; + } + tgfx::GLFrameBufferInfo glInfo = {}; + glInfo.id = 0; + glInfo.format = GL_RGBA8; + return Surface::MakeFrom(context, {glInfo, width, height}, ImageOrigin::BottomLeft); +} +} // namespace tgfx diff --git a/src/platform/Print.cpp b/src/platform/Print.cpp new file mode 100644 index 00000000..ab7cac94 --- /dev/null +++ b/src/platform/Print.cpp @@ -0,0 +1,43 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#if !defined(__ANDROID__) && !defined(ANDROID) + +#include "tgfx/platform/Print.h" +#include +#include + +namespace tgfx { +void PrintLog(const char format[], ...) { + va_list args; + va_start(args, format); + vfprintf(stdout, format, args); + va_end(args); + fprintf(stdout, "\n"); +} + +void PrintError(const char format[], ...) { + va_list args; + va_start(args, format); + vfprintf(stderr, format, args); + va_end(args); + fprintf(stderr, "\n"); +} +} // namespace tgfx + +#endif \ No newline at end of file diff --git a/src/platform/android/AHardwareBufferFunctions.cpp b/src/platform/android/AHardwareBufferFunctions.cpp new file mode 100644 index 00000000..68188a6c --- /dev/null +++ b/src/platform/android/AHardwareBufferFunctions.cpp @@ -0,0 +1,55 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
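// Usage sketch (illustrative, not part of the patch): drawing into the WebGLWindow defined
// above, assuming the usual tgfx Window/Device flow (getDevice/lockContext/getSurface/present);
// the "#canvas" selector and the Surface::flush() call are assumptions of this example.
#include "tgfx/opengl/webgl/WebGLWindow.h"

static void RenderFrame() {
  auto window = tgfx::WebGLWindow::MakeFrom("#canvas");
  if (window == nullptr) {
    return;
  }
  auto device = window->getDevice();
  auto context = device->lockContext();
  if (context == nullptr) {
    return;
  }
  if (auto surface = window->getSurface(context)) {
    auto canvas = surface->getCanvas();
    canvas->clear();
    // ... draw with the canvas here ...
    surface->flush();
    window->present(context);
  }
  device->unlock();
}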
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "AHardwareBufferFunctions.h" +#include +#include +#include +#include + +namespace tgfx { +template +static void LoadSymbol(T*& function, const char* symbol) { + function = (T*)dlsym(RTLD_DEFAULT, symbol); +} + +const AHardwareBufferFunctions* AHardwareBufferFunctions::Get() { + static AHardwareBufferFunctions functions = *new AHardwareBufferFunctions(); + return &functions; +} + +AHardwareBufferFunctions::AHardwareBufferFunctions() { + char sdk[PROP_VALUE_MAX] = "0"; + __system_property_get("ro.build.version.sdk", sdk); + auto version = std::stoi(sdk); + if (version >= 26) { + LoadSymbol(allocate, "AHardwareBuffer_allocate"); + LoadSymbol(acquire, "AHardwareBuffer_acquire"); + LoadSymbol(release, "AHardwareBuffer_release"); + LoadSymbol(describe, "AHardwareBuffer_describe"); + LoadSymbol(lock, "AHardwareBuffer_lock"); + LoadSymbol(unlock, "AHardwareBuffer_unlock"); + LoadSymbol(fromHardwareBuffer, "AHardwareBuffer_fromHardwareBuffer"); + LoadSymbol(toHardwareBuffer, "AHardwareBuffer_toHardwareBuffer"); + } + if (version >= 30) { + LoadSymbol(fromBitmap, "AndroidBitmap_getHardwareBuffer"); + } +} + +} // namespace tgfx diff --git a/src/platform/android/AHardwareBufferFunctions.h b/src/platform/android/AHardwareBufferFunctions.h new file mode 100644 index 00000000..7c0aa494 --- /dev/null +++ b/src/platform/android/AHardwareBufferFunctions.h @@ -0,0 +1,55 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
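// Usage sketch (illustrative, not part of the patch): every pointer loaded above may stay
// nullptr on devices below API 26 (or below API 30 for fromBitmap), so callers null-check
// before use, as the rest of this patch does. The helper name below is only an example.
#include <android/hardware_buffer.h>
#include "AHardwareBufferFunctions.h"

static uint32_t GetBufferWidth(AHardwareBuffer* buffer) {
  auto describe = tgfx::AHardwareBufferFunctions::Get()->describe;
  if (describe == nullptr || buffer == nullptr) {
    return 0;  // AHardwareBuffer_describe is unavailable on this device
  }
  AHardwareBuffer_Desc desc = {};
  describe(buffer, &desc);
  return desc.width;
}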
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include + +namespace tgfx { +static constexpr int HARDWAREBUFFER_FORMAT_R8_UNORM = 0x38; + +using Allocate = int(const AHardwareBuffer_Desc* desc, AHardwareBuffer** outBuffer); +using Acquire = void(AHardwareBuffer* buffer); +using Release = void(AHardwareBuffer* buffer); +using Describe = void(const AHardwareBuffer* buffer, AHardwareBuffer_Desc* outDesc); +using Lock = int(AHardwareBuffer* buffer, uint64_t usage, int32_t fence, const ARect* rect, + void** outVirtualAddress); +using Unlock = int(AHardwareBuffer* buffer, int32_t* fence); +using FromHardwareBuffer = AHardwareBuffer*(JNIEnv* env, jobject hardwareBufferObj); +using ToHardwareBuffer = jobject(JNIEnv* env, AHardwareBuffer* hardwareBuffer); +using FromBitmap = int(JNIEnv* env, jobject bitmap, AHardwareBuffer** outBuffer); + +class AHardwareBufferFunctions { + public: + static const AHardwareBufferFunctions* Get(); + + Allocate* allocate = nullptr; + Acquire* acquire = nullptr; + Release* release = nullptr; + Describe* describe = nullptr; + Lock* lock = nullptr; + Unlock* unlock = nullptr; + FromHardwareBuffer* fromHardwareBuffer = nullptr; + ToHardwareBuffer* toHardwareBuffer = nullptr; + FromBitmap* fromBitmap = nullptr; + + private: + AHardwareBufferFunctions(); +}; +} // namespace tgfx diff --git a/src/platform/android/AndroidBitmap.cpp b/src/platform/android/AndroidBitmap.cpp new file mode 100644 index 00000000..3ccdccbf --- /dev/null +++ b/src/platform/android/AndroidBitmap.cpp @@ -0,0 +1,80 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/platform/android/AndroidBitmap.h" +#include +#include "AHardwareBufferFunctions.h" + +namespace tgfx { +static constexpr int BITMAP_FLAGS_ALPHA_UNPREMUL = 2; +static constexpr int BITMAP_FORMAT_RGBA_F16 = 9; +static constexpr int BITMAP_FORMAT_RGBA_1010102 = 10; + +ImageInfo AndroidBitmap::GetInfo(JNIEnv* env, jobject bitmap) { + if (env == nullptr || bitmap == nullptr) { + return {}; + } + AndroidBitmapInfo bitmapInfo = {}; + if (AndroidBitmap_getInfo(env, bitmap, &bitmapInfo) != 0) { + env->ExceptionClear(); + return {}; + } + AlphaType alphaType = (bitmapInfo.flags & BITMAP_FLAGS_ALPHA_UNPREMUL) + ? 
AlphaType::Unpremultiplied + : AlphaType::Premultiplied; + ColorType colorType; + switch (bitmapInfo.format) { + case ANDROID_BITMAP_FORMAT_RGBA_8888: + colorType = ColorType::RGBA_8888; + break; + case ANDROID_BITMAP_FORMAT_A_8: + colorType = ColorType::ALPHA_8; + break; + case ANDROID_BITMAP_FORMAT_RGB_565: + colorType = ColorType::RGB_565; + break; + case BITMAP_FORMAT_RGBA_F16: + colorType = ColorType::RGBA_F16; + break; + case BITMAP_FORMAT_RGBA_1010102: + colorType = ColorType::RGBA_1010102; + break; + default: + colorType = ColorType::Unknown; + break; + } + return ImageInfo::Make(static_cast(bitmapInfo.width), static_cast(bitmapInfo.height), + colorType, alphaType, bitmapInfo.stride); +} + +HardwareBufferRef AndroidBitmap::GetHardwareBuffer(JNIEnv* env, jobject bitmap) { + static const auto fromBitmap = AHardwareBufferFunctions::Get()->fromBitmap; + static const auto release = AHardwareBufferFunctions::Get()->release; + if (fromBitmap == nullptr || release == nullptr || bitmap == nullptr) { + return nullptr; + } + AHardwareBuffer* hardwareBuffer = nullptr; + fromBitmap(env, bitmap, &hardwareBuffer); + if (hardwareBuffer != nullptr) { + // The hardware buffer returned by AndroidBitmap_getHardwareBuffer() has a reference count of 1. + // We decrease it to keep the behaviour the same as AHardwareBuffer_fromHardwareBuffer(). + release(hardwareBuffer); + } + return hardwareBuffer; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/platform/android/GLExternalOESTexture.cpp b/src/platform/android/GLExternalOESTexture.cpp new file mode 100644 index 00000000..c3661ba2 --- /dev/null +++ b/src/platform/android/GLExternalOESTexture.cpp @@ -0,0 +1,67 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
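// Usage sketch (illustrative, not part of the patch): querying a Java Bitmap through the
// AndroidBitmap::GetInfo() helper above from a JNI entry point. The Java class and method
// names in the JNI symbol are placeholders.
#include <jni.h>
#include "tgfx/platform/android/AndroidBitmap.h"

extern "C" JNIEXPORT jboolean JNICALL Java_com_example_Demo_isRGBA8888(JNIEnv* env, jobject,
                                                                       jobject bitmap) {
  auto info = tgfx::AndroidBitmap::GetInfo(env, bitmap);
  if (info.isEmpty()) {
    return JNI_FALSE;  // unknown or unsupported bitmap format
  }
  return info.colorType() == tgfx::ColorType::RGBA_8888 ? JNI_TRUE : JNI_FALSE;
}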
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLExternalOESTexture.h" +#include "gpu/Gpu.h" +#include "opengl/GLSampler.h" +#include "tgfx/opengl/GLFunctions.h" + +namespace tgfx { +std::shared_ptr GLExternalOESTexture::Make(Context* context, int width, + int height) { + if (context == nullptr || width < 1 || height < 1) { + return nullptr; + } + auto gl = tgfx::GLFunctions::Get(context); + auto sampler = std::make_unique(); + sampler->target = GL_TEXTURE_EXTERNAL_OES; + sampler->format = tgfx::PixelFormat::RGBA_8888; + gl->genTextures(1, &sampler->id); + if (sampler->id == 0) { + return nullptr; + } + return Resource::Wrap(context, new GLExternalOESTexture(std::move(sampler), width, height)); +} + +GLExternalOESTexture::GLExternalOESTexture(std::unique_ptr sampler, int width, + int height) + : Texture(width, height, ImageOrigin::TopLeft), sampler(std::move(sampler)), + textureWidth(width), textureHeight(height) { +} + +void GLExternalOESTexture::updateTextureSize(int width, int height) { + textureWidth = width; + textureHeight = height; +} + +Point GLExternalOESTexture::getTextureCoord(float x, float y) const { + return {x / static_cast(textureWidth), y / static_cast(textureHeight)}; +} + +BackendTexture GLExternalOESTexture::getBackendTexture() const { + return getSampler()->getBackendTexture(textureWidth, textureHeight); +} + +size_t GLExternalOESTexture::memoryUsage() const { + return textureWidth * textureHeight * 3 / 2; +} + +void GLExternalOESTexture::onReleaseGPU() { + context->gpu()->deleteSampler(sampler.get()); +} +} // namespace tgfx diff --git a/src/platform/android/GLExternalOESTexture.h b/src/platform/android/GLExternalOESTexture.h new file mode 100644 index 00000000..52cd26cd --- /dev/null +++ b/src/platform/android/GLExternalOESTexture.h @@ -0,0 +1,50 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
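// Usage sketch (illustrative, not part of the patch): creating the external OES texture above
// on a locked Context and keeping its reported size in sync with the producer. The sizes are
// placeholders, and the shared_ptr return type of Make() is assumed here.
#include "GLExternalOESTexture.h"

static std::shared_ptr<tgfx::GLExternalOESTexture> MakeVideoTexture(tgfx::Context* context) {
  auto texture = tgfx::GLExternalOESTexture::Make(context, 1280, 720);
  if (texture == nullptr) {
    return nullptr;
  }
  // When the producer (for example, a SurfaceTexture) switches to a new frame size:
  texture->updateTextureSize(1920, 1080);
  return texture;
}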
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Texture.h" +#include "gpu/TextureSampler.h" + +namespace tgfx { +class GLExternalOESTexture : public Texture { + public: + static std::shared_ptr Make(Context* context, int width, int height); + + size_t memoryUsage() const override; + + const TextureSampler* getSampler() const override { + return sampler.get(); + } + + tgfx::Point getTextureCoord(float x, float y) const override; + + BackendTexture getBackendTexture() const override; + + void updateTextureSize(int width, int height); + + private: + std::unique_ptr sampler = {}; + int textureWidth = 0; + int textureHeight = 0; + + GLExternalOESTexture(std::unique_ptr sampler, int width, int height); + + void onReleaseGPU() override; +}; +} // namespace tgfx diff --git a/src/platform/android/HandlerThread.cpp b/src/platform/android/HandlerThread.cpp new file mode 100644 index 00000000..0c845b1b --- /dev/null +++ b/src/platform/android/HandlerThread.cpp @@ -0,0 +1,69 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "HandlerThread.h" +#include "utils/Log.h" + +namespace tgfx { +static Global HandlerThreadClass; +static jmethodID HandlerThread_Constructor; +static jmethodID HandlerThread_start; +static jmethodID HandlerThread_quit; +static jmethodID HandlerThread_getLooper; + +void HandlerThread::JNIInit(JNIEnv* env) { + HandlerThreadClass = env->FindClass("android/os/HandlerThread"); + HandlerThread_Constructor = + env->GetMethodID(HandlerThreadClass.get(), "", "(Ljava/lang/String;)V"); + HandlerThread_start = env->GetMethodID(HandlerThreadClass.get(), "start", "()V"); + HandlerThread_quit = env->GetMethodID(HandlerThreadClass.get(), "quit", "()Z"); + HandlerThread_getLooper = + env->GetMethodID(HandlerThreadClass.get(), "getLooper", "()Landroid/os/Looper;"); +} + +std::shared_ptr HandlerThread::Make() { + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return nullptr; + } + auto name = env->NewStringUTF("tgfx_HandlerThread"); + auto thread = env->NewObject(HandlerThreadClass.get(), HandlerThread_Constructor, name); + env->CallVoidMethod(thread, HandlerThread_start); + if (env->ExceptionCheck()) { + env->ExceptionClear(); + LOGE("HandlerThread::Make(): failed to start the HandlerThread!"); + return nullptr; + } + return std::make_shared(env, thread); +} + +HandlerThread::HandlerThread(JNIEnv* env, jobject handlerThread) { + thread = handlerThread; + looper = env->CallObjectMethod(handlerThread, HandlerThread_getLooper); +} + +HandlerThread::~HandlerThread() { + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return; + } + env->CallBooleanMethod(thread.get(), HandlerThread_quit); +} +} // namespace tgfx diff --git a/src/platform/android/HandlerThread.h b/src/platform/android/HandlerThread.h new file mode 100644 index 00000000..20b162da --- /dev/null +++ b/src/platform/android/HandlerThread.h @@ -0,0 +1,43 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
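// Usage sketch (illustrative, not part of the patch): the background HandlerThread above only
// needs Make() and getLooper(); its looper is what the SurfaceTexture code later in this patch
// hands to the frame-available callback. Caching it in a static is an assumption of this example.
#include <jni.h>
#include "HandlerThread.h"

static jobject GetSharedLooper() {
  static auto thread = tgfx::HandlerThread::Make();  // starts "tgfx_HandlerThread"
  return thread != nullptr ? thread->getLooper() : nullptr;
}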
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "JNIUtil.h" + +namespace tgfx { +class HandlerThread { + public: + static void JNIInit(JNIEnv* env); + + static std::shared_ptr Make(); + + HandlerThread(JNIEnv* env, jobject thread); + + ~HandlerThread(); + + jobject getLooper() const { + return looper.get(); + } + + private: + Global thread; + Global looper; +}; +} // namespace tgfx diff --git a/src/platform/android/HardwareBuffer.cpp b/src/platform/android/HardwareBuffer.cpp new file mode 100644 index 00000000..44844295 --- /dev/null +++ b/src/platform/android/HardwareBuffer.cpp @@ -0,0 +1,131 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "AHardwareBufferFunctions.h" +#include "core/PixelBuffer.h" +#include "tgfx/platform/android/HardwareBufferJNI.h" + +namespace tgfx { +std::shared_ptr ImageBuffer::MakeFrom(HardwareBufferRef hardwareBuffer, + YUVColorSpace) { + return PixelBuffer::MakeFrom(hardwareBuffer); +} + +bool HardwareBufferCheck(HardwareBufferRef buffer) { + if (!HardwareBufferAvailable()) { + return false; + } + return buffer != nullptr; +} + +HardwareBufferRef HardwareBufferAllocate(int width, int height, bool alphaOnly) { + if (!HardwareBufferAvailable() || alphaOnly) { + return nullptr; + } + AHardwareBuffer* hardwareBuffer = nullptr; + AHardwareBuffer_Desc desc = { + static_cast(width), + static_cast(height), + 1, + AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM, + AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN | + AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT, + 0, + 0, + 0}; + AHardwareBufferFunctions::Get()->allocate(&desc, &hardwareBuffer); + return hardwareBuffer; +} + +HardwareBufferRef HardwareBufferRetain(HardwareBufferRef buffer) { + static const auto acquire = AHardwareBufferFunctions::Get()->acquire; + if (acquire != nullptr && buffer != nullptr) { + acquire(buffer); + } + return buffer; +} + +void HardwareBufferRelease(HardwareBufferRef buffer) { + static const auto release = AHardwareBufferFunctions::Get()->release; + if (release != nullptr && buffer != nullptr) { + release(buffer); + } +} + +void* HardwareBufferLock(HardwareBufferRef buffer) { + static const auto lock = AHardwareBufferFunctions::Get()->lock; + if (lock == nullptr || buffer == nullptr) { + return nullptr; + } + void* pixels = nullptr; + lock(buffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, + nullptr, &pixels); + return pixels; +} + +void HardwareBufferUnlock(HardwareBufferRef buffer) { + static const auto unlock = 
AHardwareBufferFunctions::Get()->unlock; + if (unlock != nullptr && buffer != nullptr) { + unlock(buffer, nullptr); + } +} + +ImageInfo HardwareBufferGetInfo(HardwareBufferRef buffer) { + static const auto describe = AHardwareBufferFunctions::Get()->describe; + if (!HardwareBufferAvailable() || buffer == nullptr) { + return {}; + } + AHardwareBuffer_Desc desc; + describe(buffer, &desc); + auto colorType = ColorType::Unknown; + auto alphaType = AlphaType::Premultiplied; + switch (desc.format) { + case HARDWAREBUFFER_FORMAT_R8_UNORM: + colorType = ColorType::ALPHA_8; + break; + case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM: + colorType = ColorType::RGBA_8888; + break; + case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM: + colorType = ColorType::RGBA_8888; + alphaType = AlphaType::Opaque; + break; + default: + break; + } + auto bytesPerPixel = ImageInfo::GetBytesPerPixel(colorType); + return ImageInfo::Make(static_cast(desc.width), static_cast(desc.height), colorType, + alphaType, desc.stride * bytesPerPixel); +} + +HardwareBufferRef HardwareBufferFromJavaObject(JNIEnv* env, jobject hardwareBufferObject) { + static const auto fromHardwareBuffer = AHardwareBufferFunctions::Get()->fromHardwareBuffer; + if (fromHardwareBuffer == nullptr || hardwareBufferObject == nullptr) { + return nullptr; + } + return fromHardwareBuffer(env, hardwareBufferObject); +} + +jobject HardwareBufferToJavaObject(JNIEnv* env, HardwareBufferRef hardwareBuffer) { + static const auto toHardwareBuffer = AHardwareBufferFunctions::Get()->toHardwareBuffer; + if (toHardwareBuffer == nullptr || hardwareBuffer == nullptr) { + return nullptr; + } + return toHardwareBuffer(env, hardwareBuffer); +} +} // namespace tgfx diff --git a/src/platform/android/JNIEnvironment.cpp b/src/platform/android/JNIEnvironment.cpp new file mode 100644 index 00000000..276c3bb1 --- /dev/null +++ b/src/platform/android/JNIEnvironment.cpp @@ -0,0 +1,94 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
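// Usage sketch (illustrative, not part of the patch): the typical CPU access cycle with the
// helpers above -- allocate, lock, write through the returned address, unlock, release. The
// zero-fill is only an example payload.
#include <cstring>
#include "tgfx/platform/HardwareBuffer.h"

static bool ZeroFill(int width, int height) {
  auto buffer = tgfx::HardwareBufferAllocate(width, height, false);
  if (buffer == nullptr) {
    return false;  // no AHardwareBuffer support on this device
  }
  auto pixels = tgfx::HardwareBufferLock(buffer);
  if (pixels != nullptr) {
    auto info = tgfx::HardwareBufferGetInfo(buffer);
    memset(pixels, 0, info.rowBytes() * static_cast<size_t>(info.height()));
    tgfx::HardwareBufferUnlock(buffer);
  }
  tgfx::HardwareBufferRelease(buffer);
  return pixels != nullptr;
}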
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/platform/android/JNIEnvironment.h" +#include +#include +#include +#include "JNIInit.h" +#include "utils/Log.h" + +namespace tgfx { +// https://android.googlesource.com/platform/art/+/refs/tags/android-5.0.2_r1/runtime/jni_internal.cc#63 +static constexpr size_t LOCALS_CAPACITY = 64; + +static std::atomic globalJavaVM = nullptr; +static pthread_key_t envKey = 0; + +static void JNI_Thread_Destroy(void*) { + JavaVM* javaVM = globalJavaVM; + if (javaVM != nullptr) { + javaVM->DetachCurrentThread(); + pthread_setspecific(envKey, nullptr); + } +} + +bool JNIEnvironment::SetJavaVM(JavaVM* javaVM) { + if (globalJavaVM == javaVM) { + return true; + } + if (globalJavaVM != nullptr && javaVM != nullptr && globalJavaVM != javaVM) { + return false; + } + globalJavaVM = javaVM; + if (globalJavaVM != nullptr) { + pthread_key_create(&envKey, JNI_Thread_Destroy); + JNIInit::Run(); + } else { + pthread_key_delete(envKey); + } + return true; +} + +JNIEnv* JNIEnvironment::Current() { + JavaVM* javaVM = globalJavaVM; + if (javaVM == nullptr) { + LOGE( + "JNIEnvironment::Current(): The global JavaVM has not been set yet! " + "Please use JNIEnvironment::SetJavaVM() to initialize the global JavaVM."); + return nullptr; + } + JNIEnv* env = nullptr; + auto result = javaVM->GetEnv(reinterpret_cast(&env), JNI_VERSION_1_4); + if (result == JNI_EDETACHED || env == nullptr) { + JavaVMAttachArgs args = {JNI_VERSION_1_4, "tgfx_JNIEnvironment", nullptr}; + if (javaVM->AttachCurrentThread(&env, &args) == JNI_OK) { + pthread_setspecific(envKey, (void*)env); + } + } + return env; +} + +JNIEnvironment::JNIEnvironment() { + env = JNIEnvironment::Current(); + if (env != nullptr) { + auto result = env->PushLocalFrame(LOCALS_CAPACITY); + if (result != 0) { + LOGE("JNIEnvironment(): Failed to call env->PushLocalFrame(%d)!", LOCALS_CAPACITY); + env->ExceptionClear(); + env = nullptr; + } + } +} + +JNIEnvironment::~JNIEnvironment() { + if (env != nullptr) { + env->PopLocalFrame(nullptr); + } +} +} // namespace tgfx diff --git a/src/platform/android/JNIInit.cpp b/src/platform/android/JNIInit.cpp new file mode 100644 index 00000000..63cbad19 --- /dev/null +++ b/src/platform/android/JNIInit.cpp @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
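// Usage sketch (illustrative, not part of the patch): the host application registers the JavaVM
// once, typically from JNI_OnLoad, before any of the JNI-backed code in this patch runs. After
// that, each native scope uses a stack-allocated JNIEnvironment, which pushes a local frame on
// construction and pops it on destruction.
#include <jni.h>
#include "tgfx/platform/android/JNIEnvironment.h"

JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
  tgfx::JNIEnvironment::SetJavaVM(vm);
  return JNI_VERSION_1_4;
}

static void CallIntoJava() {
  tgfx::JNIEnvironment environment;  // pushes a local frame
  auto env = environment.current();
  if (env == nullptr) {
    return;
  }
  // Local references created here are released when `environment` goes out of scope.
}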
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "JNIInit.h" +#include "HandlerThread.h" +#include "NativeCodec.h" +#include "platform/android/SurfaceTexture.h" + +namespace tgfx { +void JNIInit::Run() { + static bool initialized = false; + if (initialized) { + return; + } + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return; + } + initialized = true; + NativeCodec::JNIInit(env); + HandlerThread::JNIInit(env); + SurfaceTexture::JNIInit(env); + env->ExceptionClear(); +} +} // namespace tgfx diff --git a/src/platform/android/JNIInit.h b/src/platform/android/JNIInit.h new file mode 100644 index 00000000..f130bc59 --- /dev/null +++ b/src/platform/android/JNIInit.h @@ -0,0 +1,26 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { +class JNIInit { + public: + static void Run(); +}; +} // namespace tgfx diff --git a/src/platform/android/JNIUtil.cpp b/src/platform/android/JNIUtil.cpp new file mode 100644 index 00000000..a9a8971b --- /dev/null +++ b/src/platform/android/JNIUtil.cpp @@ -0,0 +1,37 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "JNIUtil.h" +#include +#include +#include "utils/Log.h" + +namespace tgfx { +jstring SafeToJString(JNIEnv* env, const std::string& text) { + static Global StringClass = env->FindClass("java/lang/String"); + static jmethodID StringConstructID = + env->GetMethodID(StringClass.get(), "", "([BLjava/lang/String;)V"); + auto array = env->NewByteArray(text.size()); + env->SetByteArrayRegion(array, 0, text.size(), reinterpret_cast(text.c_str())); + auto stringUTF = env->NewStringUTF("UTF-8"); + auto result = (jstring)env->NewObject(StringClass.get(), StringConstructID, array, stringUTF); + env->DeleteLocalRef(array); + env->DeleteLocalRef(stringUTF); + return result; +} +} // namespace tgfx diff --git a/src/platform/android/JNIUtil.h b/src/platform/android/JNIUtil.h new file mode 100644 index 00000000..5feb810d --- /dev/null +++ b/src/platform/android/JNIUtil.h @@ -0,0 +1,32 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include "tgfx/platform/android/Global.h" +#include "tgfx/platform/android/JNIEnvironment.h" + +namespace tgfx { +/** + * There is a known bug in env->NewStringUTF which will lead to crash in some devices. + * So we use this method instead. + */ +jstring SafeToJString(JNIEnv* env, const std::string& text); +} // namespace tgfx diff --git a/src/platform/android/NativeCodec.cpp b/src/platform/android/NativeCodec.cpp new file mode 100644 index 00000000..b6b52dca --- /dev/null +++ b/src/platform/android/NativeCodec.cpp @@ -0,0 +1,363 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
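// Usage sketch (illustrative, not part of the patch): converting a std::string to a jstring
// with the SafeToJString() helper above instead of env->NewStringUTF(); the returned jstring is
// a local reference owned by the caller. The surrounding function is only an example.
#include <string>
#include "JNIUtil.h"

static jstring ToJavaPath(JNIEnv* env, const std::string& filePath) {
  auto jPath = tgfx::SafeToJString(env, filePath);
  // ... pass jPath to a Java method, then let the local frame (or DeleteLocalRef) clean it up.
  return jPath;
}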
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "NativeCodec.h" +#include +#include "NativeImageBuffer.h" +#include "platform/android/AHardwareBufferFunctions.h" +#include "tgfx/core/Pixmap.h" +#include "tgfx/platform/android/AndroidBitmap.h" +#include "utils/Log.h" + +namespace tgfx { +static Global BitmapFactoryOptionsClass; +static jmethodID BitmapFactoryOptions_Constructor; +static jfieldID BitmapFactoryOptions_inJustDecodeBounds; +static jfieldID BitmapFactoryOptions_inPreferredConfig; +static jfieldID BitmapFactoryOptions_inPremultiplied; +static jfieldID BitmapFactoryOptions_outWidth; +static jfieldID BitmapFactoryOptions_outHeight; +static Global BitmapFactoryClass; +static jmethodID BitmapFactory_decodeFile; +static jmethodID BitmapFactory_decodeByteArray; +static Global ByteArrayInputStreamClass; +static jmethodID ByteArrayInputStream_Constructor; +static Global ExifInterfaceClass; +static jmethodID ExifInterface_Constructor_Path; +static jmethodID ExifInterface_Constructor_Stream; +static jmethodID ExifInterfaceClass_getAttributeInt; +static Global BitmapClass; +static jmethodID Bitmap_copy; +static jmethodID Bitmap_getConfig; +static Global BitmapConfigClass; +static jmethodID BitmapConfig_equals; +static jfieldID BitmapConfig_ALPHA_8; +static jfieldID BitmapConfig_ARGB_8888; +static jfieldID BitmapConfig_RGB_565; +static jfieldID BitmapConfig_HARDWARE; + +void NativeCodec::JNIInit(JNIEnv* env) { + BitmapFactoryOptionsClass = env->FindClass("android/graphics/BitmapFactory$Options"); + if (BitmapFactoryOptionsClass.get() == nullptr) { + LOGE("Could not run NativeCodec.InitJNI(), BitmapFactoryOptionsClass is not found!"); + return; + } + BitmapFactoryOptions_Constructor = + env->GetMethodID(BitmapFactoryOptionsClass.get(), "", "()V"); + BitmapFactoryOptions_inJustDecodeBounds = + env->GetFieldID(BitmapFactoryOptionsClass.get(), "inJustDecodeBounds", "Z"); + BitmapFactoryOptions_inPreferredConfig = env->GetFieldID( + BitmapFactoryOptionsClass.get(), "inPreferredConfig", "Landroid/graphics/Bitmap$Config;"); + BitmapFactoryOptions_inPremultiplied = + env->GetFieldID(BitmapFactoryOptionsClass.get(), "inPremultiplied", "Z"); + BitmapFactoryOptions_outWidth = env->GetFieldID(BitmapFactoryOptionsClass.get(), "outWidth", "I"); + BitmapFactoryOptions_outHeight = + env->GetFieldID(BitmapFactoryOptionsClass.get(), "outHeight", "I"); + ByteArrayInputStreamClass = env->FindClass("java/io/ByteArrayInputStream"); + ByteArrayInputStream_Constructor = + env->GetMethodID(ByteArrayInputStreamClass.get(), "", "([B)V"); + ExifInterfaceClass = env->FindClass("androidx/exifinterface/media/ExifInterface"); + ExifInterface_Constructor_Path = + env->GetMethodID(ExifInterfaceClass.get(), "", "(Ljava/lang/String;)V"); + ExifInterface_Constructor_Stream = + env->GetMethodID(ExifInterfaceClass.get(), "", "(Ljava/io/InputStream;)V"); + ExifInterfaceClass_getAttributeInt = + env->GetMethodID(ExifInterfaceClass.get(), "getAttributeInt", "(Ljava/lang/String;I)I"); + BitmapFactoryClass = env->FindClass("android/graphics/BitmapFactory"); + BitmapFactory_decodeFile = env->GetStaticMethodID( + BitmapFactoryClass.get(), "decodeFile", + "(Ljava/lang/String;Landroid/graphics/BitmapFactory$Options;)Landroid/graphics/Bitmap;"); + BitmapFactory_decodeByteArray = env->GetStaticMethodID( + BitmapFactoryClass.get(), "decodeByteArray", + "([BIILandroid/graphics/BitmapFactory$Options;)Landroid/graphics/Bitmap;"); + BitmapClass = 
env->FindClass("android/graphics/Bitmap"); + Bitmap_copy = env->GetMethodID(BitmapClass.get(), "copy", + "(Landroid/graphics/Bitmap$Config;Z)Landroid/graphics/Bitmap;"); + Bitmap_getConfig = + env->GetMethodID(BitmapClass.get(), "getConfig", "()Landroid/graphics/Bitmap$Config;"); + BitmapConfigClass = env->FindClass("android/graphics/Bitmap$Config"); + BitmapConfig_equals = + env->GetMethodID(BitmapConfigClass.get(), "equals", "(Ljava/lang/Object;)Z"); + BitmapConfig_ALPHA_8 = + env->GetStaticFieldID(BitmapConfigClass.get(), "ALPHA_8", "Landroid/graphics/Bitmap$Config;"); + BitmapConfig_ARGB_8888 = env->GetStaticFieldID(BitmapConfigClass.get(), "ARGB_8888", + "Landroid/graphics/Bitmap$Config;"); + BitmapConfig_RGB_565 = + env->GetStaticFieldID(BitmapConfigClass.get(), "RGB_565", "Landroid/graphics/Bitmap$Config;"); + BitmapConfig_HARDWARE = env->GetStaticFieldID(BitmapConfigClass.get(), "HARDWARE", + "Landroid/graphics/Bitmap$Config;"); + // BitmapConfig_HARDWARE may be nullptr. + if (env->ExceptionCheck()) { + env->ExceptionClear(); + } +} + +std::shared_ptr NativeCodec::Make(JNIEnv* env, jobject sizeObject, int origin) { + auto size = env->GetIntArrayElements(static_cast(sizeObject), nullptr); + int width = size[0]; + int height = size[1]; + env->ReleaseIntArrayElements(static_cast(sizeObject), size, 0); + if (width <= 0 || height <= 0) { + return nullptr; + } + return std::shared_ptr( + new NativeCodec(width, height, static_cast(origin))); +} + +static EncodedOrigin GetEncodedOrigin(JNIEnv* env, jobject exifInterface) { + if (exifInterface == nullptr) { + env->ExceptionClear(); + return EncodedOrigin::TopLeft; + } + auto key = env->NewStringUTF("Orientation"); + auto origin = env->CallIntMethod(exifInterface, ExifInterfaceClass_getAttributeInt, key, + static_cast(EncodedOrigin::TopLeft)); + return static_cast(origin); +} + +std::shared_ptr ImageCodec::MakeNativeCodec(const std::string& filePath) { + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr || filePath.empty()) { + return nullptr; + } + if (BitmapFactoryOptionsClass.get() == nullptr) { + LOGE("Could not run NativeCodec.GetEncodedOrigin(), BitmapFactoryOptionsClass is not found!"); + return nullptr; + } + auto options = env->NewObject(BitmapFactoryOptionsClass.get(), BitmapFactoryOptions_Constructor); + if (options == nullptr) { + return nullptr; + } + env->SetBooleanField(options, BitmapFactoryOptions_inJustDecodeBounds, true); + auto imagePath = SafeToJString(env, filePath); + env->CallStaticObjectMethod(BitmapFactoryClass.get(), BitmapFactory_decodeFile, imagePath, + options); + if (env->ExceptionCheck()) { + return nullptr; + } + auto width = env->GetIntField(options, BitmapFactoryOptions_outWidth); + auto height = env->GetIntField(options, BitmapFactoryOptions_outHeight); + if (width <= 0 || height <= 0) { + env->ExceptionClear(); + LOGE("NativeCodec::readPixels(): Failed to get the size of the image!"); + return nullptr; + } + auto exifInterface = + env->NewObject(ExifInterfaceClass.get(), ExifInterface_Constructor_Path, imagePath); + auto origin = GetEncodedOrigin(env, exifInterface); + auto codec = std::shared_ptr(new NativeCodec(width, height, origin)); + codec->imagePath = filePath; + return codec; +} + +std::shared_ptr ImageCodec::MakeNativeCodec(std::shared_ptr imageBytes) { + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr || imageBytes == nullptr) { + return nullptr; + } + if (BitmapFactoryOptionsClass.get() == nullptr) { + LOGE("Could not run 
NativeCodec.MakeNativeCodec(), BitmapFactoryOptionsClass is not found!"); + return nullptr; + } + auto options = env->NewObject(BitmapFactoryOptionsClass.get(), BitmapFactoryOptions_Constructor); + if (options == nullptr) { + return nullptr; + } + env->SetBooleanField(options, BitmapFactoryOptions_inJustDecodeBounds, true); + auto byteArray = env->NewByteArray(imageBytes->size()); + env->SetByteArrayRegion(byteArray, 0, imageBytes->size(), + reinterpret_cast(imageBytes->data())); + env->CallStaticObjectMethod(BitmapFactoryClass.get(), BitmapFactory_decodeByteArray, byteArray, 0, + imageBytes->size(), options); + if (env->ExceptionCheck()) { + return nullptr; + } + auto width = env->GetIntField(options, BitmapFactoryOptions_outWidth); + auto height = env->GetIntField(options, BitmapFactoryOptions_outHeight); + if (width <= 0 || height <= 0) { + env->ExceptionClear(); + LOGE("NativeCodec::readPixels(): Failed to get the size of the image!"); + return nullptr; + } + auto inputStream = + env->NewObject(ByteArrayInputStreamClass.get(), ByteArrayInputStream_Constructor, byteArray); + auto exifInterface = + env->NewObject(ExifInterfaceClass.get(), ExifInterface_Constructor_Stream, inputStream); + auto origin = GetEncodedOrigin(env, exifInterface); + auto codec = std::shared_ptr(new NativeCodec(width, height, origin)); + codec->imageBytes = imageBytes; + return codec; +} + +std::shared_ptr ImageCodec::MakeFrom(NativeImageRef nativeImage) { + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return nullptr; + } + if (BitmapFactoryOptionsClass.get() == nullptr) { + LOGE("Could not run NativeCodec.MakeNativeCodec(), BitmapFactoryOptionsClass is not found!"); + return nullptr; + } + auto info = AndroidBitmap::GetInfo(env, nativeImage); + if (info.isEmpty()) { + return nullptr; + } + auto image = std::shared_ptr( + new NativeCodec(info.width(), info.height(), EncodedOrigin::TopLeft)); + image->nativeImage = nativeImage; + return image; +} + +static jobject ConvertHardwareBitmap(JNIEnv* env, jobject bitmap) { + // The AndroidBitmapInfo does not contain the ANDROID_BITMAP_FLAGS_IS_HARDWARE flag in the old + // versions of Android NDK, even when the Java Bitmap has the hardware config. So we check it here + // by the Java-side methods. 
+ if (bitmap == nullptr) { + return nullptr; + } + if (BitmapConfig_HARDWARE == nullptr) { + return bitmap; + } + auto config = env->CallObjectMethod(bitmap, Bitmap_getConfig); + if (config == nullptr) { + return bitmap; + } + static Global HardwareConfig = + env->GetStaticObjectField(BitmapConfigClass.get(), BitmapConfig_HARDWARE); + if (HardwareConfig.isEmpty()) { + return bitmap; + } + auto result = env->CallBooleanMethod(config, BitmapConfig_equals, HardwareConfig.get()); + if (result) { + static Global RGBA_Config = + env->GetStaticObjectField(BitmapConfigClass.get(), BitmapConfig_ARGB_8888); + auto newBitmap = env->CallObjectMethod(bitmap, Bitmap_copy, RGBA_Config.get(), JNI_FALSE); + if (env->ExceptionCheck()) { + env->ExceptionClear(); + return bitmap; + } + bitmap = newBitmap; + } + return bitmap; +} + +bool NativeCodec::readPixels(const ImageInfo& dstInfo, void* dstPixels) const { + if (dstInfo.isEmpty() || dstPixels == nullptr) { + return false; + } + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return false; + } + auto bitmap = decodeBitmap(env, dstInfo.colorType(), dstInfo.alphaType(), false); + auto info = AndroidBitmap::GetInfo(env, bitmap); + if (info.isEmpty()) { + LOGE("NativeCodec::readPixels() Failed to read the image info from a Bitmap!"); + return false; + } + void* pixels = nullptr; + if (AndroidBitmap_lockPixels(env, bitmap, &pixels) != 0) { + env->ExceptionClear(); + LOGE("NativeCodec::readPixels() Failed to lockPixels() of a Java Bitmap!"); + return false; + } + auto result = tgfx::Pixmap(info, pixels).readPixels(dstInfo, dstPixels); + AndroidBitmap_unlockPixels(env, bitmap); + return result; +} + +std::shared_ptr NativeCodec::onMakeBuffer(bool tryHardware) const { + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return nullptr; + } + auto bitmap = decodeBitmap(env, ColorType::RGBA_8888, AlphaType::Premultiplied, tryHardware); + if (tryHardware) { + auto hardwareBuffer = AndroidBitmap::GetHardwareBuffer(env, nativeImage.get()); + auto imageBuffer = PixelBuffer::MakeFrom(hardwareBuffer); + if (imageBuffer != nullptr) { + return imageBuffer; + } + bitmap = ConvertHardwareBitmap(env, bitmap); + } + auto imageBuffer = NativeImageBuffer::MakeFrom(env, bitmap); + if (imageBuffer != nullptr) { + return imageBuffer; + } + return ImageCodec::onMakeBuffer(tryHardware); +} + +jobject NativeCodec::decodeBitmap(JNIEnv* env, ColorType colorType, AlphaType alphaType, + bool tryHardware) const { + if (!nativeImage.isEmpty()) { + if (!tryHardware) { + return ConvertHardwareBitmap(env, nativeImage.get()); + } + return nativeImage.get(); + } + auto options = env->NewObject(BitmapFactoryOptionsClass.get(), BitmapFactoryOptions_Constructor); + if (options == nullptr) { + env->ExceptionClear(); + LOGE("NativeCodec::decodeBitmap() Failed to create a BitmapFactory.Options object!"); + return nullptr; + } + jobject config; + static const bool HasHardwareBitmapSupport = + AHardwareBufferFunctions::Get()->fromBitmap != nullptr; + if (tryHardware && HasHardwareBitmapSupport) { + config = env->GetStaticObjectField(BitmapConfigClass.get(), BitmapConfig_HARDWARE); + } else if (colorType == ColorType::ALPHA_8) { + config = env->GetStaticObjectField(BitmapConfigClass.get(), BitmapConfig_ALPHA_8); + } else if (colorType == ColorType::RGB_565) { + config = env->GetStaticObjectField(BitmapConfigClass.get(), BitmapConfig_RGB_565); + } else { + config = env->GetStaticObjectField(BitmapConfigClass.get(), 
BitmapConfig_ARGB_8888); + } + env->SetObjectField(options, BitmapFactoryOptions_inPreferredConfig, config); + if (alphaType == AlphaType::Unpremultiplied) { + env->SetBooleanField(options, BitmapFactoryOptions_inPremultiplied, false); + } + + if (!imagePath.empty()) { + auto filePath = SafeToJString(env, imagePath); + auto bitmap = env->CallStaticObjectMethod(BitmapFactoryClass.get(), BitmapFactory_decodeFile, + filePath, options); + if (env->ExceptionCheck()) { + LOGE("NativeCodec::decodeBitmap() Failed to decode a Bitmap from the path: %s!", + imagePath.c_str()); + return nullptr; + } + return bitmap; + } + auto byteArray = env->NewByteArray(imageBytes->size()); + env->SetByteArrayRegion(byteArray, 0, imageBytes->size(), + reinterpret_cast(imageBytes->data())); + auto bitmap = env->CallStaticObjectMethod(BitmapFactoryClass.get(), BitmapFactory_decodeByteArray, + byteArray, 0, imageBytes->size(), options); + if (env->ExceptionCheck()) { + LOGE("NativeCodec::decodeBitmap() Failed to decode a Bitmap from the image bytes!"); + return nullptr; + } + return bitmap; +} +} // namespace tgfx diff --git a/src/platform/android/NativeCodec.h b/src/platform/android/NativeCodec.h new file mode 100644 index 00000000..3f978f01 --- /dev/null +++ b/src/platform/android/NativeCodec.h @@ -0,0 +1,49 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "JNIUtil.h" +#include "tgfx/core/ImageCodec.h" + +namespace tgfx { +class NativeCodec : public ImageCodec { + public: + static void JNIInit(JNIEnv* env); + + bool readPixels(const ImageInfo& dstInfo, void* dstPixels) const override; + + protected: + std::shared_ptr onMakeBuffer(bool tryHardware) const override; + + private: + std::string imagePath; + std::shared_ptr imageBytes; + Global nativeImage; + + static std::shared_ptr Make(JNIEnv* env, jobject sizeObject, int origin); + + NativeCodec(int width, int height, EncodedOrigin origin) : ImageCodec(width, height, origin) { + } + + jobject decodeBitmap(JNIEnv* env, ColorType colorType, AlphaType alphaType, + bool tryHardware) const; + + friend class ImageCodec; +}; +} // namespace tgfx diff --git a/src/platform/android/NativeImageBuffer.cpp b/src/platform/android/NativeImageBuffer.cpp new file mode 100644 index 00000000..806abd40 --- /dev/null +++ b/src/platform/android/NativeImageBuffer.cpp @@ -0,0 +1,60 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
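// Usage sketch (illustrative, not part of the patch): decoding an image file through the
// platform codec above and reading its pixels, assuming tgfx::Bitmap's allocPixels/lockPixels
// API; the file-path handling is only an example.
#include "tgfx/core/Bitmap.h"
#include "tgfx/core/ImageCodec.h"

static bool DecodeFile(const std::string& filePath, tgfx::Bitmap* bitmap) {
  auto codec = tgfx::ImageCodec::MakeNativeCodec(filePath);
  if (codec == nullptr) {
    return false;
  }
  if (!bitmap->allocPixels(codec->width(), codec->height())) {
    return false;
  }
  auto pixels = bitmap->lockPixels();
  auto result = codec->readPixels(bitmap->info(), pixels);
  bitmap->unlockPixels();
  return result;
}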
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "NativeImageBuffer.h" +#include +#include "tgfx/platform/android/AndroidBitmap.h" +#include "utils/Log.h" + +namespace tgfx { +std::shared_ptr NativeImageBuffer::MakeFrom(JNIEnv* env, jobject bitmap) { + auto info = AndroidBitmap::GetInfo(env, bitmap); + if (info.isEmpty() || + (info.colorType() != ColorType::RGBA_8888 && info.colorType() != ColorType::ALPHA_8) || + info.alphaType() == AlphaType::Unpremultiplied) { + return nullptr; + } + auto pixelBuffer = std::shared_ptr(new NativeImageBuffer(info)); + pixelBuffer->bitmap = bitmap; + return pixelBuffer; +} + +std::shared_ptr NativeImageBuffer::onMakeTexture(Context* context, bool mipMapped) const { + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return nullptr; + } + void* pixels = nullptr; + if (AndroidBitmap_lockPixels(env, bitmap.get(), &pixels) != 0) { + env->ExceptionClear(); + LOGE("NativeImageBuffer::onMakeTexture() Failed to lockPixels() from a Java Bitmap!"); + return nullptr; + } + std::shared_ptr texture = nullptr; + if (isAlphaOnly()) { + texture = Texture::MakeAlpha(context, info.width(), info.height(), pixels, info.rowBytes(), + ImageOrigin::TopLeft, mipMapped); + } else { + texture = Texture::MakeRGBA(context, info.width(), info.height(), pixels, info.rowBytes(), + ImageOrigin::TopLeft, mipMapped); + } + AndroidBitmap_unlockPixels(env, bitmap.get()); + return texture; +} +} // namespace tgfx diff --git a/src/platform/android/NativeImageBuffer.h b/src/platform/android/NativeImageBuffer.h new file mode 100644 index 00000000..c45ce1b4 --- /dev/null +++ b/src/platform/android/NativeImageBuffer.h @@ -0,0 +1,56 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "JNIUtil.h" +#include "core/PixelBuffer.h" + +namespace tgfx { +class NativeImageBuffer : public ImageBuffer { + public: + /** + * Creates a ImageBuffer from the specified Android Bitmap object. 
Returns nullptr if the bitmap + * is null, has an unpremultiplied alpha type, or its color type is neither RGBA_8888 nor ALPHA_8. + */ + static std::shared_ptr MakeFrom(JNIEnv* env, jobject bitmap); + + int width() const override { + return info.width(); + } + + int height() const override { + return info.height(); + } + + bool isAlphaOnly() const override { + return info.isAlphaOnly(); + } + + protected: + std::shared_ptr onMakeTexture(Context* context, bool mipMapped) const override; + + private: + ImageInfo info = {}; + Global bitmap = {}; + + explicit NativeImageBuffer(const ImageInfo& info) : info(info) { + } +}; + +} // namespace tgfx diff --git a/src/platform/android/Print.cpp b/src/platform/android/Print.cpp new file mode 100644 index 00000000..87f195d4 --- /dev/null +++ b/src/platform/android/Print.cpp @@ -0,0 +1,38 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/platform/Print.h" +#include + +namespace tgfx { +#define LOG_TAG "tgfx" + +void PrintLog(const char format[], ...) { + va_list args; + va_start(args, format); + __android_log_vprint(ANDROID_LOG_INFO, LOG_TAG, format, args); + va_end(args); +} + +void PrintError(const char format[], ...) { + va_list args; + va_start(args, format); + __android_log_vprint(ANDROID_LOG_ERROR, LOG_TAG, format, args); + va_end(args); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/platform/android/SurfaceTexture.cpp b/src/platform/android/SurfaceTexture.cpp new file mode 100644 index 00000000..23a7ca59 --- /dev/null +++ b/src/platform/android/SurfaceTexture.cpp @@ -0,0 +1,257 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
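The MakeFrom() documentation above describes how a Java Bitmap is wrapped for texture upload. As a companion, here is a minimal sketch (not part of the patch) of reading such a Bitmap's pixels on the CPU with the same NDK calls NativeImageBuffer relies on; the helper name CopyBitmapPixels and its error handling are illustrative assumptions.

#include <android/bitmap.h>
#include <cstring>
#include <jni.h>
#include "tgfx/platform/android/AndroidBitmap.h"

static bool CopyBitmapPixels(JNIEnv* env, jobject javaBitmap, void* dst, size_t dstSize) {
  auto info = tgfx::AndroidBitmap::GetInfo(env, javaBitmap);  // returns an empty ImageInfo on failure
  if (info.isEmpty() || dstSize < info.byteSize()) {
    return false;
  }
  void* pixels = nullptr;
  if (AndroidBitmap_lockPixels(env, javaBitmap, &pixels) != 0) {
    env->ExceptionClear();
    return false;
  }
  memcpy(dst, pixels, info.byteSize());
  AndroidBitmap_unlockPixels(env, javaBitmap);
  return true;
}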
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "platform/android/SurfaceTexture.h" +#include +#include "GLExternalOESTexture.h" +#include "HandlerThread.h" +#include "JNIUtil.h" +#include "opengl/GLSampler.h" +#include "utils/Log.h" + +namespace tgfx { +static Global SurfaceTextureClass; +static jmethodID SurfaceTexture_Constructor_singleBufferMode; +static jmethodID SurfaceTexture_Constructor_texName; +static jmethodID SurfaceTexture_setOnFrameAvailableListenerHandler; +static jmethodID SurfaceTexture_setOnFrameAvailableListener; +static jfieldID SurfaceTexture_mEventHandler; +static jmethodID SurfaceTexture_updateTexImage; +static jmethodID SurfaceTexture_attachToGLContext; +static jmethodID SurfaceTexture_detachFromGLContext; +static jmethodID SurfaceTexture_getTransformMatrix; +static jmethodID SurfaceTexture_release; +static Global SurfaceClass; +static jmethodID Surface_Constructor; +static jmethodID Surface_release; +static Global HandlerClass; +static jmethodID Handler_Constructor; +static Global EventHandlerClass; +static jmethodID EventHandler_Constructor; + +void SurfaceTexture::JNIInit(JNIEnv* env) { + SurfaceTextureClass = env->FindClass("android/graphics/SurfaceTexture"); + SurfaceTexture_Constructor_singleBufferMode = + env->GetMethodID(SurfaceTextureClass.get(), "", "(Z)V"); + if (env->ExceptionCheck()) { + env->ExceptionClear(); + SurfaceTexture_Constructor_texName = + env->GetMethodID(SurfaceTextureClass.get(), "", "(I)V"); + } + SurfaceTexture_setOnFrameAvailableListenerHandler = env->GetMethodID( + SurfaceTextureClass.get(), "setOnFrameAvailableListener", + "(Landroid/graphics/SurfaceTexture$OnFrameAvailableListener;Landroid/os/Handler;)V"); + if (env->ExceptionCheck()) { + env->ExceptionClear(); + SurfaceTexture_setOnFrameAvailableListener = + env->GetMethodID(SurfaceTextureClass.get(), "setOnFrameAvailableListener", + "(Landroid/graphics/SurfaceTexture$OnFrameAvailableListener;)V"); + SurfaceTexture_mEventHandler = + env->GetFieldID(SurfaceTextureClass.get(), "mEventHandler", + "Landroid/graphics/SurfaceTexture$EventHandler;"); + EventHandlerClass = env->FindClass("android/graphics/SurfaceTexture$EventHandler"); + EventHandler_Constructor = + env->GetMethodID(EventHandlerClass.get(), "", + "(Landroid/graphics/SurfaceTexture;Landroid/os/Looper;)V"); + } else { + HandlerClass = env->FindClass("android/os/Handler"); + Handler_Constructor = env->GetMethodID(HandlerClass.get(), "", "(Landroid/os/Looper;)V"); + } + SurfaceTexture_updateTexImage = + env->GetMethodID(SurfaceTextureClass.get(), "updateTexImage", "()V"); + SurfaceTexture_attachToGLContext = + env->GetMethodID(SurfaceTextureClass.get(), "attachToGLContext", "(I)V"); + SurfaceTexture_detachFromGLContext = + env->GetMethodID(SurfaceTextureClass.get(), "detachFromGLContext", "()V"); + SurfaceTexture_getTransformMatrix = + env->GetMethodID(SurfaceTextureClass.get(), "getTransformMatrix", "([F)V"); + SurfaceTexture_release = env->GetMethodID(SurfaceTextureClass.get(), "release", "()V"); + SurfaceClass = env->FindClass("android/view/Surface"); + Surface_Constructor = + env->GetMethodID(SurfaceClass.get(), "", "(Landroid/graphics/SurfaceTexture;)V"); + Surface_release = env->GetMethodID(SurfaceClass.get(), "release", "()V"); +} + +static std::mutex threadLocker = {}; +static std::shared_ptr handlerThread; + +static jobject GetGlobalLooper() { + std::lock_guard autoLock(threadLocker); + if (handlerThread == nullptr) { + handlerThread = 
HandlerThread::Make(); + } + if (handlerThread == nullptr) { + return nullptr; + } + return handlerThread->getLooper(); +} + +static void SetOnFrameAvailableListener(JNIEnv* env, jobject surfaceTexture, jobject listener, + jobject looper) { + if (SurfaceTexture_setOnFrameAvailableListenerHandler != nullptr) { + auto handler = env->NewObject(HandlerClass.get(), Handler_Constructor, looper); + env->CallVoidMethod(surfaceTexture, SurfaceTexture_setOnFrameAvailableListenerHandler, listener, + handler); + } else { + env->CallVoidMethod(surfaceTexture, SurfaceTexture_setOnFrameAvailableListener, listener); + auto handler = + env->NewObject(EventHandlerClass.get(), EventHandler_Constructor, surfaceTexture, looper); + env->SetObjectField(surfaceTexture, SurfaceTexture_mEventHandler, handler); + } +} + +std::shared_ptr SurfaceTexture::Make(int width, int height, jobject listener) { + if (width <= 0 || height <= 0 || listener == nullptr) { + return nullptr; + } + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return nullptr; + } + jobject stObject; + if (SurfaceTexture_Constructor_singleBufferMode != nullptr) { + stObject = env->NewObject(SurfaceTextureClass.get(), + SurfaceTexture_Constructor_singleBufferMode, JNI_FALSE); + } else { + stObject = env->NewObject(SurfaceTextureClass.get(), SurfaceTexture_Constructor_texName, 0); + if (stObject != nullptr) { + env->CallVoidMethod(stObject, SurfaceTexture_detachFromGLContext); + } + } + if (env->ExceptionCheck()) { + env->ExceptionClear(); + LOGE("SurfaceImageReader::MakeFrom(): failed to create a new SurfaceTexture!"); + return nullptr; + } + auto looper = GetGlobalLooper(); + if (looper == nullptr) { + return nullptr; + } + SetOnFrameAvailableListener(env, stObject, listener, looper); + if (env->ExceptionCheck()) { + env->ExceptionClear(); + LOGE("SurfaceImageReader::MakeFrom(): failed to set the OnFrameAvailableListener!"); + return nullptr; + } + return std::shared_ptr(new SurfaceTexture(width, height, env, stObject)); +} + +SurfaceTexture::SurfaceTexture(int width, int height, JNIEnv* env, jobject st) + : _width(width), _height(height) { + surfaceTexture = st; + surface = env->NewObject(SurfaceClass.get(), Surface_Constructor, st); +} + +SurfaceTexture::~SurfaceTexture() { + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return; + } + env->CallVoidMethod(surface.get(), Surface_release); + env->CallVoidMethod(surfaceTexture.get(), SurfaceTexture_release); +} + +jobject SurfaceTexture::getInputSurface() const { + return surface.get(); +} + +void SurfaceTexture::notifyFrameAvailable() { + // Note: If there is a pending frame available already, SurfaceTexture will not dispatch any new + // frame-available event until you have called the SurfaceTexture.updateTexImage(). 
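JNIInit() above resolves SurfaceTexture methods that only exist on newer Android versions and falls back to the older overload when the lookup throws. The same pattern, isolated into a small sketch; the helper name is illustrative, while the method signatures are the ones used above.

#include <jni.h>

static jmethodID ResolveListenerSetter(JNIEnv* env, jclass surfaceTextureClass) {
  auto method = env->GetMethodID(
      surfaceTextureClass, "setOnFrameAvailableListener",
      "(Landroid/graphics/SurfaceTexture$OnFrameAvailableListener;Landroid/os/Handler;)V");
  if (env->ExceptionCheck()) {
    env->ExceptionClear();  // older Android versions only expose the single-argument overload
    method = env->GetMethodID(
        surfaceTextureClass, "setOnFrameAvailableListener",
        "(Landroid/graphics/SurfaceTexture$OnFrameAvailableListener;)V");
  }
  return method;
}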
+ std::lock_guard autoLock(locker); + frameAvailable = true; + condition.notify_all(); +} + +static ISize ComputeTextureSize(float matrix[16], int width, int height) { + Size size = {static_cast(width), static_cast(height)}; + auto scaleX = fabsf(matrix[0]); + if (scaleX > 0) { + size.width = size.width / (scaleX + matrix[12] * 2); + } + float scaleY = fabsf(matrix[5]); + if (scaleY > 0) { + size.height = size.height / (scaleY + (matrix[13] - scaleY) * 2); + } + return size.toRound(); +} + +std::shared_ptr SurfaceTexture::onMakeTexture(Context* context, bool) { + auto texture = makeTexture(context); + if (texture != nullptr) { + onUpdateTexture(texture, Rect::MakeWH(_width, _height)); + } + return texture; +} + +bool SurfaceTexture::onUpdateTexture(std::shared_ptr texture, const Rect&) { + std::unique_lock autoLock(locker); + if (!frameAvailable) { + static const auto TIMEOUT = std::chrono::seconds(1); + auto status = condition.wait_for(autoLock, TIMEOUT); + if (status == std::cv_status::timeout) { + LOGE("NativeImageReader::onUpdateTexture(): timeout when waiting for the frame available!"); + return false; + } + } + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return false; + } + frameAvailable = false; + env->CallVoidMethod(surfaceTexture.get(), SurfaceTexture_updateTexImage); + if (env->ExceptionCheck()) { + env->ExceptionClear(); + LOGE("NativeImageReader::onUpdateTexture(): failed to updateTexImage!"); + return false; + } + auto floatArray = env->NewFloatArray(16); + env->CallVoidMethod(surfaceTexture.get(), SurfaceTexture_getTransformMatrix, floatArray); + auto matrix = env->GetFloatArrayElements(floatArray, nullptr); + auto textureSize = ComputeTextureSize(matrix, width(), height()); + env->ReleaseFloatArrayElements(floatArray, matrix, 0); + std::static_pointer_cast(texture)->updateTextureSize(textureSize.width, + textureSize.height); + return true; +} + +std::shared_ptr SurfaceTexture::makeTexture(Context* context) { + std::lock_guard autoLock(locker); + JNIEnvironment environment; + auto env = environment.current(); + if (env == nullptr) { + return nullptr; + } + auto texture = GLExternalOESTexture::Make(context, width(), height()); + if (texture == nullptr) { + return nullptr; + } + auto sampler = static_cast(texture->getSampler()); + env->CallVoidMethod(surfaceTexture.get(), SurfaceTexture_attachToGLContext, sampler->id); + if (env->ExceptionCheck()) { + env->ExceptionClear(); + texture = nullptr; + LOGE("NativeImageReader::makeTexture(): failed to attached to a SurfaceTexture!"); + return nullptr; + } + return texture; +} +} // namespace tgfx diff --git a/src/platform/android/SurfaceTexture.h b/src/platform/android/SurfaceTexture.h new file mode 100644 index 00000000..6e8de064 --- /dev/null +++ b/src/platform/android/SurfaceTexture.h @@ -0,0 +1,94 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "core/ImageStream.h" +#include "tgfx/platform/android/Global.h" + +namespace tgfx { +/** + * The SurfaceTexture class allows direct access to image data rendered into the Java Surface object + * on the android platform. It is typically used with the ImageReader class. + */ +class SurfaceTexture : public ImageStream { + public: + /** + * Creates a new SurfaceTexture with the specified image size and listener. Returns nullptr + * if the image size is zero or the listener is null. + * @param width The width of generated ImageBuffers. + * @param height The height of generated ImageBuffers. + * @param listener A java object that implements the SurfaceTexture.OnFrameAvailableListener + * interface. The implementation should make a native call to the notifyFrameAvailable() of the + * returned SurfaceImageReader. + */ + static std::shared_ptr Make(int width, int height, jobject listener); + + ~SurfaceTexture() override; + + int width() const override { + return _width; + } + + int height() const override { + return _height; + } + + bool isAlphaOnly() const override { + return false; + } + + bool isHardwareBacked() const override { + return false; + } + + /** + * Returns the Surface object used as the input to the SurfaceTexture. The release() method of + * the returned Surface will be called when the SurfaceTexture is released. + */ + jobject getInputSurface() const; + + /** + * Notifies the previously returned ImageBuffer is available for generating textures. The method + * should only be called by the listener passed in when creating the reader. + */ + void notifyFrameAvailable(); + + protected: + std::shared_ptr onMakeTexture(Context* context, bool mipMapped) override; + + bool onUpdateTexture(std::shared_ptr texture, const Rect& bounds) override; + + private: + std::mutex locker = {}; + int _width = 0; + int _height = 0; + std::condition_variable condition = {}; + Global surface; + Global surfaceTexture; + bool frameAvailable = false; + + static void JNIInit(JNIEnv* env); + + SurfaceTexture(int width, int height, JNIEnv* env, jobject surfaceTexture); + + std::shared_ptr makeTexture(Context* context); + + friend class JNIInit; +}; +} // namespace tgfx diff --git a/src/platform/android/SurfaceTextureReader.cpp b/src/platform/android/SurfaceTextureReader.cpp new file mode 100644 index 00000000..f3975b39 --- /dev/null +++ b/src/platform/android/SurfaceTextureReader.cpp @@ -0,0 +1,55 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
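A lifetime note on getInputSurface() above, as a sketch: the returned Surface is owned by the stream (its release() is invoked from the destructor), so a caller that needs the jobject beyond the stream's lifetime should take its own JNI reference. The helper below is hypothetical and only illustrates that ownership rule.

// Assumed helper: keep a JNI global reference to the input Surface so it can outlive local frames.
jobject RetainInputSurface(JNIEnv* env, const std::shared_ptr<tgfx::SurfaceTexture>& stream) {
  jobject surface = stream->getInputSurface();
  // The caller is responsible for DeleteGlobalRef() once the producer no longer needs the Surface.
  return surface != nullptr ? env->NewGlobalRef(surface) : nullptr;
}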
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/platform/android/SurfaceTextureReader.h" +#include +#include "platform/android/SurfaceTexture.h" + +namespace tgfx { +std::shared_ptr SurfaceTextureReader::Make(int width, int height, + jobject listener) { + if (listener == nullptr || width < 1 || height < 1) { + return nullptr; + } + auto imageStream = SurfaceTexture::Make(width, height, listener); + if (imageStream == nullptr) { + return nullptr; + } + auto imageReader = + std::shared_ptr(new SurfaceTextureReader(std::move(imageStream))); + imageReader->weakThis = imageReader; + return imageReader; +} + +SurfaceTextureReader::SurfaceTextureReader(std::shared_ptr stream) + : ImageReader(std::move(stream)) { +} + +jobject SurfaceTextureReader::getInputSurface() const { + return std::static_pointer_cast(stream)->getInputSurface(); +} + +void SurfaceTextureReader::notifyFrameAvailable() { + std::static_pointer_cast(stream)->notifyFrameAvailable(); +} + +std::shared_ptr SurfaceTextureReader::acquireNextBuffer() { + stream->markContentDirty(Rect::MakeWH(stream->width(), stream->height())); + return ImageReader::acquireNextBuffer(); +} +} // namespace tgfx diff --git a/src/platform/apple/BitmapContextUtil.h b/src/platform/apple/BitmapContextUtil.h new file mode 100644 index 00000000..17a50ba1 --- /dev/null +++ b/src/platform/apple/BitmapContextUtil.h @@ -0,0 +1,26 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/ImageInfo.h" + +namespace tgfx { +CGContextRef CreateBitmapContext(const ImageInfo& info, void* pixels); +} // namespace tgfx diff --git a/src/platform/apple/BitmapContextUtil.mm b/src/platform/apple/BitmapContextUtil.mm new file mode 100644 index 00000000..e1054b52 --- /dev/null +++ b/src/platform/apple/BitmapContextUtil.mm @@ -0,0 +1,61 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
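SurfaceTextureReader is the public wrapper around the stream above. A usage sketch follows; `javaListener` is assumed to be a Java object implementing SurfaceTexture.OnFrameAvailableListener whose callback forwards into notifyFrameAvailable(), and the variable names and 1280x720 size are placeholders.

auto reader = tgfx::SurfaceTextureReader::Make(1280, 720, javaListener);
if (reader != nullptr) {
  jobject inputSurface = reader->getInputSurface();  // producer side: camera or MediaCodec output
  // After the Java listener fires, forward the event from native code and pull the frame:
  reader->notifyFrameAvailable();
  auto imageBuffer = reader->acquireNextBuffer();  // marks the stream dirty, then acquires a buffer
  // Wrap imageBuffer into a tgfx::Image to draw the frame; texture creation waits for the frame
  // with the one-second timeout shown in SurfaceTexture::onUpdateTexture() above.
}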
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "BitmapContextUtil.h" + +namespace tgfx { +static uint32_t GetBitmapInfo(AlphaType alphaType, ColorType colorType) { + CGBitmapInfo bitmapInfo = 0; + auto premultiplied = alphaType == AlphaType::Premultiplied; + CGImageAlphaInfo alphaInfo = kCGImageAlphaNone; + switch (colorType) { + case ColorType::BGRA_8888: + bitmapInfo = kCGBitmapByteOrder32Little; + alphaInfo = premultiplied ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaFirst; + break; + case ColorType::RGBA_8888: + bitmapInfo = kCGBitmapByteOrder32Big; + alphaInfo = premultiplied ? kCGImageAlphaPremultipliedLast : kCGImageAlphaLast; + break; + case ColorType::ALPHA_8: + alphaInfo = kCGImageAlphaOnly; + break; + default: + break; + } + return static_cast(bitmapInfo) | static_cast(alphaInfo); +} + +CGContextRef CreateBitmapContext(const ImageInfo& info, void* pixels) { + if (pixels == nullptr) { + return nullptr; + } + auto bitmapInfo = GetBitmapInfo(info.alphaType(), info.colorType()); + if (bitmapInfo == 0) { + return nullptr; + } + CGColorSpaceRef colorspace = + info.colorType() == ColorType::ALPHA_8 ? nullptr : CGColorSpaceCreateDeviceRGB(); + auto cgContext = CGBitmapContextCreate(pixels, info.width(), info.height(), 8, info.rowBytes(), + colorspace, bitmapInfo); + if (colorspace) { + CFRelease(colorspace); + } + return cgContext; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/platform/apple/CTTypeface.mm b/src/platform/apple/CTTypeface.mm new file mode 100644 index 00000000..ed67d3d0 --- /dev/null +++ b/src/platform/apple/CTTypeface.mm @@ -0,0 +1,44 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
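CreateBitmapContext() above only supports RGBA_8888, BGRA_8888, and ALPHA_8 (GetBitmapInfo() returns 0 for anything else). A sketch of rasterizing a CGImage into caller-owned pixels with it, mirroring how the Apple NativeCodec uses the helper; the function name DrawCGImage is illustrative.

#include <CoreGraphics/CoreGraphics.h>
#include "BitmapContextUtil.h"

static bool DrawCGImage(CGImageRef image, const tgfx::ImageInfo& dstInfo, void* dstPixels) {
  auto context = tgfx::CreateBitmapContext(dstInfo, dstPixels);
  if (context == nullptr) {
    return false;  // unsupported color/alpha combination or null pixels
  }
  auto rect = CGRectMake(0, 0, dstInfo.width(), dstInfo.height());
  CGContextSetBlendMode(context, kCGBlendModeCopy);  // overwrite instead of alpha-blending
  CGContextDrawImage(context, rect, image);
  CGContextRelease(context);
  return true;
}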
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/platform/apple/CTTypeface.h" +#include "utils/USE.h" + +#ifndef TGFX_USE_FREETYPE +#include "vectors/coregraphics/CGTypeface.h" +#endif + +namespace tgfx { +std::shared_ptr CTTypeface::MakeFromCTFont(CTFontRef ctFont) { +#ifdef TGFX_USE_FREETYPE + USE(ctFont); + return nullptr; +#else + return CGTypeface::Make(static_cast(ctFont)); +#endif +} + +CTFontRef CTTypeface::GetCTFont(const Typeface* typeface) { +#ifdef TGFX_USE_FREETYPE + USE(typeface); + return nullptr; +#else + return typeface ? static_cast(typeface)->getCTFont() : nullptr; +#endif +} +} // namespace tgfx diff --git a/src/platform/apple/HardwareBuffer.mm b/src/platform/apple/HardwareBuffer.mm new file mode 100644 index 00000000..55e85478 --- /dev/null +++ b/src/platform/apple/HardwareBuffer.mm @@ -0,0 +1,116 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/platform/HardwareBuffer.h" +#import +#import +#include "core/PixelBuffer.h" +#include "platform/apple/NV12HardwareBuffer.h" + +namespace tgfx { +std::shared_ptr ImageBuffer::MakeFrom(HardwareBufferRef hardwareBuffer, + YUVColorSpace colorSpace) { + if (hardwareBuffer == nullptr) { + return nullptr; + } + auto planeCount = CVPixelBufferGetPlaneCount(hardwareBuffer); + if (planeCount > 1) { + return NV12HardwareBuffer::MakeFrom(hardwareBuffer, colorSpace); + } + return PixelBuffer::MakeFrom(hardwareBuffer); +} + +bool HardwareBufferCheck(HardwareBufferRef buffer) { + if (!HardwareBufferAvailable()) { + return false; + } + auto success = CVPixelBufferGetIOSurface(buffer) != nil; +#if TARGET_OS_IPHONE == 0 && TARGET_OS_MAC == 1 && defined(__aarch64__) + if (success && CVPixelBufferGetPixelFormatType(buffer) == kCVPixelFormatType_OneComponent8) { + // The alpha-only CVPixelBuffer on macOS with Apple Silicon does not share memory across GPU and + // CPU. + return false; + } +#endif + return success; +} + +HardwareBufferRef HardwareBufferAllocate(int width, int height, bool alphaOnly) { + if (width <= 0 || height <= 0) { + return nil; + } +#if TARGET_OS_IPHONE == 0 && TARGET_OS_MAC == 1 && defined(__aarch64__) + if (alphaOnly) { + return nil; + } +#endif + OSType pixelFormat = alphaOnly ? 
kCVPixelFormatType_OneComponent8 : kCVPixelFormatType_32BGRA; + NSDictionary* options = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVPixelBufferRef pixelBuffer = nil; + CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, static_cast(width), + static_cast(height), pixelFormat, + (CFDictionaryRef)options, &pixelBuffer); + if (status != kCVReturnSuccess) { + return nil; + } + return pixelBuffer; +} + +HardwareBufferRef HardwareBufferRetain(HardwareBufferRef buffer) { + return CVPixelBufferRetain(buffer); +} + +void HardwareBufferRelease(HardwareBufferRef buffer) { + CVPixelBufferRelease(buffer); +} + +void* HardwareBufferLock(HardwareBufferRef buffer) { + CVPixelBufferLockBaseAddress(buffer, 0); + return CVPixelBufferGetBaseAddress(buffer); +} + +void HardwareBufferUnlock(HardwareBufferRef buffer) { + CVPixelBufferUnlockBaseAddress(buffer, 0); +} + +ImageInfo HardwareBufferGetInfo(HardwareBufferRef buffer) { + if (buffer == nil) { + return {}; + } + auto planeCount = CVPixelBufferGetPlaneCount(buffer); + if (planeCount > 1) { + return {}; + } + auto width = static_cast(CVPixelBufferGetWidth(buffer)); + auto height = static_cast(CVPixelBufferGetHeight(buffer)); + auto pixelFormat = CVPixelBufferGetPixelFormatType(buffer); + ColorType colorType = ColorType::Unknown; + switch (pixelFormat) { + case kCVPixelFormatType_OneComponent8: + colorType = ColorType::ALPHA_8; + break; + case kCVPixelFormatType_32BGRA: + colorType = ColorType::BGRA_8888; + break; + default: + break; + } + auto rowBytes = CVPixelBufferGetBytesPerRow(buffer); + return ImageInfo::Make(width, height, colorType, AlphaType::Premultiplied, rowBytes); +} +} // namespace tgfx diff --git a/src/platform/apple/NV12HardwareBuffer.h b/src/platform/apple/NV12HardwareBuffer.h new file mode 100644 index 00000000..1fe22d23 --- /dev/null +++ b/src/platform/apple/NV12HardwareBuffer.h @@ -0,0 +1,50 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
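The hardware-buffer entry points above form a small allocate/lock/wrap API. Below is a sketch of the CPU-write path; the 256x256 size is arbitrary, the smart-pointer template arguments (elided throughout this diff) are assumed, and the YUVColorSpace argument only matters for multi-plane NV12 buffers, so the value passed for a BGRA buffer is effectively ignored by the MakeFrom() shown above.

#include <cstring>
#include <memory>
#include "tgfx/core/ImageBuffer.h"
#include "tgfx/platform/HardwareBuffer.h"

std::shared_ptr<tgfx::ImageBuffer> MakeClearedBuffer() {
  auto hardwareBuffer = tgfx::HardwareBufferAllocate(256, 256, false);  // BGRA_8888, IOSurface-backed
  if (hardwareBuffer == nullptr) {
    return nullptr;
  }
  auto info = tgfx::HardwareBufferGetInfo(hardwareBuffer);  // width, height, rowBytes, color type
  if (void* pixels = tgfx::HardwareBufferLock(hardwareBuffer)) {
    memset(pixels, 0, info.byteSize());  // rowBytes padding is already reflected in byteSize()
    tgfx::HardwareBufferUnlock(hardwareBuffer);
  }
  // The color-space argument is ignored for single-plane BGRA buffers (see MakeFrom above).
  auto imageBuffer = tgfx::ImageBuffer::MakeFrom(hardwareBuffer, tgfx::YUVColorSpace::BT601_LIMITED);
  tgfx::HardwareBufferRelease(hardwareBuffer);  // assumed safe: the wrappers retain the pixel buffer
  return imageBuffer;
}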
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#import +#include "tgfx/core/ImageBuffer.h" + +namespace tgfx { +class NV12HardwareBuffer : public ImageBuffer { + public: + static std::shared_ptr MakeFrom(CVPixelBufferRef pixelBuffer, + YUVColorSpace colorSpace); + + ~NV12HardwareBuffer() override; + + int width() const override; + + int height() const override; + + bool isAlphaOnly() const final { + return false; + } + + protected: + std::shared_ptr onMakeTexture(Context* context, bool mipMapped) const override; + + private: + CVPixelBufferRef pixelBuffer = nullptr; + YUVColorSpace colorSpace = YUVColorSpace::BT601_LIMITED; + + NV12HardwareBuffer(CVPixelBufferRef pixelBuffer, YUVColorSpace colorSpace); +}; + +} // namespace tgfx diff --git a/src/platform/apple/NV12HardwareBuffer.mm b/src/platform/apple/NV12HardwareBuffer.mm new file mode 100644 index 00000000..e1f2197e --- /dev/null +++ b/src/platform/apple/NV12HardwareBuffer.mm @@ -0,0 +1,74 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "NV12HardwareBuffer.h" +#include "gpu/YUVTexture.h" +#include "utils/USE.h" + +namespace tgfx { +static std::mutex cacheLocker = {}; +static std::unordered_map> nv12BufferMap = {}; + +std::shared_ptr NV12HardwareBuffer::MakeFrom(CVPixelBufferRef pixelBuffer, + YUVColorSpace colorSpace) { + if (!HardwareBufferCheck(pixelBuffer)) { + return nullptr; + } + auto format = CVPixelBufferGetPixelFormatType(pixelBuffer); + if (format != kCVPixelFormatType_420YpCbCr8BiPlanarFullRange && + format != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) { + return nullptr; + } + std::lock_guard cacheLock(cacheLocker); + auto result = nv12BufferMap.find(pixelBuffer); + if (result != nv12BufferMap.end()) { + auto buffer = result->second.lock(); + if (buffer) { + return buffer; + } + nv12BufferMap.erase(result); + } + auto buffer = + std::shared_ptr(new NV12HardwareBuffer(pixelBuffer, colorSpace)); + nv12BufferMap[pixelBuffer] = buffer; + return buffer; +} + +NV12HardwareBuffer::NV12HardwareBuffer(CVPixelBufferRef pixelBuffer, YUVColorSpace colorSpace) + : pixelBuffer(pixelBuffer), colorSpace(colorSpace) { + CFRetain(pixelBuffer); +} + +NV12HardwareBuffer::~NV12HardwareBuffer() { + CFRelease(pixelBuffer); + std::lock_guard cacheLock(cacheLocker); + nv12BufferMap.erase(pixelBuffer); +} + +int NV12HardwareBuffer::width() const { + return static_cast(CVPixelBufferGetWidth(pixelBuffer)); +} + +int NV12HardwareBuffer::height() const { + return static_cast(CVPixelBufferGetHeight(pixelBuffer)); +} + +std::shared_ptr NV12HardwareBuffer::onMakeTexture(Context* context, bool) const { + return YUVTexture::MakeFrom(context, pixelBuffer, colorSpace); +} +} // namespace tgfx diff --git a/src/platform/apple/NativeCodec.h b/src/platform/apple/NativeCodec.h new file mode 100644 index 00000000..7a86584d --- /dev/null +++ b/src/platform/apple/NativeCodec.h @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
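NV12HardwareBuffer::MakeFrom() above caches wrappers in a map of weak references so repeated calls with the same CVPixelBuffer return the live wrapper and stale entries are pruned. The same idiom, factored into a generic sketch that is purely illustrative and not code from the patch:

#include <memory>
#include <mutex>
#include <unordered_map>

template <typename Key, typename Value>
class WeakCache {
 public:
  std::shared_ptr<Value> find(const Key& key) {
    std::lock_guard<std::mutex> lock(mutex);
    auto it = cache.find(key);
    if (it == cache.end()) {
      return nullptr;
    }
    auto value = it->second.lock();
    if (value == nullptr) {
      cache.erase(it);  // stale entry: the wrapper has already been destroyed
    }
    return value;
  }

  void insert(const Key& key, std::shared_ptr<Value> value) {
    std::lock_guard<std::mutex> lock(mutex);
    cache[key] = value;
  }

 private:
  std::mutex mutex;
  std::unordered_map<Key, std::weak_ptr<Value>> cache;
};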
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/ImageCodec.h" + +namespace tgfx { +class NativeCodec : public ImageCodec { + public: + ~NativeCodec() override; + + bool readPixels(const ImageInfo& dstInfo, void* dstPixels) const override; + + private: + std::string imagePath; + std::shared_ptr imageBytes = nullptr; + CGImageRef nativeImage = nullptr; + + NativeCodec(int width, int height, EncodedOrigin origin) : ImageCodec(width, height, origin) { + } + + friend class ImageCodec; +}; +} // namespace tgfx diff --git a/src/platform/apple/NativeCodec.mm b/src/platform/apple/NativeCodec.mm new file mode 100644 index 00000000..721a6522 --- /dev/null +++ b/src/platform/apple/NativeCodec.mm @@ -0,0 +1,189 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "NativeCodec.h" +#include "BitmapContextUtil.h" +#include "tgfx/utils/Buffer.h" + +namespace tgfx { +static CGImagePropertyOrientation GetOrientationFromProperties(CFDictionaryRef imageProperties) { + CGImagePropertyOrientation orientation = kCGImagePropertyOrientationUp; + if (imageProperties != NULL) { + CFNumberRef oritentationNum = + (CFNumberRef)CFDictionaryGetValue(imageProperties, kCGImagePropertyOrientation); + if (oritentationNum != NULL) { + CFNumberGetValue(oritentationNum, kCFNumberIntType, &orientation); + } + } + + return orientation; +} + +static CGSize GetImageSize(CGImageSourceRef imageSource, CGImagePropertyOrientation* orientation) { + int width = 0; + int height = 0; + CFDictionaryRef imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, NULL); + if (imageProperties != NULL) { + CFNumberRef widthNum = + (CFNumberRef)CFDictionaryGetValue(imageProperties, kCGImagePropertyPixelWidth); + if (widthNum != NULL) { + CFNumberGetValue(widthNum, kCFNumberIntType, &width); + } + CFNumberRef heightNum = + (CFNumberRef)CFDictionaryGetValue(imageProperties, kCGImagePropertyPixelHeight); + if (heightNum != NULL) { + CFNumberGetValue(heightNum, kCFNumberIntType, &height); + } + *orientation = GetOrientationFromProperties(imageProperties); + CFRelease(imageProperties); + } + return CGSizeMake(width, height); +} + +std::shared_ptr ImageCodec::MakeNativeCodec(const std::string& filePath) { + CFStringRef imagePath = CFStringCreateWithCString(NULL, filePath.c_str(), kCFStringEncodingUTF8); + CFURLRef imageURL = CFURLCreateWithFileSystemPath(NULL, imagePath, kCFURLPOSIXPathStyle, 0); + CFRelease(imagePath); + CGImageSourceRef imageSource = CGImageSourceCreateWithURL(imageURL, NULL); + CFRelease(imageURL); + if (imageSource == nil) { + return nullptr; + } 
+ CGImagePropertyOrientation orientation = kCGImagePropertyOrientationUp; + auto size = GetImageSize(imageSource, &orientation); + CFRelease(imageSource); + if (size.width <= 0 || size.height <= 0) { + return nullptr; + } + auto image = new NativeCodec(size.width, size.height, static_cast(orientation)); + image->imagePath = filePath; + return std::shared_ptr(image); +} + +std::shared_ptr ImageCodec::MakeNativeCodec(std::shared_ptr imageBytes) { + if (imageBytes == nullptr) { + return nullptr; + } + CFDataRef data = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, + reinterpret_cast(imageBytes->data()), + imageBytes->size(), kCFAllocatorNull); + if (data == nullptr) { + return nullptr; + } + CGImageSourceRef imageSource = CGImageSourceCreateWithData(data, NULL); + CFRelease(data); + if (imageSource == nil) { + return nullptr; + } + CGImagePropertyOrientation orientation = kCGImagePropertyOrientationUp; + auto size = GetImageSize(imageSource, &orientation); + CFRelease(imageSource); + if (size.width <= 0 || size.height <= 0) { + return nullptr; + } + auto codec = new NativeCodec(size.width, size.height, static_cast(orientation)); + codec->imageBytes = imageBytes; + return std::shared_ptr(codec); +} + +std::shared_ptr ImageCodec::MakeFrom(NativeImageRef nativeImage) { + if (nativeImage == nullptr) { + return nullptr; + } + auto width = CGImageGetWidth(nativeImage); + auto height = CGImageGetHeight(nativeImage); + if (width <= 0 || height <= 0) { + return nullptr; + } + auto codec = + new NativeCodec(static_cast(width), static_cast(height), EncodedOrigin::TopLeft); + CFRetain(nativeImage); + codec->nativeImage = nativeImage; + return std::shared_ptr(codec); +} + +NativeCodec::~NativeCodec() { + if (nativeImage != nullptr) { + CFRelease(nativeImage); + } +} + +bool NativeCodec::readPixels(const ImageInfo& dstInfo, void* dstPixels) const { + if (dstInfo.isEmpty() || dstPixels == nullptr) { + return false; + } + NSData* data = nil; + CGImageSourceRef sourceRef = nil; + CGImageRef image = nil; + if (nativeImage != nullptr) { + image = nativeImage; + } else { + if (!imagePath.empty()) { + data = + [[NSData alloc] initWithContentsOfFile:[NSString stringWithUTF8String:imagePath.c_str()]]; + } else { + auto bytes = const_cast(imageBytes->data()); + data = [[NSData alloc] initWithBytesNoCopy:bytes length:imageBytes->size() freeWhenDone:NO]; + } + if (data == nil) { + return false; + } + CFDataRef dataRef = (__bridge CFDataRef)data; + sourceRef = CGImageSourceCreateWithData(dataRef, NULL); + if (sourceRef == NULL) { + [data release]; + return false; + } + image = CGImageSourceCreateImageAtIndex(sourceRef, 0, NULL); + if (image == NULL) { + [data release]; + CFRelease(sourceRef); + return false; + } + } + Buffer tempBuffer = {}; + auto colorType = dstInfo.colorType(); + auto info = dstInfo; + auto pixels = dstPixels; + if (colorType != ColorType::RGBA_8888 && colorType != ColorType::BGRA_8888 && + colorType != ColorType::ALPHA_8) { + info = dstInfo.makeColorType(ColorType::RGBA_8888); + tempBuffer.alloc(info.byteSize()); + pixels = tempBuffer.data(); + } + auto context = CreateBitmapContext(info, pixels); + auto result = context != nullptr; + if (result) { + int width = static_cast(CGImageGetWidth(image)); + int height = static_cast(CGImageGetHeight(image)); + CGRect rect = CGRectMake(0, 0, width, height); + CGContextSetBlendMode(context, kCGBlendModeCopy); + CGContextDrawImage(context, rect, image); + CGContextRelease(context); + } + if (nativeImage == nullptr) { + [data release]; + 
CFRelease(sourceRef); + CGImageRelease(image); + } + if (!tempBuffer.isEmpty()) { + Pixmap(info, pixels).readPixels(dstInfo, dstPixels); + } + return result; +} +} // namespace tgfx diff --git a/src/platform/mock/HardwareBuffer.cpp b/src/platform/mock/HardwareBuffer.cpp new file mode 100644 index 00000000..d361ee87 --- /dev/null +++ b/src/platform/mock/HardwareBuffer.cpp @@ -0,0 +1,52 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/platform/HardwareBuffer.h" +#include "tgfx/core/ImageBuffer.h" + +namespace tgfx { +std::shared_ptr ImageBuffer::MakeFrom(HardwareBufferRef, YUVColorSpace) { + return nullptr; +} + +bool HardwareBufferCheck(HardwareBufferRef) { + return false; +} + +HardwareBufferRef HardwareBufferAllocate(int, int, bool) { + return nullptr; +} + +HardwareBufferRef HardwareBufferRetain(HardwareBufferRef buffer) { + return buffer; +} + +void HardwareBufferRelease(HardwareBufferRef) { +} + +void* HardwareBufferLock(HardwareBufferRef) { + return nullptr; +} + +void HardwareBufferUnlock(HardwareBufferRef) { +} + +ImageInfo HardwareBufferGetInfo(HardwareBufferRef) { + return {}; +} +} // namespace tgfx diff --git a/src/platform/mock/NativeCodec.cpp b/src/platform/mock/NativeCodec.cpp new file mode 100644 index 00000000..2aa3d25b --- /dev/null +++ b/src/platform/mock/NativeCodec.cpp @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
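readPixels() above decodes through an intermediate RGBA buffer when CoreGraphics cannot write the requested format directly, then converts with Pixmap::readPixels(). From the caller's side, a decode looks like the sketch below; DecodeToRGBA and its parameters are illustrative, the smart-pointer template arguments (elided throughout this diff) are assumed, and MakeNativeCodec() returns nullptr on platforms built with the mock backend above.

#include <string>
#include "tgfx/core/ImageCodec.h"
#include "tgfx/utils/Buffer.h"

static bool DecodeToRGBA(const std::string& path, tgfx::Buffer* outPixels, tgfx::ImageInfo* outInfo) {
  auto codec = tgfx::ImageCodec::MakeNativeCodec(path);
  if (codec == nullptr) {
    return false;
  }
  auto info = tgfx::ImageInfo::Make(codec->width(), codec->height(), tgfx::ColorType::RGBA_8888,
                                    tgfx::AlphaType::Premultiplied);
  outPixels->alloc(info.byteSize());
  if (!codec->readPixels(info, outPixels->data())) {
    return false;
  }
  *outInfo = info;
  return true;
}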
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/core/ImageCodec.h" + +namespace tgfx { +std::shared_ptr ImageCodec::MakeNativeCodec(const std::string&) { + return nullptr; +} + +std::shared_ptr ImageCodec::MakeNativeCodec(std::shared_ptr) { + return nullptr; +} + +std::shared_ptr ImageCodec::MakeFrom(NativeImageRef) { + return nullptr; +} +} // namespace tgfx diff --git a/src/platform/web/GLVideoTexture.cpp b/src/platform/web/GLVideoTexture.cpp new file mode 100644 index 00000000..2a478011 --- /dev/null +++ b/src/platform/web/GLVideoTexture.cpp @@ -0,0 +1,68 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GLVideoTexture.h" +#include +#include "gpu/Gpu.h" + +namespace tgfx { +using namespace emscripten; + +static constexpr int ANDROID_ALIGNMENT = 16; + +std::shared_ptr GLVideoTexture::Make(Context* context, int width, int height, + bool mipMapped) { + static auto isAndroidMiniprogram = + val::module_property("tgfx").call("isAndroidMiniprogram"); + int maxMipmapLevel = mipMapped ? context->caps()->getMaxMipmapLevel(width, height) : 0; + auto sampler = + context->gpu()->createSampler(width, height, PixelFormat::RGBA_8888, maxMipmapLevel + 1); + if (sampler == nullptr) { + return nullptr; + } + auto texture = Resource::Wrap(context, new GLVideoTexture(std::move(sampler), width, height)); + if (isAndroidMiniprogram) { + // https://stackoverflow.com/questions/28291204/something-about-stagefright-codec-input-format-in-android + // Video decoder will align to multiples of 16 on the Android WeChat mini-program. 
+ texture->textureWidth += ANDROID_ALIGNMENT - width % ANDROID_ALIGNMENT; + texture->textureHeight += ANDROID_ALIGNMENT - height % ANDROID_ALIGNMENT; + } + return texture; +} + +GLVideoTexture::GLVideoTexture(std::unique_ptr sampler, int width, int height) + : Texture(width, height, ImageOrigin::TopLeft), sampler(std::move(sampler)), + textureWidth(width), textureHeight(height) { +} + +size_t GLVideoTexture::memoryUsage() const { + return width() * height() * 4; +} + +Point GLVideoTexture::getTextureCoord(float x, float y) const { + return {x / static_cast(textureWidth), y / static_cast(textureHeight)}; +} + +BackendTexture GLVideoTexture::getBackendTexture() const { + return getSampler()->getBackendTexture(textureWidth, textureHeight); +} + +void GLVideoTexture::onReleaseGPU() { + context->gpu()->deleteSampler(sampler.get()); +} +} // namespace tgfx diff --git a/src/platform/web/GLVideoTexture.h b/src/platform/web/GLVideoTexture.h new file mode 100644 index 00000000..c76ce0e8 --- /dev/null +++ b/src/platform/web/GLVideoTexture.h @@ -0,0 +1,48 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Texture.h" + +namespace tgfx { +class GLVideoTexture : public Texture { + public: + static std::shared_ptr Make(Context* context, int width, int height, + bool mipMapped = false); + + size_t memoryUsage() const override; + + const TextureSampler* getSampler() const override { + return sampler.get(); + } + + Point getTextureCoord(float x, float y) const override; + + BackendTexture getBackendTexture() const override; + + private: + std::unique_ptr sampler = {}; + int textureWidth = 0; + int textureHeight = 0; + + GLVideoTexture(std::unique_ptr sampler, int width, int height); + + void onReleaseGPU() override; +}; +} // namespace tgfx diff --git a/src/platform/web/HardwareBuffer.cpp b/src/platform/web/HardwareBuffer.cpp new file mode 100644 index 00000000..d361ee87 --- /dev/null +++ b/src/platform/web/HardwareBuffer.cpp @@ -0,0 +1,52 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
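One detail worth flagging in the GLVideoTexture::Make() padding above: when width (or height) is already a multiple of 16, `ANDROID_ALIGNMENT - width % ANDROID_ALIGNMENT` still adds a full 16 pixels. Whether that extra padding is intentional is not clear from the patch; for reference, a conventional round-up helper that leaves already-aligned sizes untouched would look like this (illustrative only, not part of the change):

static int AlignTo16(int value) {
  return (value + 15) & ~15;  // e.g. 720 -> 720, 721 -> 736
}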
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/platform/HardwareBuffer.h" +#include "tgfx/core/ImageBuffer.h" + +namespace tgfx { +std::shared_ptr ImageBuffer::MakeFrom(HardwareBufferRef, YUVColorSpace) { + return nullptr; +} + +bool HardwareBufferCheck(HardwareBufferRef) { + return false; +} + +HardwareBufferRef HardwareBufferAllocate(int, int, bool) { + return nullptr; +} + +HardwareBufferRef HardwareBufferRetain(HardwareBufferRef buffer) { + return buffer; +} + +void HardwareBufferRelease(HardwareBufferRef) { +} + +void* HardwareBufferLock(HardwareBufferRef) { + return nullptr; +} + +void HardwareBufferUnlock(HardwareBufferRef) { +} + +ImageInfo HardwareBufferGetInfo(HardwareBufferRef) { + return {}; +} +} // namespace tgfx diff --git a/src/platform/web/NativeCodec.cpp b/src/platform/web/NativeCodec.cpp new file mode 100644 index 00000000..fa30aae6 --- /dev/null +++ b/src/platform/web/NativeCodec.cpp @@ -0,0 +1,130 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "NativeCodec.h" +#include +#include "WebImageBuffer.h" +#include "WebImageInfo.h" +#include "tgfx/platform/web/WebCodec.h" +#include "tgfx/utils/Buffer.h" +#include "tgfx/utils/Stream.h" + +using namespace emscripten; + +namespace tgfx { +std::shared_ptr ImageCodec::MakeNativeCodec(const std::string& filePath) { + if (filePath.find("http://") == 0 || filePath.find("https://") == 0) { + auto data = val::module_property("tgfx") + .call("getBytesFromPath", val::module_property("module"), filePath) + .await(); + if (data.isNull()) { + return nullptr; + } + auto byteOffset = reinterpret_cast(data["byteOffset"].as()); + auto length = data["length"].as(); + Buffer imageBuffer(length); + memcpy(imageBuffer.data(), byteOffset, length); + data.call("free"); + auto imageData = imageBuffer.release(); + return ImageCodec::MakeNativeCodec(imageData); + } else { + auto imageStream = Stream::MakeFromFile(filePath); + if (imageStream == nullptr || imageStream->size() <= 14) { + return nullptr; + } + Buffer imageBuffer(imageStream->size()); + imageStream->read(imageBuffer.data(), imageStream->size()); + auto imageData = imageBuffer.release(); + return ImageCodec::MakeNativeCodec(imageData); + } +} + +std::shared_ptr ImageCodec::MakeNativeCodec(std::shared_ptr imageBytes) { + auto imageSize = WebImageInfo::GetSize(imageBytes); + if (imageSize.isEmpty()) { + return nullptr; + } + return std::shared_ptr( + new NativeCodec(imageSize.width, imageSize.height, std::move(imageBytes))); +} + +std::shared_ptr ImageCodec::MakeFrom(NativeImageRef nativeImage) { + if (nativeImage.isNull()) { + return nullptr; + } + auto size = val::module_property("tgfx").call("getSourceSize", nativeImage); + auto width = size["width"].as(); + auto height = size["height"].as(); + if (width < 1 || height < 1) { + return nullptr; + } + return std::shared_ptr(new NativeCodec(width, height, std::move(nativeImage))); +} + +NativeCodec::NativeCodec(int width, int height, std::shared_ptr imageBytes) + : ImageCodec(width, height, EncodedOrigin::TopLeft), imageBytes(std::move(imageBytes)) { +} + +NativeCodec::NativeCodec(int width, int height, emscripten::val nativeImage) + : ImageCodec(width, height, EncodedOrigin::TopLeft), nativeImage(std::move(nativeImage)) { +} + +bool NativeCodec::asyncSupport() const { + return WebCodec::AsyncSupport(); +} + +bool NativeCodec::readPixels(const ImageInfo& dstInfo, void* dstPixels) const { + if (dstInfo.isEmpty() || dstPixels == nullptr) { + return false; + } + auto image = nativeImage; + if (image.isNull()) { + auto bytes = + val(typed_memory_view(imageBytes->size(), static_cast(imageBytes->data()))); + image = val::module_property("tgfx").call("createImageFromBytes", bytes).await(); + } + + auto data = val::module_property("tgfx").call( + "readImagePixels", val::module_property("module"), image, dstInfo.width(), dstInfo.height()); + if (data.isNull()) { + return false; + } + auto pixels = reinterpret_cast(data["byteOffset"].as()); + auto info = ImageInfo::Make(width(), height(), ColorType::RGBA_8888, AlphaType::Unpremultiplied); + Pixmap pixmap(info, pixels); + auto result = pixmap.readPixels(dstInfo, dstPixels); + data.call("free"); + return result; +} + +std::shared_ptr NativeCodec::onMakeBuffer(bool) const { + auto image = nativeImage; + bool usePromise = false; + if (image.isNull()) { + auto bytes = + val(typed_memory_view(imageBytes->size(), static_cast(imageBytes->data()))); + image = 
val::module_property("tgfx").call("createImageFromBytes", bytes); + usePromise = WebCodec::AsyncSupport(); + if (!usePromise) { + image = image.await(); + } + } + return std::shared_ptr( + new WebImageBuffer(width(), height(), std::move(image), usePromise)); +} +} // namespace tgfx diff --git a/src/platform/web/NativeCodec.h b/src/platform/web/NativeCodec.h new file mode 100644 index 00000000..0fbe0a0d --- /dev/null +++ b/src/platform/web/NativeCodec.h @@ -0,0 +1,44 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/ImageCodec.h" + +namespace tgfx { +class NativeCodec : public ImageCodec { + public: + bool asyncSupport() const override; + + bool readPixels(const ImageInfo& dstInfo, void* dstPixels) const override; + + protected: + std::shared_ptr onMakeBuffer(bool tryHardware) const override; + + private: + std::shared_ptr imageBytes = nullptr; + emscripten::val nativeImage = emscripten::val::null(); + + NativeCodec(int width, int height, std::shared_ptr imageBytes); + + NativeCodec(int width, int height, emscripten::val nativeImage); + + friend class ImageCodec; +}; +} // namespace tgfx diff --git a/src/platform/web/TGFXWasmBindings.cpp b/src/platform/web/TGFXWasmBindings.cpp new file mode 100644 index 00000000..cd69ce07 --- /dev/null +++ b/src/platform/web/TGFXWasmBindings.cpp @@ -0,0 +1,74 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+#include <emscripten/bind.h>
+#include "tgfx/core/FontMetrics.h"
+#include "tgfx/core/ImageInfo.h"
+#include "tgfx/core/Matrix.h"
+#include "tgfx/core/PathTypes.h"
+#include "tgfx/core/Stroke.h"
+
+using namespace emscripten;
+using namespace tgfx;
+
+EMSCRIPTEN_BINDINGS(tgfx) {
+  class_<Matrix>("TGFXMatrix").function("_get", &Matrix::get).function("_set", &Matrix::set);
+
+  value_object<Point>("TGFXPoint").field("x", &Point::x).field("y", &Point::y);
+
+  value_object<Rect>("TGFXRect")
+      .field("left", &Rect::left)
+      .field("top", &Rect::top)
+      .field("right", &Rect::right)
+      .field("bottom", &Rect::bottom);
+
+  register_vector<Point>("VectorTGFXPoint");
+
+  class_<ImageInfo>("TGFXImageInfo")
+      .property("width", &ImageInfo::width)
+      .property("height", &ImageInfo::height)
+      .property("rowBytes", &ImageInfo::rowBytes)
+      .property("colorType", &ImageInfo::colorType);
+
+  class_<Stroke>("TGFXStroke")
+      .property("width", &Stroke::width)
+      .property("cap", &Stroke::cap)
+      .property("join", &Stroke::join)
+      .property("miterLimit", &Stroke::miterLimit);
+
+  value_object<FontMetrics>("TGFXFontMetrics")
+      .field("ascent", &FontMetrics::ascent)
+      .field("descent", &FontMetrics::descent)
+      .field("xHeight", &FontMetrics::xHeight)
+      .field("capHeight", &FontMetrics::capHeight);
+
+  enum_<PathFillType>("TGFXPathFillType")
+      .value("Winding", PathFillType::Winding)
+      .value("EvenOdd", PathFillType::EvenOdd)
+      .value("InverseWinding", PathFillType::InverseWinding)
+      .value("InverseEvenOdd", PathFillType::InverseEvenOdd);
+
+  enum_<LineCap>("TGFXLineCap")
+      .value("Butt", LineCap::Butt)
+      .value("Round", LineCap::Round)
+      .value("Square", LineCap::Square);
+
+  enum_<LineJoin>("TGFXLineJoin")
+      .value("Miter", LineJoin::Miter)
+      .value("Round", LineJoin::Round)
+      .value("Bevel", LineJoin::Bevel);
+}
diff --git a/src/platform/web/VideoElement.cpp b/src/platform/web/VideoElement.cpp
new file mode 100644
index 00000000..43d3362c
--- /dev/null
+++ b/src/platform/web/VideoElement.cpp
@@ -0,0 +1,60 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "platform/web/VideoElement.h" +#include "GLVideoTexture.h" +#include "tgfx/platform/web/WebCodec.h" + +namespace tgfx { +using namespace emscripten; + +std::shared_ptr VideoElement::MakeFrom(val video, int width, int height) { + if (video == val::null() || width < 1 || height < 1) { + return nullptr; + } + return std::shared_ptr(new VideoElement(video, width, height)); +} + +VideoElement::VideoElement(emscripten::val video, int width, int height) + : WebImageStream(video, width, height, false) { +} + +void VideoElement::markFrameChanged(emscripten::val promise) { + currentPromise = promise; + markContentDirty(Rect::MakeWH(width(), height())); + if (currentPromise != val::null() && !WebCodec::AsyncSupport()) { + currentPromise.await(); + } +} + +std::shared_ptr VideoElement::onMakeTexture(Context* context, bool mipMapped) { + auto texture = GLVideoTexture::Make(context, width(), height(), mipMapped); + if (texture != nullptr) { + onUpdateTexture(texture, Rect::MakeWH(width(), height())); + } + return texture; +} + +bool VideoElement::onUpdateTexture(std::shared_ptr texture, const Rect& bounds) { + if (currentPromise != val::null() && WebCodec::AsyncSupport()) { + currentPromise.await(); + } + return WebImageStream::onUpdateTexture(texture, bounds); +} + +} // namespace tgfx \ No newline at end of file diff --git a/src/platform/web/VideoElement.h b/src/platform/web/VideoElement.h new file mode 100644 index 00000000..cea31325 --- /dev/null +++ b/src/platform/web/VideoElement.h @@ -0,0 +1,54 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "WebImageStream.h" + +namespace tgfx { +/** + * The VideoElement class allows direct access to image buffers rendered into a HTMLVideoElement + * on the web platform. It is typically used with the ImageReader class. + */ +class VideoElement : public WebImageStream { + public: + /** + * Creates a new VideoElement from the specified HTMLVideoElement object and the video size. + * Returns nullptr if the video is null or the buffer size is zero. + */ + static std::shared_ptr MakeFrom(emscripten::val video, int width, int height); + + /** + * Notifies the VideoElement that a new image frame has been rendered into the associated + * HTMLVideoElement. The next acquired ImageBuffer will call the promise.await() method before + * generating textures. 
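+   *
+   * Illustrative usage (videoElement, reader, and framePromise are hypothetical variables):
+   *   videoElement->markFrameChanged(framePromise);
+   *   auto buffer = reader->acquireNextBuffer();
+   * VideoElementReader::acquireNextBuffer(promise) wraps exactly these two steps.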
+ */ + void markFrameChanged(emscripten::val promise); + + protected: + std::shared_ptr onMakeTexture(Context* context, bool mipMapped) override; + + bool onUpdateTexture(std::shared_ptr texture, const Rect& bounds) override; + + private: + emscripten::val currentPromise = emscripten::val::null(); + + VideoElement(emscripten::val video, int width, int height); +}; +} // namespace tgfx diff --git a/src/platform/web/VideoElementReader.cpp b/src/platform/web/VideoElementReader.cpp new file mode 100644 index 00000000..9e343c75 --- /dev/null +++ b/src/platform/web/VideoElementReader.cpp @@ -0,0 +1,56 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/platform/web/VideoElementReader.h" +#include "GLVideoTexture.h" +#include "opengl/GLSampler.h" +#include "platform/web/VideoElement.h" +#include "tgfx/platform/web/WebCodec.h" +#include "utils/Log.h" + +namespace tgfx { +using namespace emscripten; + +std::shared_ptr VideoElementReader::MakeFrom(val video, int width, int height) { + if (video == val::null() || width < 1 || height < 1) { + return nullptr; + } + auto imageStream = VideoElement::MakeFrom(video, width, height); + if (imageStream == nullptr) { + return nullptr; + } + auto imageReader = + std::shared_ptr(new VideoElementReader(std::move(imageStream))); + imageReader->weakThis = imageReader; + return imageReader; +} + +VideoElementReader::VideoElementReader(std::shared_ptr stream) + : ImageReader(std::move(stream)) { +} + +std::shared_ptr VideoElementReader::acquireNextBuffer() { + stream->markContentDirty(Rect::MakeWH(stream->width(), stream->height())); + return ImageReader::acquireNextBuffer(); +} + +std::shared_ptr VideoElementReader::acquireNextBuffer(val promise) { + std::static_pointer_cast(stream)->markFrameChanged(promise); + return ImageReader::acquireNextBuffer(); +} +} // namespace tgfx \ No newline at end of file diff --git a/src/platform/web/WebCodec.cpp b/src/platform/web/WebCodec.cpp new file mode 100644 index 00000000..de812016 --- /dev/null +++ b/src/platform/web/WebCodec.cpp @@ -0,0 +1,32 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "tgfx/platform/web/WebCodec.h"
+#include <atomic>
+
+namespace tgfx {
+static std::atomic_bool AsyncSupportEnabled = false;
+
+bool WebCodec::AsyncSupport() {
+  return AsyncSupportEnabled;
+}
+
+void WebCodec::SetAsyncSupport(bool enabled) {
+  AsyncSupportEnabled = enabled;
+}
+}  // namespace tgfx
\ No newline at end of file
diff --git a/src/platform/web/WebImageBuffer.cpp b/src/platform/web/WebImageBuffer.cpp
new file mode 100644
index 00000000..e1e1b42a
--- /dev/null
+++ b/src/platform/web/WebImageBuffer.cpp
@@ -0,0 +1,75 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "WebImageBuffer.h"
+#include "gpu/Texture.h"
+#include "opengl/GLSampler.h"
+#include "tgfx/core/ImageCodec.h"
+
+using namespace emscripten;
+
+namespace tgfx {
+std::shared_ptr<WebImageBuffer> WebImageBuffer::MakeFrom(emscripten::val nativeImage) {
+  if (nativeImage.isNull()) {
+    return nullptr;
+  }
+  auto size = val::module_property("tgfx").call<val>("getSourceSize", nativeImage);
+  auto width = size["width"].as<int>();
+  auto height = size["height"].as<int>();
+  if (width < 1 || height < 1) {
+    return nullptr;
+  }
+  return std::shared_ptr<WebImageBuffer>(new WebImageBuffer(width, height, nativeImage, false));
+}
+
+std::shared_ptr<WebImageBuffer> WebImageBuffer::MakeAdopted(emscripten::val nativeImage) {
+  auto imageBuffer = MakeFrom(nativeImage);
+  if (imageBuffer != nullptr) {
+    imageBuffer->adopted = true;
+  }
+  return imageBuffer;
+}
+
+WebImageBuffer::WebImageBuffer(int width, int height, emscripten::val nativeImage, bool usePromise)
+    : _width(width), _height(height), nativeImage(nativeImage), usePromise(usePromise) {
+}
+
+WebImageBuffer::~WebImageBuffer() {
+  if (adopted) {
+    val::module_property("tgfx").call<void>("releaseNativeImage", nativeImage);
+  }
+}
+
+std::shared_ptr<Texture> WebImageBuffer::onMakeTexture(Context* context, bool) const {
+  auto texture = Texture::MakeRGBA(context, width(), height(), nullptr, 0, ImageOrigin::TopLeft);
+  if (texture == nullptr) {
+    return nullptr;
+  }
+  auto glInfo = static_cast<const GLSampler*>(texture->getSampler());
+  val::module_property("tgfx").call<void>("uploadToTexture", emscripten::val::module_property("GL"),
+                                          getImage(), glInfo->id, false);
+  return texture;
+}
+
+emscripten::val WebImageBuffer::getImage() const {
+  if (usePromise) {
+    return nativeImage.await();
+  }
+  return nativeImage;
+}
+}  // namespace tgfx
diff --git a/src/platform/web/WebImageBuffer.h b/src/platform/web/WebImageBuffer.h
new file mode 100644
index 00000000..54172386
--- /dev/null
+++ b/src/platform/web/WebImageBuffer.h
@@ -0,0 +1,72 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#pragma once
+
+#include <emscripten/val.h>
+#include "tgfx/core/ImageBuffer.h"
+
+namespace tgfx {
+class WebImageBuffer : public ImageBuffer {
+ public:
+  /**
+   * Creates a new ImageBuffer object from the platform-specific nativeImage in the CPU. The
+   * returned ImageBuffer object takes a reference to the nativeImage. Returns nullptr if the
+   * nativeImage is nullptr or has a size of zero.
+   */
+  static std::shared_ptr<WebImageBuffer> MakeFrom(emscripten::val nativeImage);
+
+  /**
+   * Creates a new ImageBuffer object from the platform-specific nativeImage in the CPU.
The + * returned ImageBuffer object takes a reference to the nativeImage and will call + * tgfx.releaseNativeImage() when it goes out of scope. Returns nullptr if the nativeImage is + * nullptr or has a size of zero. + */ + static std::shared_ptr MakeAdopted(emscripten::val nativeImage); + + ~WebImageBuffer() override; + + int width() const override { + return _width; + } + + int height() const override { + return _height; + } + + bool isAlphaOnly() const override { + return false; + } + + protected: + std::shared_ptr onMakeTexture(Context* context, bool mipMapped) const override; + + private: + int _width = 0; + int _height = 0; + emscripten::val nativeImage = emscripten::val::null(); + bool usePromise = false; + bool adopted = false; + + WebImageBuffer(int width, int height, emscripten::val nativeImage, bool usePromise); + + emscripten::val getImage() const; + + friend class NativeCodec; +}; +} // namespace tgfx diff --git a/src/platform/web/WebImageInfo.cpp b/src/platform/web/WebImageInfo.cpp new file mode 100644 index 00000000..5ac4e8ed --- /dev/null +++ b/src/platform/web/WebImageInfo.cpp @@ -0,0 +1,134 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "WebImageInfo.h" + +using namespace emscripten; + +namespace tgfx { +static std::string ReadString(const DataView& dataView, size_t offset, size_t length) { + return std::string(reinterpret_cast(dataView.bytes()) + offset, length); +} + +static bool Compare(const DataView& dataView, size_t offset, size_t length, + const std::string& text) { + return memcmp(dataView.bytes() + offset, text.c_str(), length) == 0; +} + +static bool CompareAnyOf(const DataView& dataView, size_t offset, size_t length, + const std::vector& textList) { + return std::any_of(textList.begin(), textList.end(), [&](const std::string& text) { + return memcmp(dataView.bytes() + offset, text.c_str(), length) == 0; + }); +} + +static bool IsPng(std::shared_ptr imageBytes, int& width, int& height) { + DataView dataView(imageBytes->bytes(), imageBytes->size(), ByteOrder::BigEndian); + if (dataView.size() < 4) { + return false; + } + if (!Compare(dataView, 0, 4, "\x89PNG")) { + return false; + } + std::string firstChunkType = ReadString(dataView, 12, 4); + if (firstChunkType == "IHDR" && dataView.size() >= 24) { + width = dataView.getUint32(16); + height = dataView.getUint32(20); + return true; + } else if (firstChunkType == "CgBI") { + if (Compare(dataView, 28, 4, "IHDR") && dataView.size() >= 40) { + width = dataView.getUint32(32); + height = dataView.getUint32(36); + return true; + } + } + return false; +} + +static bool IsJpeg(std::shared_ptr imageBytes, int& width, int& height) { + DataView dataView(imageBytes->bytes(), imageBytes->size(), ByteOrder::BigEndian); + if (dataView.size() < 2) { + return false; + } + if (!Compare(dataView, 0, 2, "\xFF\xD8")) { + return false; + } + off_t offset = 2; + while (offset + 9 <= static_cast(dataView.size())) { + uint16_t sectionSize = dataView.getUint16(offset + 2); + if (CompareAnyOf(dataView, offset, 2, {"\xFF\xC0", "\xFF\xC1", "\xFF\xC2"})) { + height = dataView.getUint16(offset + 5); + width = dataView.getUint16(offset + 7); + return true; + } + offset += sectionSize + 2; + } + return false; +} + +static bool IsWebp(std::shared_ptr imageBytes, int& width, int& height) { + DataView dataView(imageBytes->bytes(), imageBytes->size()); + if (dataView.size() < 16) { + return false; + } + if (!Compare(dataView, 0, 4, "RIFF") || !Compare(dataView, 8, 4, "WEBP")) { + return false; + } + auto type = ReadString(dataView, 12, 4); + if (type == "VP8 " && dataView.size() >= 30) { + width = dataView.getUint16(26) & 0x3FFF; + height = dataView.getUint16(28) & 0x3FFF; + return true; + } else if (type == "VP8L" && dataView.size() >= 25) { + uint32_t n = dataView.getUint32(21); + width = (n & 0x3FFF) + 1; + height = ((n >> 14) & 0x3FFF) + 1; + return true; + } else if (type == "VP8X" && dataView.size() >= 30) { + uint8_t extendedHeader = dataView.getUint8(20); + bool validStart = (extendedHeader & 0xc0) == 0; + bool validEnd = (extendedHeader & 0x01) == 0; + if (validStart && validEnd) { + width = (dataView.getUint32(24) & 0x00FFFFFF) + 1; + height = (dataView.getUint32(27) & 0x00FFFFFF) + 1; + return true; + } + } + return false; +} + +static bool CheckWebpSupport() { + return val::module_property("tgfx").call("hasWebpSupport").as(); +} + +ISize WebImageInfo::GetSize(std::shared_ptr imageBytes) { + static const bool hasWebpSupport = CheckWebpSupport(); + auto imageSize = ISize::MakeEmpty(); + if (IsPng(imageBytes, imageSize.width, imageSize.height)) { + return imageSize; + } + if 
(IsJpeg(imageBytes, imageSize.width, imageSize.height)) { + return imageSize; + } + if (hasWebpSupport && IsWebp(imageBytes, imageSize.width, imageSize.height)) { + return imageSize; + } + return imageSize; +} + +} // namespace tgfx diff --git a/src/platform/web/WebImageInfo.h b/src/platform/web/WebImageInfo.h new file mode 100644 index 00000000..4d4d2fb7 --- /dev/null +++ b/src/platform/web/WebImageInfo.h @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include "tgfx/core/Data.h" +#include "tgfx/core/Size.h" +#include "tgfx/utils/DataView.h" + +namespace tgfx { +class WebImageInfo { + public: + static ISize GetSize(std::shared_ptr imageBytes); +}; + +} // namespace tgfx diff --git a/src/platform/web/WebImageStream.cpp b/src/platform/web/WebImageStream.cpp new file mode 100644 index 00000000..2ebf6574 --- /dev/null +++ b/src/platform/web/WebImageStream.cpp @@ -0,0 +1,63 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "platform/web/WebImageStream.h" +#include "gpu/Gpu.h" +#include "gpu/Texture.h" +#include "opengl/GLSampler.h" + +namespace tgfx { +using namespace emscripten; + +std::shared_ptr WebImageStream::MakeFrom(val source, int width, int height, + bool alphaOnly) { + if (source == val::null() || width < 1 || height < 1) { + return nullptr; + } + return std::shared_ptr(new WebImageStream(source, width, height, alphaOnly)); +} + +WebImageStream::WebImageStream(emscripten::val source, int width, int height, bool alphaOnly) + : source(source), _width(width), _height(height), alphaOnly(alphaOnly) { +} + +std::shared_ptr WebImageStream::onMakeTexture(Context* context, bool mipMapped) { + std::shared_ptr texture = nullptr; + if (alphaOnly) { + texture = Texture::MakeAlpha(context, width(), height(), ImageOrigin::TopLeft, mipMapped); + } else { + texture = Texture::MakeRGBA(context, width(), height(), ImageOrigin::TopLeft, mipMapped); + } + if (texture != nullptr) { + onUpdateTexture(texture, Rect::MakeWH(_width, _height)); + } + return texture; +} + +bool WebImageStream::onUpdateTexture(std::shared_ptr texture, const Rect&) { + auto glSampler = static_cast(texture->getSampler()); + val::module_property("tgfx").call("uploadToTexture", emscripten::val::module_property("GL"), + source, glSampler->id, alphaOnly); + if (glSampler->hasMipmaps()) { + auto gpu = texture->getContext()->gpu(); + gpu->regenerateMipMapLevels(glSampler); + } + return true; +} + +} // namespace tgfx \ No newline at end of file diff --git a/src/platform/web/WebImageStream.h b/src/platform/web/WebImageStream.h new file mode 100644 index 00000000..2f542c28 --- /dev/null +++ b/src/platform/web/WebImageStream.h @@ -0,0 +1,69 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "core/ImageStream.h" + +namespace tgfx { +/** + * The WebImageStream class allows direct access to image buffers rendered into a TexImageSource + * object on the web platform. It is typically used with the ImageReader class. + */ +class WebImageStream : public ImageStream { + public: + /** + * Creates a new WebImageStream from the specified TexImageSource object and the size. Returns + * nullptr if the source is null or the buffer size is zero. 
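+   * Typical TexImageSource values are, for example, HTMLImageElement, HTMLCanvasElement,
+   * HTMLVideoElement, or ImageBitmap objects passed in as an emscripten::val.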
+ */ + static std::shared_ptr MakeFrom(emscripten::val source, int width, int height, + bool alphaOnly = false); + + int width() const override { + return _width; + } + + int height() const override { + return _height; + } + + bool isAlphaOnly() const override { + return alphaOnly; + } + + bool isHardwareBacked() const override { + return false; + } + + protected: + WebImageStream(emscripten::val source, int width, int height, bool alphaOnly); + + std::shared_ptr onMakeTexture(Context* context, bool mipMapped) override; + + bool onUpdateTexture(std::shared_ptr texture, const Rect& bounds) override; + + private: + emscripten::val source = emscripten::val::null(); + int _width = 0; + int _height = 0; + bool alphaOnly = false; + + friend class WebMask; +}; +} // namespace tgfx diff --git a/src/shaders/ColorFilterShader.cpp b/src/shaders/ColorFilterShader.cpp new file mode 100644 index 00000000..8313b528 --- /dev/null +++ b/src/shaders/ColorFilterShader.cpp @@ -0,0 +1,50 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ColorFilterShader.h" +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +std::shared_ptr Shader::makeWithColorFilter( + std::shared_ptr colorFilter) const { + auto strongThis = weakThis.lock(); + if (strongThis == nullptr) { + return nullptr; + } + if (colorFilter == nullptr) { + return strongThis; + } + auto shader = std::make_shared(std::move(strongThis), std::move(colorFilter)); + shader->weakThis = shader; + return shader; +} + +std::unique_ptr ColorFilterShader::asFragmentProcessor( + const FPArgs& args) const { + auto fp1 = shader->asFragmentProcessor(args); + if (fp1 == nullptr) { + return nullptr; + } + auto fp2 = colorFilter->asFragmentProcessor(); + if (fp2 == nullptr) { + return fp1; + } + std::unique_ptr fpSeries[] = {std::move(fp1), std::move(fp2)}; + return FragmentProcessor::RunInSeries(fpSeries, 2); +} +} // namespace tgfx diff --git a/src/shaders/ColorFilterShader.h b/src/shaders/ColorFilterShader.h new file mode 100644 index 00000000..5114ab64 --- /dev/null +++ b/src/shaders/ColorFilterShader.h @@ -0,0 +1,36 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Shader.h" + +namespace tgfx { +class ColorFilterShader : public Shader { + public: + ColorFilterShader(std::shared_ptr shader, std::shared_ptr colorFilter) + : shader(std::move(shader)), colorFilter(std::move(colorFilter)) { + } + + std::unique_ptr asFragmentProcessor(const FPArgs& args) const override; + + private: + std::shared_ptr shader; + std::shared_ptr colorFilter; +}; +} // namespace tgfx diff --git a/src/shaders/ColorShader.cpp b/src/shaders/ColorShader.cpp new file mode 100644 index 00000000..a23c26a7 --- /dev/null +++ b/src/shaders/ColorShader.cpp @@ -0,0 +1,37 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ColorShader.h" +#include "gpu/processors/ConstColorProcessor.h" + +namespace tgfx { +std::shared_ptr Shader::MakeColorShader(Color color) { + auto shader = std::make_shared(color); + shader->weakThis = shader; + return shader; +} + +bool ColorShader::isOpaque() const { + return color.isOpaque(); +} + +std::unique_ptr ColorShader::asFragmentProcessor(const FPArgs&) const { + return ConstColorProcessor::Make(color.premultiply(), InputMode::ModulateA); +} + +} // namespace tgfx diff --git a/src/shaders/ColorShader.h b/src/shaders/ColorShader.h new file mode 100644 index 00000000..a8fcf0d3 --- /dev/null +++ b/src/shaders/ColorShader.h @@ -0,0 +1,36 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Shader.h" + +namespace tgfx { +class ColorShader : public Shader { + public: + explicit ColorShader(Color color) : color(color) { + } + + bool isOpaque() const override; + + std::unique_ptr asFragmentProcessor(const FPArgs& args) const override; + + private: + Color color; +}; +} // namespace tgfx diff --git a/src/shaders/GradientShader.cpp b/src/shaders/GradientShader.cpp new file mode 100644 index 00000000..40427a95 --- /dev/null +++ b/src/shaders/GradientShader.cpp @@ -0,0 +1,305 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "GradientShader.h" +#include "gpu/ResourceProvider.h" +#include "gpu/processors/ClampedGradientEffect.h" +#include "gpu/processors/DualIntervalGradientColorizer.h" +#include "gpu/processors/LinearGradientLayout.h" +#include "gpu/processors/RadialGradientLayout.h" +#include "gpu/processors/SingleIntervalGradientColorizer.h" +#include "gpu/processors/SweepGradientLayout.h" +#include "gpu/processors/TextureGradientColorizer.h" +#include "gpu/processors/UnrolledBinaryGradientColorizer.h" +#include "utils/MathExtra.h" + +namespace tgfx { +// Intervals smaller than this (that aren't hard stops) on low-precision-only devices force us to +// use the textured gradient +static constexpr float LowPrecisionIntervalLimit = 0.01f; +static constexpr float DegenerateThreshold = 1.0f / (1 << 15); + +// Analyze the shader's color stops and positions and chooses an appropriate colorizer to represent +// the gradient. +static std::unique_ptr MakeColorizer(const Context* context, const Color* colors, + const float* positions, int count) { + // If there are hard stops at the beginning or end, the first and/or last color should be + // ignored by the colorizer since it should only be used in a clamped border color. By detecting + // and removing these stops at the beginning, it makes optimizing the remaining color stops + // simpler. 
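+  // Worked example (hypothetical values): colors {A, B, C, D} with positions {0.0, 0.0, 0.5, 1.0}
+  // contain a bottom hard stop, so offset becomes 1 and count drops to 3. The colorizer then only
+  // sees {B, C, D} at {0.0, 0.5, 1.0}; color A survives solely as the clamped border color that
+  // MakeGradient later hands to ClampedGradientEffect.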
+ bool bottomHardStop = FloatNearlyEqual(positions[0], positions[1]); + bool topHardStop = FloatNearlyEqual(positions[count - 2], positions[count - 1]); + int offset = 0; + if (bottomHardStop) { + offset += 1; + count--; + } + if (topHardStop) { + count--; + } + + // Two remaining colors means a single interval from 0 to 1 + // (but it may have originally been a 3 or 4 color gradient with 1-2 hard stops at the ends) + if (count == 2) { + return SingleIntervalGradientColorizer::Make(colors[offset], colors[offset + 1]); + } + + bool tryAnalyticColorizer = count <= UnrolledBinaryGradientColorizer::kMaxColorCount; + + // The remaining analytic colorizes use scale*t+bias, and the scale/bias values can become + // quite large when thresholds are close (but still outside the hard stop limit). If float isn't + // 32-bit, output can be incorrect if the thresholds are too close together. However, the + // analytic shaders are higher quality, so they can be used with lower precision hardware when + // the thresholds are not ill-conditioned. + if (!context->caps()->floatIs32Bits && tryAnalyticColorizer) { + // Could run into problems, check if thresholds are close together (with a limit of .01, so + // that scales will be less than 100, which leaves 4 decimals of precision on 16-bit). + for (int i = offset; i < count - 1; i++) { + auto delta = std::abs(positions[i] - positions[i + 1]); + if (delta <= LowPrecisionIntervalLimit && delta > FLOAT_NEARLY_ZERO) { + tryAnalyticColorizer = false; + break; + } + } + } + if (tryAnalyticColorizer) { + if (count == 3) { + // Must be a dual interval gradient, where the middle point is at offset+1 and the two + // intervals share the middle color stop. + return DualIntervalGradientColorizer::Make(colors[offset], colors[offset + 1], + colors[offset + 1], colors[offset + 2], + positions[offset + 1]); + } else if (count == 4 && FloatNearlyEqual(positions[offset + 1], positions[offset + 2])) { + // Two separate intervals that join at the same threshold position + return DualIntervalGradientColorizer::Make(colors[offset], colors[offset + 1], + colors[offset + 2], colors[offset + 3], + positions[offset + 1]); + } + + // The single and dual intervals are a specialized case of the unrolled binary search + // colorizer which can analytically render gradients of up to 8 intervals (up to 9 or 16 + // colors depending on how many hard stops are inserted). + auto unrolled = + UnrolledBinaryGradientColorizer::Make(colors + offset, positions + offset, count); + if (unrolled) { + return unrolled; + } + } + + // Otherwise, fall back to a raster gradient sample by a texture, which can handle + // arbitrary gradients (the only downside being sampling resolution). 
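+  // This fallback is reached, for example, when a gradient has more stops than
+  // UnrolledBinaryGradientColorizer::kMaxColorCount after the hard-stop trimming above, or when
+  // stops are closely spaced on hardware without 32-bit float support (see the checks above).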
+ return TextureGradientColorizer::Make( + context->resourceProvider()->getGradient(colors + offset, positions + offset, count)); +} + +GradientShaderBase::GradientShaderBase(const std::vector& colors, + const std::vector& positions, + const Matrix& pointsToUnit) + : pointsToUnit(pointsToUnit) { + colorsAreOpaque = true; + for (auto& color : colors) { + if (!color.isOpaque()) { + colorsAreOpaque = false; + break; + } + } + auto dummyFirst = false; + auto dummyLast = false; + if (!positions.empty()) { + dummyFirst = positions[0] != 0; + dummyLast = positions[positions.size() - 1] != 1.0f; + } + // Now copy over the colors, adding the dummies as needed + if (dummyFirst) { + originalColors.push_back(colors[0]); + } + originalColors.insert(originalColors.end(), colors.begin(), colors.end()); + if (dummyLast) { + originalColors.push_back(colors[colors.size() - 1]); + } + if (positions.empty()) { + auto posScale = 1.0f / static_cast(colors.size() - 1); + for (size_t i = 0; i < colors.size(); i++) { + originalPositions.push_back(static_cast(i) * posScale); + } + } else { + float prev = 0; + originalPositions.push_back(prev); // force the first pos to 0 + for (size_t i = dummyFirst ? 0 : 1; i < colors.size() + dummyLast; ++i) { + // Pin the last value to 1.0, and make sure position is monotonic. + float curr; + if (i != colors.size()) { + curr = std::max(std::min(positions[i], 1.0f), prev); + } else { + curr = 1.0f; + } + originalPositions.push_back(curr); + prev = curr; + } + } +} + +// Combines the colorizer and layout with an appropriately configured master effect based on the +// gradient's tile mode +static std::unique_ptr MakeGradient(const Context* context, + const GradientShaderBase& shader, + std::unique_ptr layout) { + if (layout == nullptr) { + return nullptr; + } + bool allOpaque = true; + for (const auto& color : shader.originalColors) { + if (!FloatNearlyEqual(color.alpha, 1.0)) { + allOpaque = false; + break; + } + } + // All gradients are colorized the same way, regardless of layout + std::unique_ptr colorizer = + MakeColorizer(context, &(shader.originalColors[0]), &(shader.originalPositions[0]), + static_cast(shader.originalColors.size())); + if (colorizer == nullptr) { + return nullptr; + } + + // The master effect has to export premultiply colors, but under certain conditions it doesn't + // need to do anything to achieve that: i.e. all the colors have a = 1, in which case + // premultiply is a no op. + return ClampedGradientEffect::Make( + std::move(colorizer), std::move(layout), shader.originalColors[0], + shader.originalColors[shader.originalColors.size() - 1], !allOpaque); +} + +static Matrix PointsToUnitMatrix(const Point& startPoint, const Point& endPoint) { + Point vec = {endPoint.x - startPoint.x, endPoint.y - startPoint.y}; + float mag = Point::Length(vec.x, vec.y); + float inv = mag != 0 ? 
1 / mag : 0; + vec.set(vec.x * inv, vec.y * inv); + auto matrix = Matrix::I(); + matrix.setSinCos(-vec.y, vec.x, startPoint.x, startPoint.y); + matrix.postTranslate(-startPoint.x, -startPoint.y); + matrix.postScale(inv, inv); + return matrix; +} + +LinearGradient::LinearGradient(const Point& startPoint, const Point& endPoint, + const std::vector& colors, + const std::vector& positions) + : GradientShaderBase(colors, positions, PointsToUnitMatrix(startPoint, endPoint)) { +} + +std::unique_ptr LinearGradient::asFragmentProcessor(const FPArgs& args) const { + auto matrix = Matrix::I(); + if (!ComputeTotalInverse(args, &matrix)) { + return nullptr; + } + matrix.postConcat(pointsToUnit); + return MakeGradient(args.context, *this, LinearGradientLayout::Make(matrix)); +} + +static Matrix RadialToUnitMatrix(const Point& center, float radius) { + float inv = 1 / radius; + auto matrix = Matrix::MakeTrans(-center.x, -center.y); + matrix.postScale(inv, inv); + return matrix; +} + +RadialGradient::RadialGradient(const Point& center, float radius, const std::vector& colors, + const std::vector& positions) + : GradientShaderBase(colors, positions, RadialToUnitMatrix(center, radius)) { +} + +std::unique_ptr RadialGradient::asFragmentProcessor(const FPArgs& args) const { + auto matrix = Matrix::I(); + if (!ComputeTotalInverse(args, &matrix)) { + return nullptr; + } + matrix.postConcat(pointsToUnit); + return MakeGradient(args.context, *this, RadialGradientLayout::Make(matrix)); +} + +SweepGradient::SweepGradient(const Point& center, float t0, float t1, + const std::vector& colors, const std::vector& positions) + : GradientShaderBase(colors, positions, Matrix::MakeTrans(-center.x, -center.y)), bias(-t0), + scale(1.f / (t1 - t0)) { +} + +std::unique_ptr SweepGradient::asFragmentProcessor(const FPArgs& args) const { + auto matrix = Matrix::I(); + if (!ComputeTotalInverse(args, &matrix)) { + return nullptr; + } + matrix.postConcat(pointsToUnit); + return MakeGradient(args.context, *this, SweepGradientLayout::Make(matrix, bias, scale)); +} + +std::shared_ptr Shader::MakeLinearGradient(const Point& startPoint, const Point& endPoint, + const std::vector& colors, + const std::vector& positions) { + if (!std::isfinite(Point::Distance(endPoint, startPoint)) || colors.empty()) { + return nullptr; + } + if (1 == colors.size()) { + return Shader::MakeColorShader(colors[0]); + } + if (FloatNearlyZero((endPoint - startPoint).length(), DegenerateThreshold)) { + // Degenerate gradient, the only tricky complication is when in clamp mode, the limit of + // the gradient approaches two half planes of solid color (first and last). However, they + // are divided by the line perpendicular to the start and end point, which becomes undefined + // once start and end are exactly the same, so just use the end color for a stable solution. 
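+    // With DegenerateThreshold = 1 / (1 << 15) ≈ 3.1e-5, "exactly the same" above means the start
+    // and end points are within that distance of each other.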
+ return Shader::MakeColorShader(colors[0]); + } + auto shader = std::make_shared(startPoint, endPoint, colors, positions); + shader->weakThis = shader; + return shader; +} + +std::shared_ptr Shader::MakeRadialGradient(const Point& center, float radius, + const std::vector& colors, + const std::vector& positions) { + if (radius < 0 || colors.empty()) { + return nullptr; + } + if (1 == colors.size()) { + return Shader::MakeColorShader(colors[0]); + } + + if (FloatNearlyZero(radius, DegenerateThreshold)) { + // Degenerate gradient optimization, and no special logic needed for clamped radial gradient + return Shader::MakeColorShader(colors[colors.size() - 1]); + } + auto shader = std::make_shared(center, radius, colors, positions); + shader->weakThis = shader; + return shader; +} + +std::shared_ptr Shader::MakeSweepGradient(const Point& center, float startAngle, + float endAngle, const std::vector& colors, + const std::vector& positions) { + if (colors.empty()) { + return nullptr; + } + if (1 == colors.size() || FloatNearlyEqual(startAngle, endAngle, DegenerateThreshold)) { + return Shader::MakeColorShader(colors[0]); + } + auto shader = std::make_shared(center, startAngle / 360.f, endAngle / 360.f, + colors, positions); + shader->weakThis = shader; + return shader; +} +} // namespace tgfx diff --git a/src/shaders/GradientShader.h b/src/shaders/GradientShader.h new file mode 100644 index 00000000..88eb3625 --- /dev/null +++ b/src/shaders/GradientShader.h @@ -0,0 +1,70 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Matrix.h" +#include "tgfx/core/Shader.h" + +namespace tgfx { + +class GradientShaderBase : public Shader { + public: + GradientShaderBase(const std::vector& colors, const std::vector& positions, + const Matrix& pointsToUnit); + + bool isOpaque() const override { + return colorsAreOpaque; + } + + std::vector originalColors = {}; + std::vector originalPositions = {}; + + protected: + const Matrix pointsToUnit; + bool colorsAreOpaque = false; +}; + +class LinearGradient : public GradientShaderBase { + public: + LinearGradient(const Point& startPoint, const Point& endPoint, const std::vector& colors, + const std::vector& positions); + + std::unique_ptr asFragmentProcessor(const FPArgs& args) const override; +}; + +class RadialGradient : public GradientShaderBase { + public: + RadialGradient(const Point& center, float radius, const std::vector& colors, + const std::vector& positions); + + std::unique_ptr asFragmentProcessor(const FPArgs& args) const override; +}; + +class SweepGradient : public GradientShaderBase { + public: + SweepGradient(const Point& center, float t0, float t1, const std::vector& colors, + const std::vector& positions); + + std::unique_ptr asFragmentProcessor(const FPArgs& args) const override; + + private: + float bias; + float scale; +}; +} // namespace tgfx diff --git a/src/shaders/ImageShader.cpp b/src/shaders/ImageShader.cpp new file mode 100644 index 00000000..ab11cf48 --- /dev/null +++ b/src/shaders/ImageShader.cpp @@ -0,0 +1,51 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "ImageShader.h" +#include "gpu/TextureSampler.h" +#include "gpu/processors/TiledTextureEffect.h" +#include "images/ImageSource.h" + +namespace tgfx { +std::shared_ptr Shader::MakeImageShader(std::shared_ptr image, TileMode tileModeX, + TileMode tileModeY, SamplingOptions sampling) { + if (image == nullptr) { + return nullptr; + } + auto shader = std::shared_ptr( + new ImageShader(std::move(image), tileModeX, tileModeY, sampling)); + shader->weakThis = shader; + return shader; +} + +std::unique_ptr ImageShader::asFragmentProcessor(const FPArgs& args) const { + auto matrix = Matrix::I(); + if (!ComputeTotalInverse(args, &matrix)) { + return nullptr; + } + auto processor = image->asFragmentProcessor(args.context, args.surfaceFlags, tileModeX, tileModeY, + sampling, &matrix); + if (processor == nullptr) { + return nullptr; + } + if (image->isAlphaOnly()) { + return processor; + } + return FragmentProcessor::MulChildByInputAlpha(std::move(processor)); +} +} // namespace tgfx diff --git a/src/shaders/ImageShader.h b/src/shaders/ImageShader.h new file mode 100644 index 00000000..b53f12d0 --- /dev/null +++ b/src/shaders/ImageShader.h @@ -0,0 +1,43 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/Texture.h" +#include "tgfx/core/Image.h" +#include "tgfx/core/Shader.h" + +namespace tgfx { +class ImageShader : public Shader { + public: + std::unique_ptr asFragmentProcessor(const FPArgs& args) const override; + + private: + ImageShader(std::shared_ptr image, TileMode tileModeX, TileMode tileModeY, + SamplingOptions sampling) + : image(std::move(image)), tileModeX(tileModeX), tileModeY(tileModeY), sampling(sampling) { + } + + std::shared_ptr image = nullptr; + TileMode tileModeX = TileMode::Clamp; + TileMode tileModeY = TileMode::Clamp; + SamplingOptions sampling; + + friend class Shader; +}; +} // namespace tgfx diff --git a/src/shaders/LocalMatrixShader.cpp b/src/shaders/LocalMatrixShader.cpp new file mode 100644 index 00000000..6df4b900 --- /dev/null +++ b/src/shaders/LocalMatrixShader.cpp @@ -0,0 +1,67 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "LocalMatrixShader.h" +#include "gpu/processors/FragmentProcessor.h" + +namespace tgfx { +std::shared_ptr Shader::makeWithLocalMatrix(const Matrix& matrix, bool isPre) const { + if (matrix.isIdentity()) { + return weakThis.lock(); + } + auto baseShader = weakThis.lock(); + if (baseShader == nullptr) { + return nullptr; + } + std::shared_ptr shader; + if (isPre) { + shader = std::make_shared(std::move(baseShader), matrix, Matrix::I()); + } else { + shader = std::make_shared(std::move(baseShader), Matrix::I(), matrix); + } + shader->weakThis = shader; + return shader; +} + +std::shared_ptr LocalMatrixShader::makeWithLocalMatrix(const Matrix& matrix, + bool isPre) const { + if (matrix.isIdentity()) { + return weakThis.lock(); + } + std::shared_ptr shader; + if (isPre) { + auto localMatrix = _preLocalMatrix; + localMatrix.preConcat(matrix); + shader = std::make_shared(proxyShader, localMatrix, _postLocalMatrix); + } else { + auto localMatrix = _postLocalMatrix; + localMatrix.postConcat(matrix); + shader = std::make_shared(proxyShader, _preLocalMatrix, localMatrix); + } + shader->weakThis = shader; + return shader; +} + +std::unique_ptr LocalMatrixShader::asFragmentProcessor( + const FPArgs& args) const { + FPArgs fpArgs(args); + fpArgs.preLocalMatrix.setConcat(_preLocalMatrix, fpArgs.preLocalMatrix); + fpArgs.postLocalMatrix.setConcat(fpArgs.postLocalMatrix, _postLocalMatrix); + return proxyShader->asFragmentProcessor(fpArgs); +} +} // namespace tgfx diff --git a/src/shaders/LocalMatrixShader.h b/src/shaders/LocalMatrixShader.h new file mode 100644 index 00000000..384d0531 --- /dev/null +++ b/src/shaders/LocalMatrixShader.h @@ -0,0 +1,46 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Matrix.h" +#include "tgfx/core/Shader.h" + +namespace tgfx { +class LocalMatrixShader final : public Shader { + public: + LocalMatrixShader(std::shared_ptr proxy, const Matrix& preLocalMatrix, + const Matrix& postLocalMatrix) + : proxyShader(std::move(proxy)), _preLocalMatrix(preLocalMatrix), + _postLocalMatrix(postLocalMatrix) { + } + + bool isOpaque() const override { + return proxyShader->isOpaque(); + } + + std::shared_ptr makeWithLocalMatrix(const Matrix& matrix, bool isPre) const override; + + std::unique_ptr asFragmentProcessor(const FPArgs& args) const override; + + private: + std::shared_ptr proxyShader; + const Matrix _preLocalMatrix; + const Matrix _postLocalMatrix; +}; +} // namespace tgfx diff --git a/src/shapes/PathProxy.cpp b/src/shapes/PathProxy.cpp new file mode 100644 index 00000000..0b16e188 --- /dev/null +++ b/src/shapes/PathProxy.cpp @@ -0,0 +1,110 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "PathProxy.h"
+#include "tgfx/core/PathEffect.h"
+#include "utils/Log.h"
+
+namespace tgfx {
+class FillPathProxy : public PathProxy {
+ public:
+  explicit FillPathProxy(const Path& path) : path(path) {
+  }
+
+  Rect getBounds(float scale) const override {
+    auto bounds = path.getBounds();
+    bounds.scale(scale, scale);
+    return bounds;
+  }
+
+  Path getPath(float scale) const override {
+    auto fillPath = path;
+    fillPath.transform(Matrix::MakeScale(scale));
+    return fillPath;
+  }
+
+ private:
+  Path path = {};
+};
+
+class FillTextProxy : public PathProxy {
+ public:
+  explicit FillTextProxy(std::shared_ptr<TextBlob> textBlob) : textBlob(std::move(textBlob)) {
+  }
+
+  Rect getBounds(float scale) const override {
+    auto bounds = textBlob->getBounds();
+    bounds.scale(scale, scale);
+    return bounds;
+  }
+
+  Path getPath(float scale) const override {
+    Path fillPath = {};
+    textBlob->getPath(&fillPath);
+    fillPath.transform(Matrix::MakeScale(scale));
+    return fillPath;
+  }
+
+ private:
+  std::shared_ptr<TextBlob> textBlob = nullptr;
+};
+
+class StrokeTextProxy : public PathProxy {
+ public:
+  explicit StrokeTextProxy(std::shared_ptr<TextBlob> textBlob, const Stroke& stroke)
+      : textBlob(std::move(textBlob)), stroke(stroke) {
+  }
+
+  Rect getBounds(float scale) const override {
+    auto bounds = textBlob->getBounds(&stroke);
+    bounds.scale(scale, scale);
+    return bounds;
+  }
+
+  Path getPath(float scale) const override {
+    Path fillPath = {};
+    textBlob->getPath(&fillPath, &stroke);
+    fillPath.transform(Matrix::MakeScale(scale));
+    return fillPath;
+  }
+
+ private:
+  std::shared_ptr<TextBlob> textBlob = nullptr;
+  Stroke stroke = {};
+};
+
+std::unique_ptr<PathProxy> PathProxy::MakeFromFill(const Path& path) {
+  return std::make_unique<FillPathProxy>(path);
+}
+
+std::unique_ptr<PathProxy> PathProxy::MakeFromFill(std::shared_ptr<TextBlob> textBlob) {
+  if (textBlob->hasColor()) {
+    return nullptr;
+  }
+  return std::make_unique<FillTextProxy>(std::move(textBlob));
+}
+
+std::unique_ptr<PathProxy> PathProxy::MakeFromStroke(std::shared_ptr<TextBlob> textBlob,
+                                                     const Stroke& stroke) {
+  if (textBlob->hasColor() || stroke.width <= 0) {
+    return nullptr;
+  }
+  return std::make_unique<StrokeTextProxy>(std::move(textBlob), stroke);
+}
+
+} // namespace tgfx
diff --git a/src/shapes/PathProxy.h b/src/shapes/PathProxy.h
new file mode 100644
index 00000000..f2c946f7
--- /dev/null
+++ b/src/shapes/PathProxy.h
@@ -0,0 +1,42 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "gpu/GpuPaint.h" +#include "tgfx/core/Path.h" +#include "tgfx/core/Stroke.h" +#include "tgfx/core/TextBlob.h" + +namespace tgfx { +class PathProxy { + public: + static std::unique_ptr MakeFromFill(const Path& path); + + static std::unique_ptr MakeFromFill(std::shared_ptr textBlob); + + static std::unique_ptr MakeFromStroke(std::shared_ptr textBlob, + const Stroke& stroke); + + virtual ~PathProxy() = default; + + virtual Rect getBounds(float scale) const = 0; + + virtual Path getPath(float scale) const = 0; +}; +} // namespace tgfx diff --git a/src/shapes/PathShape.cpp b/src/shapes/PathShape.cpp new file mode 100644 index 00000000..c4559646 --- /dev/null +++ b/src/shapes/PathShape.cpp @@ -0,0 +1,30 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "PathShape.h" + +namespace tgfx { +PathShape::PathShape(std::unique_ptr pathProxy, float resolutionScale) + : Shape(resolutionScale), proxy(std::move(pathProxy)) { + bounds = proxy->getBounds(resolutionScale); +} + +Path PathShape::getFillPath() const { + return proxy->getPath(resolutionScale()); +} +} // namespace tgfx diff --git a/src/shapes/PathShape.h b/src/shapes/PathShape.h new file mode 100644 index 00000000..89ab3d1b --- /dev/null +++ b/src/shapes/PathShape.h @@ -0,0 +1,41 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "PathProxy.h" +#include "tgfx/core/Shape.h" + +namespace tgfx { +class PathShape : public Shape { + public: + explicit PathShape(std::unique_ptr proxy, float resolutionScale); + + Rect getBounds() const override { + return bounds; + } + + protected: + Rect bounds = Rect::MakeEmpty(); + + Path getFillPath() const; + + private: + std::unique_ptr proxy = nullptr; +}; +} // namespace tgfx diff --git a/src/shapes/RRectShape.cpp b/src/shapes/RRectShape.cpp new file mode 100644 index 00000000..63753085 --- /dev/null +++ b/src/shapes/RRectShape.cpp @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "RRectShape.h" +#include "gpu/GpuPaint.h" +#include "gpu/ops/RRectOp.h" + +namespace tgfx { +RRectShape::RRectShape(const RRect& rRect, float resolutionScale) + : Shape(resolutionScale), rRect(rRect) { + this->rRect.scale(resolutionScale, resolutionScale); +} + +std::unique_ptr RRectShape::makeOp(GpuPaint* paint, const Matrix& viewMatrix, + uint32_t) const { + return RRectOp::Make(paint->color, rRect, viewMatrix); +} +} // namespace tgfx diff --git a/src/shapes/RRectShape.h b/src/shapes/RRectShape.h new file mode 100644 index 00000000..012e188b --- /dev/null +++ b/src/shapes/RRectShape.h @@ -0,0 +1,38 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Shape.h" + +namespace tgfx { +class RRectShape : public Shape { + public: + explicit RRectShape(const RRect& rRect, float resolutionScale = 1.0f); + + Rect getBounds() const override { + return rRect.rect; + } + + private: + RRect rRect = {}; + + std::unique_ptr makeOp(GpuPaint* paint, const Matrix& viewMatrix, + uint32_t surfaceFlags) const override; +}; +} // namespace tgfx diff --git a/src/shapes/RectShape.cpp b/src/shapes/RectShape.cpp new file mode 100644 index 00000000..763d2dc4 --- /dev/null +++ b/src/shapes/RectShape.cpp @@ -0,0 +1,32 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "RectShape.h" +#include "gpu/GpuPaint.h" +#include "gpu/ops/FillRectOp.h" + +namespace tgfx { +RectShape::RectShape(const Rect& rect, float resolutionScale) : Shape(resolutionScale), rect(rect) { + this->rect.scale(resolutionScale, resolutionScale); +} + +std::unique_ptr RectShape::makeOp(GpuPaint* paint, const Matrix& viewMatrix, + uint32_t) const { + return FillRectOp::Make(paint->color, rect, viewMatrix); +} +} // namespace tgfx diff --git a/src/shapes/RectShape.h b/src/shapes/RectShape.h new file mode 100644 index 00000000..603a1f2c --- /dev/null +++ b/src/shapes/RectShape.h @@ -0,0 +1,38 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/Shape.h" + +namespace tgfx { +class RectShape : public Shape { + public: + explicit RectShape(const Rect& rect, float resolutionScale = 1.0f); + + Rect getBounds() const override { + return rect; + } + + private: + Rect rect = {}; + + std::unique_ptr makeOp(GpuPaint* paint, const Matrix& viewMatrix, + uint32_t surfaceFlags) const override; +}; +} // namespace tgfx diff --git a/src/shapes/TextureShape.cpp b/src/shapes/TextureShape.cpp new file mode 100644 index 00000000..e0fefde9 --- /dev/null +++ b/src/shapes/TextureShape.cpp @@ -0,0 +1,69 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "TextureShape.h" +#include "gpu/ops/FillRectOp.h" +#include "gpu/processors/TextureEffect.h" +#include "tgfx/core/Mask.h" + +namespace tgfx { +TextureShape::TextureShape(std::unique_ptr pathProxy, float resolutionScale) + : PathShape(std::move(pathProxy), resolutionScale) { +} + +std::unique_ptr TextureShape::makeOp(GpuPaint* paint, const Matrix& viewMatrix, + uint32_t surfaceFlags) const { + auto resourceCache = paint->context->resourceCache(); + auto texture = std::static_pointer_cast(resourceCache->findUniqueResource(uniqueKey)); + if (texture != nullptr) { + return makeTextureOp(texture, paint, viewMatrix); + } + auto path = getFillPath(); + auto width = ceilf(bounds.width()); + auto height = ceilf(bounds.height()); + auto mask = Mask::Make(static_cast(width), static_cast(height)); + if (!mask) { + return nullptr; + } + auto matrix = Matrix::MakeTrans(-bounds.x(), -bounds.y()); + matrix.postScale(width / bounds.width(), height / bounds.height()); + mask->setMatrix(matrix); + mask->fillPath(path); + // TODO(pengweilv): mip map + texture = Texture::MakeFrom(paint->context, mask->makeBuffer()); + if (texture == nullptr) { + return nullptr; + } + if (!(surfaceFlags & SurfaceOptions::DisableCacheFlag)) { + texture->assignUniqueKey(uniqueKey); + } + return makeTextureOp(texture, paint, viewMatrix); +} + +std::unique_ptr TextureShape::makeTextureOp(std::shared_ptr texture, + GpuPaint* paint, + const Matrix& viewMatrix) const { + auto maskLocalMatrix = Matrix::I(); + maskLocalMatrix.postTranslate(-bounds.x(), -bounds.y()); + maskLocalMatrix.postScale(static_cast(texture->width()) / bounds.width(), + static_cast(texture->height()) / bounds.height()); + paint->coverageFragmentProcessors.emplace_back(FragmentProcessor::MulInputByChildAlpha( + TextureEffect::Make(std::move(texture), SamplingOptions(), &maskLocalMatrix))); + return FillRectOp::Make(paint->color, bounds, viewMatrix); +} +} // 
namespace tgfx diff --git a/src/shapes/TextureShape.h b/src/shapes/TextureShape.h new file mode 100644 index 00000000..84225e0a --- /dev/null +++ b/src/shapes/TextureShape.h @@ -0,0 +1,35 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "PathShape.h" + +namespace tgfx { +class TextureShape : public PathShape { + public: + explicit TextureShape(std::unique_ptr proxy, float resolutionScale = 1.0f); + + private: + std::unique_ptr makeOp(GpuPaint* paint, const Matrix& viewMatrix, + uint32_t surfaceFlags) const override; + + std::unique_ptr makeTextureOp(std::shared_ptr texture, GpuPaint* paint, + const Matrix& viewMatrix) const; +}; +} // namespace tgfx diff --git a/src/shapes/TriangulatingShape.cpp b/src/shapes/TriangulatingShape.cpp new file mode 100644 index 00000000..f4c10b2d --- /dev/null +++ b/src/shapes/TriangulatingShape.cpp @@ -0,0 +1,54 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "TriangulatingShape.h" +#include "core/PathRef.h" +#include "gpu/GpuBuffer.h" +#include "gpu/ops/TriangulatingPathOp.h" + +namespace tgfx { +TriangulatingShape::TriangulatingShape(std::unique_ptr pathProxy, float resolutionScale) + : PathShape(std::move(pathProxy), resolutionScale) { +} + +std::unique_ptr TriangulatingShape::makeOp(GpuPaint* paint, const Matrix& viewMatrix, + uint32_t surfaceFlags) const { + auto resourceCache = paint->context->resourceCache(); + auto buffer = std::static_pointer_cast(resourceCache->findUniqueResource(uniqueKey)); + if (buffer != nullptr) { + auto vertexCount = buffer->size() / (sizeof(float) * 3); + return std::make_unique(paint->color, buffer, vertexCount, bounds, + viewMatrix); + } + auto path = getFillPath(); + std::vector vertices = {}; + int count = PathRef::ToAATriangles(path, bounds, &vertices); + if (count == 0) { + return nullptr; + } + buffer = GpuBuffer::Make(paint->context, BufferType::Vertex, &vertices[0], + vertices.size() * sizeof(float)); + if (buffer == nullptr) { + return nullptr; + } + if (!(surfaceFlags & SurfaceOptions::DisableCacheFlag)) { + buffer->assignUniqueKey(uniqueKey); + } + return std::make_unique(paint->color, buffer, count, bounds, viewMatrix); +} +} // namespace tgfx diff --git a/src/shapes/TriangulatingShape.h b/src/shapes/TriangulatingShape.h new file mode 100644 index 00000000..ce5e94df --- /dev/null +++ b/src/shapes/TriangulatingShape.h @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "PathProxy.h" +#include "PathShape.h" + +namespace tgfx { +class TriangulatingShape : public PathShape { + public: + explicit TriangulatingShape(std::unique_ptr proxy, float resolutionScale = 1.0f); + + private: + std::unique_ptr makeOp(GpuPaint* paint, const Matrix& viewMatrix, + uint32_t surfaceFlags) const override; +}; +} // namespace tgfx diff --git a/src/utils/Buffer.cpp b/src/utils/Buffer.cpp new file mode 100644 index 00000000..5c6a078a --- /dev/null +++ b/src/utils/Buffer.cpp @@ -0,0 +1,121 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/utils/Buffer.h" +#include +#include "utils/Log.h" + +namespace tgfx { +Buffer::Buffer(size_t size) { + alloc(size); +} + +Buffer::Buffer(const void* data, size_t size) { + if (data == nullptr || size == 0) { + return; + } + alloc(size); + if (_data != nullptr) { + memcpy(_data, data, size); + } +} + +Buffer::Buffer(std::shared_ptr data) : Buffer(data->data(), data->size()) { +} + +Buffer::~Buffer() { + delete[] _data; +} + +bool Buffer::alloc(size_t size) { + if (_data != nullptr) { + delete[] _data; + _data = nullptr; + _size = 0; + } + _data = size > 0 ? new (std::nothrow) uint8_t[size] : nullptr; + if (_data != nullptr) { + _size = size; + } + return _data != nullptr; +} + +std::shared_ptr Buffer::release() { + if (isEmpty()) { + return nullptr; + } + auto data = Data::MakeAdopted(_data, _size, Data::DeleteProc); + _data = nullptr; + _size = 0; + return data; +} + +void Buffer::reset() { + if (isEmpty()) { + return; + } + delete[] _data; + _data = nullptr; + _size = 0; +} + +void Buffer::clear() { + if (isEmpty()) { + return; + } + memset(_data, 0, _size); +} + +std::shared_ptr Buffer::copyRange(size_t offset, size_t length) { + length = getClampedLength(offset, length); + if (length == 0) { + return nullptr; + } + return Data::MakeWithCopy(_data + offset, length); +} + +void Buffer::writeRange(size_t offset, size_t length, const void* bytes) { + length = getClampedLength(offset, length); + if (length == 0) { + return; + } + memcpy(_data + offset, bytes, length); +} + +uint8_t Buffer::operator[](size_t index) const { + DEBUG_ASSERT(index >= 0 && index < _size); + return _data[index]; +} + +uint8_t& Buffer::operator[](size_t index) { + DEBUG_ASSERT(index >= 0 && index < _size); + return _data[index]; +} + +size_t Buffer::getClampedLength(size_t offset, size_t length) const { + size_t available = _size; + if (offset >= available || length == 0) { + return 0; + } + available -= offset; + if (length > available) { + length = available; + } + return length; +} +} // namespace tgfx diff --git a/src/utils/BytesKey.cpp b/src/utils/BytesKey.cpp new file mode 100644 index 00000000..1f87c4f9 --- /dev/null +++ b/src/utils/BytesKey.cpp @@ -0,0 +1,67 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
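A small, self-contained sketch of the Buffer API defined above (illustrative only; it uses just the members visible in Buffer.cpp and assumes tgfx/utils/Buffer.h declares them):

#include <cstdint>
#include "tgfx/utils/Buffer.h"

// Allocate scratch space, zero it, patch four bytes in place, and read one back.
uint8_t PatchAndRead() {
  tgfx::Buffer buffer(16);         // alloc() leaves the storage uninitialized
  buffer.clear();                  // zero-fill the whole buffer
  const uint8_t magic[4] = {0x89, 0x50, 0x4E, 0x47};
  buffer.writeRange(4, 4, magic);  // offset/length are clamped to the buffer size internally
  return buffer[4];                // 0x89
}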
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/utils/BytesKey.h" +#include + +namespace tgfx { +union DataConverter { + float floatValue; + uint8_t bytes[4]; + uint32_t uintValue; +}; + +union PointerConverter { + const void* pointer; + uint32_t uintValues[2]; +}; + +void BytesKey::write(uint32_t value) { + values.push_back(value); +} + +void BytesKey::write(const void* value) { + PointerConverter converter = {}; + converter.pointer = value; + values.push_back(converter.uintValues[0]); + static size_t size = sizeof(intptr_t); + if (size > 4) { + values.push_back(converter.uintValues[1]); + } +} + +void BytesKey::write(const uint8_t value[4]) { + DataConverter converter = {}; + memcpy(converter.bytes, value, 4); + values.push_back(converter.uintValue); +} + +void BytesKey::write(float value) { + DataConverter converter = {}; + converter.floatValue = value; + values.push_back(converter.uintValue); +} + +size_t BytesKeyHasher::operator()(const BytesKey& key) const { + auto hash = key.values.size(); + for (auto& value : key.values) { + hash ^= value + 0x9e3779b9 + (hash << 6u) + (hash >> 2u); + } + return hash; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/utils/Clock.cpp b/src/utils/Clock.cpp new file mode 100644 index 00000000..599a4d0b --- /dev/null +++ b/src/utils/Clock.cpp @@ -0,0 +1,80 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
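A sketch of how the key/hasher pair above fits together (illustrative only; HashSamplerKey is a made-up helper name, not part of the patch):

#include <cstddef>
#include "tgfx/utils/BytesKey.h"

// Pack a pointer and a float into 32-bit words, then mix them with BytesKeyHasher.
size_t HashSamplerKey(const void* texture, float scale) {
  tgfx::BytesKey key = {};
  key.write(texture);  // one or two words, depending on the pointer width
  key.write(scale);    // the float's bit pattern stored as one word
  return tgfx::BytesKeyHasher()(key);
}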
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/utils/Clock.h" +#include "utils/Log.h" + +namespace tgfx { +int64_t Clock::Now() { + static const auto START_TIME = std::chrono::high_resolution_clock::now(); + auto now = std::chrono::high_resolution_clock::now(); + auto us = std::chrono::duration_cast(now - START_TIME); + return static_cast(us.count()); +} + +Clock::Clock() { + startTime = Now(); +} + +void Clock::reset() { + startTime = Now(); + markers = {}; +} + +int64_t Clock::elapsedTime() { + return Now() - startTime; +} + +void Clock::mark(const std::string& name) { + if (name.empty()) { + LOGE("Clock::mark(): An empty marker name was specified!"); + return; + } + auto result = markers.find(name); + if (result != markers.end()) { + LOGE("Clock::mark(): The specified marker name '%s' already exists!", name.c_str()); + return; + } + markers[name] = Now(); +} + +int64_t Clock::measure(const std::string& makerFrom, const std::string& makerTo) const { + int64_t start, end; + if (!makerFrom.empty()) { + auto result = markers.find(makerFrom); + if (result == markers.end()) { + LOGE("Clock::measure(): The specified makerFrom '%s' does not exist!", makerFrom.c_str()); + return 0; + } + start = result->second; + } else { + start = startTime; + } + if (!makerTo.empty()) { + auto result = markers.find(makerTo); + if (result == markers.end()) { + LOGE("Clock::measure(): The specified makerTo '%s' does not exist!", makerTo.c_str()); + return 0; + } + end = result->second; + } else { + end = Now(); + } + return end - start; +} +} // namespace tgfx diff --git a/src/utils/DataView.cpp b/src/utils/DataView.cpp new file mode 100644 index 00000000..9b956809 --- /dev/null +++ b/src/utils/DataView.cpp @@ -0,0 +1,290 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/utils/DataView.h" +#include "utils/Log.h" + +namespace tgfx { +static ByteOrder EndianTest() { + int i = 1; + auto p = static_cast(static_cast(&i)); + return (*p == 1) ? 
ByteOrder::LittleEndian : ByteOrder::BigEndian; +} + +static const ByteOrder NATIVE_BYTE_ORDER = EndianTest(); + +union Bit8 { + uint8_t bytes[1]; + int8_t intValue; + uint8_t uintValue; + bool boolValue; +}; + +union Bit16 { + uint8_t bytes[2]; + int16_t intValue; + uint16_t uintValue; +}; + +union Bit32 { + uint8_t bytes[4]; + int32_t intValue; + uint32_t uintValue; + float floatValue; +}; + +union Bit64 { + uint8_t bytes[8]; + int64_t intValue; + uint64_t uintValue; + double doubleValue; +}; + +DataView::DataView(const uint8_t* buffer, size_t length, ByteOrder byteOrder) + : _bytes(buffer), _size(length), _byteOrder(byteOrder) { + if (_bytes == nullptr || _size == 0) { + reset(); + } +} + +DataView::DataView(uint8_t* buffer, size_t length, ByteOrder byteOrder) + : _bytes(buffer), _writableBytes(buffer), _size(length), _byteOrder(byteOrder) { + if (_bytes == nullptr || _size == 0) { + reset(); + } +} + +void DataView::reset() { + _bytes = nullptr; + _writableBytes = nullptr; + _size = 0; +} + +void DataView::reset(const uint8_t* bytes, size_t length) { + if (bytes == nullptr || length == 0) { + reset(); + } else { + _bytes = bytes; + _writableBytes = nullptr; + _size = length; + } +} + +void DataView::reset(uint8_t* bytes, size_t length) { + if (bytes == nullptr || length == 0) { + reset(); + } else { + _bytes = bytes; + _writableBytes = bytes; + _size = length; + } +} + +bool DataView::getBoolean(size_t offset) const { + Bit8 data = {}; + if (!readData(offset, data.bytes, 1)) { + return false; + } + return data.uintValue != 0; +} + +int8_t DataView::getInt8(size_t offset) const { + Bit8 data = {}; + if (!readData(offset, data.bytes, 1)) { + return 0; + } + return data.intValue; +} + +uint8_t DataView::getUint8(size_t offset) const { + Bit8 data = {}; + if (!readData(offset, data.bytes, 1)) { + return 0; + } + return data.uintValue; +} + +int16_t DataView::getInt16(size_t offset) const { + Bit16 data = {}; + if (!readData(offset, data.bytes, 2)) { + return 0; + } + return data.intValue; +} + +uint16_t DataView::getUint16(size_t offset) const { + Bit16 data = {}; + if (!readData(offset, data.bytes, 2)) { + return 0; + } + return data.uintValue; +} + +int32_t DataView::getInt32(size_t offset) const { + Bit32 data = {}; + if (!readData(offset, data.bytes, 4)) { + return 0; + } + return data.intValue; +} + +uint32_t DataView::getUint32(size_t offset) const { + Bit32 data = {}; + if (!readData(offset, data.bytes, 4)) { + return 0; + } + return data.uintValue; +} + +int64_t DataView::getInt64(size_t offset) const { + Bit64 data = {}; + if (!readData(offset, data.bytes, 8)) { + return 0; + } + return data.intValue; +} + +uint64_t DataView::getUint64(size_t offset) const { + Bit64 data = {}; + if (!readData(offset, data.bytes, 8)) { + return 0; + } + return data.uintValue; +} + +float DataView::getFloat(size_t offset) const { + Bit32 data = {}; + if (!readData(offset, data.bytes, 4)) { + return 0.0f; + } + return data.floatValue; +} + +double DataView::getDouble(size_t offset) const { + Bit64 data = {}; + if (!readData(offset, data.bytes, 8)) { + return 0.0; + } + return data.doubleValue; +} + +bool DataView::setBoolean(size_t offset, bool value) { + Bit8 data = {}; + data.boolValue = value; + return writeData(offset, data.bytes, 1); +} + +bool DataView::setInt8(size_t offset, int8_t value) { + Bit8 data = {}; + data.intValue = value; + return writeData(offset, data.bytes, 1); +} + +bool DataView::setUint8(size_t offset, uint8_t value) { + Bit8 data = {}; + data.uintValue = value; + return 
writeData(offset, data.bytes, 1); +} + +bool DataView::setInt16(size_t offset, int16_t value) { + Bit16 data = {}; + data.intValue = value; + return writeData(offset, data.bytes, 2); +} + +bool DataView::setUint16(size_t offset, uint16_t value) { + Bit16 data = {}; + data.uintValue = value; + return writeData(offset, data.bytes, 2); +} + +bool DataView::setInt32(size_t offset, int32_t value) { + Bit32 data = {}; + data.intValue = value; + return writeData(offset, data.bytes, 4); +} + +bool DataView::setUint32(size_t offset, uint32_t value) { + Bit32 data = {}; + data.uintValue = value; + return writeData(offset, data.bytes, 4); +} + +bool DataView::setInt64(size_t offset, int64_t value) { + Bit64 data = {}; + data.intValue = value; + return writeData(offset, data.bytes, 8); +} + +bool DataView::setUint64(size_t offset, uint64_t value) { + Bit64 data = {}; + data.uintValue = value; + return writeData(offset, data.bytes, 8); +} + +bool DataView::setFloat(size_t offset, float value) { + Bit32 data = {}; + data.floatValue = value; + return writeData(offset, data.bytes, 4); +} + +bool DataView::setDouble(size_t offset, double value) { + Bit64 data = {}; + data.doubleValue = value; + return writeData(offset, data.bytes, 8); +} + +bool DataView::readData(size_t offset, uint8_t* data, size_t byteSize) const { + if (_size - offset < byteSize) { + LOGE("DataView::readData(): End of bytes was encountered!"); + return false; + } + if (_byteOrder == NATIVE_BYTE_ORDER) { + for (int i = 0; i < static_cast(byteSize); i++) { + data[i] = _bytes[offset++]; + } + } else { + for (int i = static_cast(byteSize) - 1; i >= 0; i--) { + data[i] = _bytes[offset++]; + } + } + return true; +} + +bool DataView::writeData(size_t offset, const uint8_t* data, size_t byteSize) { + if (_writableBytes == nullptr) { + LOGE("DataView::writeData(): This DataView is read-only!"); + return false; + } + if (_size - offset < byteSize) { + LOGE("DataView::writeData(): End of bytes was encountered!"); + return false; + } + if (_byteOrder == NATIVE_BYTE_ORDER) { + for (int i = 0; i < static_cast(byteSize); i++) { + _writableBytes[offset++] = data[i]; + } + } else { + for (int i = static_cast(byteSize) - 1; i >= 0; i--) { + _writableBytes[offset++] = data[i]; + } + } + return true; +} + +} // namespace tgfx diff --git a/src/utils/EnumHasher.h b/src/utils/EnumHasher.h new file mode 100644 index 00000000..1c5c8bd2 --- /dev/null +++ b/src/utils/EnumHasher.h @@ -0,0 +1,32 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
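A round-trip sketch of the byte-order handling in DataView above (illustrative; it relies only on members shown in DataView.cpp):

#include <cstdint>
#include "tgfx/utils/DataView.h"

// Write a uint32 in big-endian order and read it back; readData/writeData reverse the byte
// order whenever the requested ByteOrder differs from the host's native order.
uint32_t RoundTripBigEndian(uint32_t value) {
  uint8_t storage[4] = {};
  tgfx::DataView view(storage, sizeof(storage), tgfx::ByteOrder::BigEndian);
  view.setUint32(0, value);
  return view.getUint32(0);  // equals the original value on either endianness
}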
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include + +namespace tgfx { + +struct EnumHasher { + template + size_t operator()(T t) const { + return static_cast(t); + } +}; + +} // namespace tgfx diff --git a/src/utils/Log.h b/src/utils/Log.h new file mode 100644 index 00000000..d5573fde --- /dev/null +++ b/src/utils/Log.h @@ -0,0 +1,57 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/platform/Print.h" + +namespace tgfx { +#define ABORT(msg) \ + do { \ + tgfx::PrintError("%s:%d: fatal error: \"%s\"\n", __FILE__, __LINE__, #msg); \ + ::abort(); \ + } while (false) + +#ifdef NO_LOG + +#define LOGI(...) +#define LOGE(...) +#define ASSERT(assertion) + +#else + +#define LOGI(...) tgfx::PrintLog(__VA_ARGS__) +#define LOGE(...) tgfx::PrintError(__VA_ARGS__) +#define ASSERT(assertion) \ + if (!(assertion)) { \ + ABORT(#assertion); \ + } + +#endif + +#if DEBUG + +#define DEBUG_ASSERT(assertion) ASSERT(assertion) + +#else + +#define DEBUG_ASSERT(assertion) + +#endif +} // namespace tgfx diff --git a/src/utils/MathExtra.h b/src/utils/MathExtra.h new file mode 100644 index 00000000..8b7a7d88 --- /dev/null +++ b/src/utils/MathExtra.h @@ -0,0 +1,70 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include + +namespace tgfx { +static constexpr float M_PI_F = static_cast(M_PI); +static constexpr float M_PI_2_F = static_cast(M_PI_2); +static constexpr float FLOAT_NEARLY_ZERO = 1.0f / (1 << 12); +static constexpr float FLOAT_SQRT2 = 1.41421356f; + +static inline float DegreesToRadians(float degrees) { + return degrees * (static_cast(M_PI) / 180.0f); +} + +static inline float RadiansToDegrees(float radians) { + return radians * (180.0f / static_cast(M_PI)); +} + +static inline bool FloatNearlyZero(float x, float tolerance = FLOAT_NEARLY_ZERO) { + return fabsf(x) <= tolerance; +} + +static inline bool FloatNearlyEqual(float x, float y, float tolerance = FLOAT_NEARLY_ZERO) { + return fabsf(x - y) <= tolerance; +} + +static inline float SinSnapToZero(float radians) { + float v = sinf(radians); + return FloatNearlyZero(v) ? 0.0f : v; +} + +static inline float CosSnapToZero(float radians) { + float v = cosf(radians); + return FloatNearlyZero(v) ? 0.0f : v; +} + +static inline bool FloatsAreFinite(const float array[], int count) { + float prod = 0; + for (int i = 0; i < count; ++i) { + prod *= array[i]; + } + return prod == 0; +} + +/** + * Returns true if value is a power of 2. Does not explicitly check for value <= 0. + */ +template +constexpr inline bool IsPow2(T value) { + return (value & (value - 1)) == 0; +} +} // namespace tgfx diff --git a/src/utils/OrientationHelper.h b/src/utils/OrientationHelper.h new file mode 100644 index 00000000..057ecf0e --- /dev/null +++ b/src/utils/OrientationHelper.h @@ -0,0 +1,99 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "tgfx/core/EncodedOrigin.h" + +namespace tgfx { +/* + * @param data Buffer to read bytes from + * @param isLittleEndian Output parameter + * Indicates if the data is little endian + * Is unaffected on false returns + */ +static inline bool is_valid_endian_marker(const uint8_t* data, bool* isLittleEndian) { + // II indicates Intel (little endian) and MM indicates motorola (big endian). 
+ if (('I' != data[0] || 'I' != data[1]) && ('M' != data[0] || 'M' != data[1])) { + return false; + } + + *isLittleEndian = ('I' == data[0]); + return true; +} + +static inline uint32_t get_endian_int(const uint8_t* data, bool littleEndian) { + if (littleEndian) { + return (data[3] << 24) | (data[2] << 16) | (data[1] << 8) | (data[0]); + } + + return (data[0] << 24) | (data[1] << 16) | (data[2] << 8) | (data[3]); +} + +inline uint16_t get_endian_short(const uint8_t* data, bool littleEndian) { + if (littleEndian) { + return (data[1] << 8) | (data[0]); + } + + return (data[0] << 8) | (data[1]); +} + +static bool is_orientation_marker(const uint8_t* data, size_t data_length, EncodedOrigin* origin) { + bool littleEndian; + // We need eight bytes to read the endian marker and the offset, below. + if (data_length < 8 || !is_valid_endian_marker(data, &littleEndian)) { + return false; + } + + // Get the offset from the start of the marker. + // Though this only reads four bytes, use a larger int in case it overflows. + uint64_t offset = get_endian_int(data + 4, littleEndian); + + // Require that the marker is at least large enough to contain the number of entries. + if (data_length < offset + 2) { + return false; + } + uint32_t numEntries = get_endian_short(data + offset, littleEndian); + + // Tag (2 bytes), Datatype (2 bytes), Number of elements (4 bytes), Data (4 bytes) + const uint32_t kEntrySize = 12; + const auto max = static_cast((data_length - offset - 2) / kEntrySize); + numEntries = numEntries < max ? numEntries : max; + + // Advance the data to the start of the entries. + data += offset + 2; + + const uint16_t kOriginTag = 0x112; + const uint16_t kOriginType = 3; + for (uint32_t i = 0; i < numEntries; i++, data += kEntrySize) { + uint16_t tag = get_endian_short(data, littleEndian); + uint16_t type = get_endian_short(data + 2, littleEndian); + uint32_t count = get_endian_int(data + 4, littleEndian); + if (kOriginTag == tag && kOriginType == type && 1 == count) { + uint16_t val = get_endian_short(data + 8, littleEndian); + if (0 < val && val <= static_cast(EncodedOrigin::LeftBottom)) { + *origin = (EncodedOrigin)val; + return true; + } + } + } + + return false; +} + +} // namespace tgfx \ No newline at end of file diff --git a/src/utils/PixelFormatUtil.cpp b/src/utils/PixelFormatUtil.cpp new file mode 100644 index 00000000..7ec00dc6 --- /dev/null +++ b/src/utils/PixelFormatUtil.cpp @@ -0,0 +1,66 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "PixelFormatUtil.h" + +namespace tgfx { +PixelFormat ColorTypeToPixelFormat(ColorType type) { + switch (type) { + case ColorType::RGBA_8888: + return PixelFormat::RGBA_8888; + case ColorType::ALPHA_8: + return PixelFormat::ALPHA_8; + case ColorType::BGRA_8888: + return PixelFormat::BGRA_8888; + case ColorType::Gray_8: + return PixelFormat::GRAY_8; + default: + return PixelFormat::Unknown; + } +} + +ColorType PixelFormatToColorType(PixelFormat format) { + switch (format) { + case PixelFormat::RGBA_8888: + return ColorType::RGBA_8888; + case PixelFormat::ALPHA_8: + return ColorType::ALPHA_8; + case PixelFormat::BGRA_8888: + return ColorType::BGRA_8888; + case PixelFormat::GRAY_8: + return ColorType::Gray_8; + default: + return ColorType::Unknown; + } +} + +size_t PixelFormatBytesPerPixel(PixelFormat format) { + switch (format) { + case PixelFormat::ALPHA_8: + case PixelFormat::GRAY_8: + return 1; + case PixelFormat::RG_88: + return 2; + case PixelFormat::RGBA_8888: + case PixelFormat::BGRA_8888: + return 4; + default: + return 0; + } +} +} // namespace tgfx diff --git a/src/utils/PixelFormatUtil.h b/src/utils/PixelFormatUtil.h new file mode 100644 index 00000000..d5a1f0a8 --- /dev/null +++ b/src/utils/PixelFormatUtil.h @@ -0,0 +1,31 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/ImageInfo.h" +#include "tgfx/gpu/PixelFormat.h" + +namespace tgfx { +PixelFormat ColorTypeToPixelFormat(ColorType type); + +ColorType PixelFormatToColorType(PixelFormat format); + +size_t PixelFormatBytesPerPixel(PixelFormat format); +} // namespace tgfx diff --git a/src/utils/Stream.cpp b/src/utils/Stream.cpp new file mode 100644 index 00000000..e5e6f66a --- /dev/null +++ b/src/utils/Stream.cpp @@ -0,0 +1,74 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "tgfx/utils/Stream.h"
+#include "utils/Log.h"
+
+namespace tgfx {
+
+class FileStream : public Stream {
+ public:
+  FileStream(FILE* file, size_t length) : file(file), length(length) {
+  }
+
+  ~FileStream() override {
+    fclose(file);
+  }
+
+  size_t size() const override {
+    return length;
+  }
+
+  bool seek(size_t position) override {
+    return fseek(file, position, SEEK_SET) == 0;
+  }
+
+  bool move(int offset) override {
+    return fseek(file, offset, SEEK_CUR) == 0;
+  }
+
+  size_t read(void* buffer, size_t size) override {
+    return fread(buffer, 1, size, file);
+  }
+
+  bool rewind() override {
+    return fseek(file, 0, SEEK_SET) == 0;
+  }
+
+ private:
+  FILE* file = nullptr;
+  size_t length = 0;
+};
+
+std::unique_ptr<Stream> Stream::MakeFromFile(const std::string& filePath) {
+  auto file = fopen(filePath.c_str(), "rb");
+  if (file == nullptr) {
+    LOGE("file open failed! filePath:%s \n", filePath.c_str());
+    return nullptr;
+  }
+  fseek(file, 0, SEEK_END);
+  auto length = ftell(file);
+  if (length <= 0) {
+    fclose(file);
+    return nullptr;
+  }
+  fseek(file, 0, SEEK_SET);
+  return std::make_unique<FileStream>(file, length);
+}
+
+} // namespace tgfx
diff --git a/src/utils/Task.cpp b/src/utils/Task.cpp
new file mode 100644
index 00000000..c3ada5c8
--- /dev/null
+++ b/src/utils/Task.cpp
@@ -0,0 +1,90 @@
+/////////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Tencent is pleased to support the open source community by making tgfx available.
+//
+// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved.
+//
+// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// https://opensource.org/licenses/BSD-3-Clause
+//
+// unless required by applicable law or agreed to in writing, software distributed under the
+// license is distributed on an "as is" basis, without warranties or conditions of any kind,
+// either express or implied. see the license for the specific language governing permissions
+// and limitations under the license.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////////
+
+#include "tgfx/utils/Task.h"
+#include "utils/Log.h"
+#include "utils/TaskGroup.h"
+
+namespace tgfx {
+std::shared_ptr<Task> Task::Run(std::function<void()> block) {
+  if (block == nullptr) {
+    return nullptr;
+  }
+  auto task = std::shared_ptr<Task>(new Task(std::move(block)));
+  if (!TaskGroup::GetInstance()->pushTask(task)) {
+    task->execute();
+  }
+  return task;
+}
+
+Task::Task(std::function<void()> block) : block(std::move(block)) {
+}
+
+bool Task::executing() {
+  std::lock_guard<std::mutex> autoLock(locker);
+  return _executing;
+}
+
+bool Task::cancelled() {
+  std::lock_guard<std::mutex> autoLock(locker);
+  return _cancelled;
+}
+
+bool Task::finished() {
+  std::lock_guard<std::mutex> autoLock(locker);
+  return !_executing && !_cancelled;
+}
+
+void Task::wait() {
+  std::unique_lock<std::mutex> autoLock(locker);
+  if (!_executing) {
+    return;
+  }
+  // Try to remove the task from the queue. Execute it directly on the current thread if the task is
+  // not in the queue. This is to avoid the deadlock situation.
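A usage sketch for the Task API above (illustrative only; the lambda body is arbitrary work):

#include <vector>
#include "tgfx/utils/Task.h"

// Task::Run() queues the closure on the shared TaskGroup pool and falls back to running it
// inline if the pool rejects it, so the returned task can always be waited on.
std::vector<int> ComputeSquares() {
  std::vector<int> results(64, 0);
  auto task = tgfx::Task::Run([&results] {
    for (size_t i = 0; i < results.size(); i++) {
      results[i] = static_cast<int>(i * i);
    }
  });
  task->wait();  // blocks until the closure finishes, or runs it inline if still queued
  return results;
}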
+ if (removeTask()) { + block(); + _executing = false; + condition.notify_all(); + return; + } + condition.wait(autoLock); +} + +void Task::cancel() { + std::unique_lock autoLock(locker); + if (!_executing) { + return; + } + if (removeTask()) { + _executing = false; + _cancelled = true; + } +} + +bool Task::removeTask() { + return TaskGroup::GetInstance()->removeTask(this); +} + +void Task::execute() { + block(); + std::lock_guard auoLock(locker); + _executing = false; + condition.notify_all(); +} +} // namespace tgfx diff --git a/src/utils/TaskGroup.cpp b/src/utils/TaskGroup.cpp new file mode 100644 index 00000000..97d7524a --- /dev/null +++ b/src/utils/TaskGroup.cpp @@ -0,0 +1,164 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "TaskGroup.h" +#include +#include +#include "utils/Log.h" + +#ifdef __APPLE__ +#include +#endif + +namespace tgfx { +static constexpr auto THREAD_TIMEOUT = std::chrono::seconds(10); + +int GetCPUCores() { + int cpuCores = 0; +#ifdef __APPLE__ + size_t len = sizeof(cpuCores); + // We can get the exact number of physical CPUs on apple platforms. + sysctlbyname("hw.physicalcpu", &cpuCores, &len, nullptr, 0); +#else + cpuCores = static_cast(std::thread::hardware_concurrency()); +#endif + if (cpuCores <= 0) { + cpuCores = 8; + } + return cpuCores; +} + +TaskGroup* TaskGroup::GetInstance() { + static auto& taskGroup = *new TaskGroup(); + return &taskGroup; +} + +void TaskGroup::RunLoop(TaskGroup* taskGroup) { + while (true) { + auto task = taskGroup->popTask(); + if (!task) { + break; + } + task->execute(); + } +} + +static void ReleaseThread(std::thread* thread) { + if (thread->joinable()) { + thread->join(); + } + delete thread; +} + +void OnAppExit() { + // Forces all pending tasks to be finished when the app is exiting to prevent accessing wild + // pointers. + TaskGroup::GetInstance()->exit(); +} + +TaskGroup::TaskGroup() { + std::atexit(OnAppExit); +} + +bool TaskGroup::checkThreads() { + static const int CPUCores = GetCPUCores(); + static const int MaxThreads = CPUCores > 16 ? 
16 : CPUCores; + while (!timeoutThreads.empty()) { + auto threadID = timeoutThreads.back(); + timeoutThreads.pop_back(); + auto result = std::find_if(threads.begin(), threads.end(), + [=](std::thread* thread) { return thread->get_id() == threadID; }); + if (result != threads.end()) { + ReleaseThread(*result); + threads.erase(result); + } + } + auto totalThreads = static_cast(threads.size()); + if (activeThreads < totalThreads || totalThreads >= MaxThreads) { + return true; + } + auto thread = new (std::nothrow) std::thread(&TaskGroup::RunLoop, this); + if (thread) { + activeThreads++; + threads.push_back(thread); + // LOGI("TaskGroup: A task thread is created, the current number of threads : %lld", + // threads.size()); + } + return !threads.empty(); +} + +bool TaskGroup::pushTask(std::shared_ptr task) { + std::lock_guard autoLock(locker); +#ifdef TGFX_BUILD_FOR_WEB + return false; +#endif + if (exited || !checkThreads()) { + return false; + } + tasks.push_back(std::move(task)); + condition.notify_one(); + return true; +} + +std::shared_ptr TaskGroup::popTask() { + std::unique_lock autoLock(locker); + activeThreads--; + while (true) { + if (tasks.empty()) { + auto status = condition.wait_for(autoLock, THREAD_TIMEOUT); + if (exited || status == std::cv_status::timeout) { + auto threadID = std::this_thread::get_id(); + timeoutThreads.push_back(threadID); + // LOGI("TaskGroup: A task thread is exited, the current number of threads : %lld", + // threads.size() - expiredThreads.size()); + return nullptr; + } + } else { + auto task = tasks.front(); + tasks.pop_front(); + activeThreads++; + // LOGI("TaskGroup: A task is running, the current active threads : %lld", + // activeThreads); + return task; + } + } +} + +bool TaskGroup::removeTask(Task* target) { + std::lock_guard autoLock(locker); + auto position = std::find_if(tasks.begin(), tasks.end(), + [=](std::shared_ptr task) { return task.get() == target; }); + if (position == tasks.end()) { + return false; + } + tasks.erase(position); + return true; +} + +void TaskGroup::exit() { + locker.lock(); + exited = true; + tasks.clear(); + condition.notify_all(); + locker.unlock(); + for (auto& thread : threads) { + ReleaseThread(thread); + } + threads.clear(); +} +} // namespace tgfx diff --git a/src/utils/TaskGroup.h b/src/utils/TaskGroup.h new file mode 100644 index 00000000..a432fde5 --- /dev/null +++ b/src/utils/TaskGroup.h @@ -0,0 +1,52 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include +#include +#include +#include "tgfx/utils/Task.h" + +namespace tgfx { +class TaskGroup { + private: + std::mutex locker = {}; + std::condition_variable condition = {}; + int activeThreads = 0; + bool exited = false; + std::list> tasks = {}; + std::vector threads = {}; + std::vector timeoutThreads = {}; + + static TaskGroup* GetInstance(); + static void RunLoop(TaskGroup* taskGroup); + + TaskGroup(); + bool checkThreads(); + bool pushTask(std::shared_ptr task); + std::shared_ptr popTask(); + bool removeTask(Task* task); + void exit(); + + friend class Task; + friend void OnAppExit(); +}; +} // namespace tgfx diff --git a/src/utils/USE.h b/src/utils/USE.h new file mode 100644 index 00000000..afff29b9 --- /dev/null +++ b/src/utils/USE.h @@ -0,0 +1,46 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +namespace tgfx { + +/** + * The USE(x, ...) template is used to silence C++ compiler warnings issued for (yet) unused + * variables (typically parameters). + */ + +#if defined(_MSC_VER) +#define USE(expr) \ + __pragma(warning(push)) /* Disable warning C4127: conditional expression is constant */ \ + __pragma(warning(disable : 4127)) do { \ + if (false) { \ + (void)(expr); \ + } \ + } \ + while (false) __pragma(warning(pop)) +#else +#define USE(expr) \ + do { \ + if (false) { \ + (void)(expr); \ + } \ + } while (false) +#endif + +} // namespace tgfx diff --git a/src/utils/UTF.cpp b/src/utils/UTF.cpp new file mode 100644 index 00000000..d1b72c90 --- /dev/null +++ b/src/utils/UTF.cpp @@ -0,0 +1,110 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "tgfx/utils/UTF.h" + +namespace tgfx { +static int utf8_byte_type(uint8_t c) { + if (c < 0x80) { + return 1; + } else if (c < 0xC0) { + return 0; + } else if (c >= 0xF5 || (c & 0xFE) == 0xC0) { + return -1; + } else { + int value = (((0xe5 << 24) >> ((unsigned)c >> 4 << 1)) & 3) + 1; + return value; + } +} + +static bool utf8_type_is_valid_leading_byte(int type) { + return type > 0; +} + +static bool utf8_byte_is_continuation(uint8_t c) { + return utf8_byte_type(c) == 0; +} + +int UTF::CountUTF8(const char* utf8, size_t byteLength) { + if (!utf8) { + return -1; + } + int count = 0; + const char* stop = utf8 + byteLength; + while (utf8 < stop) { + int type = utf8_byte_type(*(const uint8_t*)utf8); + if (!utf8_type_is_valid_leading_byte(type) || utf8 + type > stop) { + return -1; + } + while (type-- > 1) { + ++utf8; + if (!utf8_byte_is_continuation(*(const uint8_t*)utf8)) { + return -1; + } + } + ++utf8; + ++count; + } + return count; +} + +template +static int32_t next_fail(const T** ptr, const T* end) { + *ptr = end; + return -1; +} + +static constexpr inline int32_t left_shift(int32_t value, int32_t shift) { + return (int32_t)((uint32_t)value << shift); +} + +int32_t UTF::NextUTF8(const char** ptr, const char* end) { + if (!ptr || !end) { + return -1; + } + const uint8_t* p = (const uint8_t*)*ptr; + if (!p || p >= (const uint8_t*)end) { + return next_fail(ptr, end); + } + int c = *p; + int hic = c << 24; + + if (!utf8_type_is_valid_leading_byte(utf8_byte_type(c))) { + return next_fail(ptr, end); + } + if (hic < 0) { + uint32_t mask = (uint32_t)~0x3F; + hic = left_shift(hic, 1); + do { + ++p; + if (p >= (const uint8_t*)end) { + return next_fail(ptr, end); + } + uint8_t nextByte = *p; + if (!utf8_byte_is_continuation(nextByte)) { + return next_fail(ptr, end); + } + c = (c << 6) | (nextByte & 0x3F); + mask <<= 5; + } while ((hic = left_shift(hic, 1)) < 0); + c &= ~mask; + } + *ptr = (char*)p + 1; + return c; +} +} // namespace tgfx diff --git a/src/utils/UniqueID.cpp b/src/utils/UniqueID.cpp new file mode 100644 index 00000000..927d9c51 --- /dev/null +++ b/src/utils/UniqueID.cpp @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "UniqueID.h" + +namespace tgfx { + +static std::atomic_uint32_t IDCount = {1}; + +uint32_t UniqueID::Next() { + return IDCount++; +} +} // namespace tgfx \ No newline at end of file diff --git a/src/utils/UniqueID.h b/src/utils/UniqueID.h new file mode 100644 index 00000000..bb0b36b4 --- /dev/null +++ b/src/utils/UniqueID.h @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include + +namespace tgfx { +class UniqueID { + public: + static uint32_t Next(); +}; +} // namespace tgfx diff --git a/src/vectors/coregraphics/CGMask.cpp b/src/vectors/coregraphics/CGMask.cpp new file mode 100644 index 00000000..2392eb7c --- /dev/null +++ b/src/vectors/coregraphics/CGMask.cpp @@ -0,0 +1,216 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "CGMask.h" +#include "CGTypeface.h" +#include "CGUtil.h" +#include "core/SimpleTextBlob.h" +#include "platform/apple/BitmapContextUtil.h" +#include "tgfx/core/Mask.h" +#include "tgfx/core/Pixmap.h" + +namespace tgfx { +static void Iterator(PathVerb verb, const Point points[4], void* info) { + auto cgPath = reinterpret_cast(info); + switch (verb) { + case PathVerb::Move: + CGPathMoveToPoint(cgPath, nullptr, points[0].x, points[0].y); + break; + case PathVerb::Line: + CGPathAddLineToPoint(cgPath, nullptr, points[1].x, points[1].y); + break; + case PathVerb::Quad: + CGPathAddQuadCurveToPoint(cgPath, nullptr, points[1].x, points[1].y, points[2].x, + points[2].y); + break; + case PathVerb::Cubic: + CGPathAddCurveToPoint(cgPath, nullptr, points[1].x, points[1].y, points[2].x, points[2].y, + points[3].x, points[3].y); + break; + case PathVerb::Close: + CGPathCloseSubpath(cgPath); + break; + } +} + +std::shared_ptr Mask::Make(int width, int height, bool tryHardware) { + auto pixelRef = PixelRef::Make(width, height, true, tryHardware); + if (pixelRef == nullptr) { + return nullptr; + } + pixelRef->clear(); + return std::make_shared(std::move(pixelRef)); +} + +static void DrawPath(const Path& path, CGContextRef cgContext, const ImageInfo& info) { + auto cgPath = CGPathCreateMutable(); + path.decompose(Iterator, cgPath); + + CGContextSetShouldAntialias(cgContext, true); + static const CGFloat white[] = {1.f, 1.f, 1.f, 1.f}; + if (path.isInverseFillType()) { + auto rect = CGRectMake(0.f, 0.f, info.width(), info.height()); + CGContextAddRect(cgContext, rect); + CGContextSetFillColor(cgContext, white); + CGContextFillPath(cgContext); + CGContextAddPath(cgContext, cgPath); + if (path.getFillType() == PathFillType::InverseWinding) { + CGContextClip(cgContext); + } else { + CGContextEOClip(cgContext); + } + CGContextClearRect(cgContext, rect); + } else { + CGContextAddPath(cgContext, cgPath); + CGContextSetFillColor(cgContext, white); + if (path.getFillType() == PathFillType::Winding) { + CGContextFillPath(cgContext); + } else { + CGContextEOFillPath(cgContext); + } + } + CGPathRelease(cgPath); +} + +static CGImageRef CreateCGImage(const Path& path, void* pixels, const ImageInfo& info, float left, + float top, const std::array& gammaTable) { + auto cgContext = CreateBitmapContext(info, pixels); + if (cgContext == nullptr) { + return nullptr; + } + CGContextTranslateCTM(cgContext, -left, -top); + DrawPath(path, cgContext, info); + CGContextFlush(cgContext); + auto* p = static_cast(pixels); + auto stride = info.rowBytes(); + for (int y = 0; y < info.height(); ++y) { + for (int x = 0; x < info.width(); ++x) { + p[x] = gammaTable[p[x]]; + } + p += stride; + } + CGContextSynchronize(cgContext); + auto image = CGBitmapContextCreateImage(cgContext); + CGContextRelease(cgContext); + return image; +} + +void CGMask::onFillPath(const Path& path, const Matrix& matrix, bool needsGammaCorrection) { + if (path.isEmpty()) { + return; + } + auto pixels = pixelRef->lockWritablePixels(); + if (pixels == nullptr) { + return; + } + const auto& info = pixelRef->info(); + auto cgContext = CreateBitmapContext(info, pixels); + if (cgContext == nullptr) { + pixelRef->unlockPixels(); + return; + } + + auto finalPath = path; + auto totalMatrix = matrix; + totalMatrix.postScale(1, -1); + totalMatrix.postTranslate(0, static_cast(info.height())); + finalPath.transform(totalMatrix); + auto bounds = finalPath.getBounds(); + 
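+  // Rounding the bounds outward keeps the dirty rectangle conservative: it covers every pixel
+  // the transformed path can touch before the area is reported to markContentDirty() below.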
bounds.roundOut(); + markContentDirty(bounds, true); + + if (!needsGammaCorrection) { + DrawPath(finalPath, cgContext, info); + CGContextRelease(cgContext); + pixelRef->unlockPixels(); + return; + } + int width = static_cast(bounds.width()); + int height = static_cast(bounds.height()); + auto tempBuffer = PixelBuffer::Make(width, height, true, false); + if (tempBuffer == nullptr) { + pixelRef->unlockPixels(); + return; + } + auto* tempPixels = tempBuffer->lockPixels(); + if (tempPixels == nullptr) { + pixelRef->unlockPixels(); + return; + } + memset(tempPixels, 0, tempBuffer->info().byteSize()); + auto image = CreateCGImage(finalPath, tempPixels, tempBuffer->info(), bounds.left, bounds.top, + PixelRefMask::GammaTable()); + tempBuffer->unlockPixels(); + if (image == nullptr) { + pixelRef->unlockPixels(); + return; + } + auto rect = CGRectMake(bounds.left, bounds.top, bounds.width(), bounds.height()); + CGContextDrawImage(cgContext, rect, image); + pixelRef->unlockPixels(); +} + +bool CGMask::onFillText(const TextBlob* textBlob, const Stroke* stroke, const Matrix& matrix) { + if (textBlob == nullptr || stroke) { + return false; + } + auto blob = static_cast(textBlob); + if (blob->getFont().isFauxBold()) { + return false; + } + auto pixels = pixelRef->lockWritablePixels(); + auto cgContext = CreateBitmapContext(pixelRef->info(), pixels); + if (cgContext == nullptr) { + pixelRef->unlockPixels(); + return false; + } + CGContextSetBlendMode(cgContext, kCGBlendModeCopy); + CGContextSetTextDrawingMode(cgContext, kCGTextFill); + CGContextSetGrayFillColor(cgContext, 1.0f, 1.0f); + CGContextSetShouldAntialias(cgContext, true); + CGContextSetShouldSmoothFonts(cgContext, true); + CGContextSetAllowsFontSubpixelQuantization(cgContext, false); + CGContextSetShouldSubpixelQuantizeFonts(cgContext, false); + CGContextSetAllowsFontSubpixelPositioning(cgContext, true); + CGContextSetShouldSubpixelPositionFonts(cgContext, true); + const auto& font = blob->getFont(); + CTFontRef ctFont = std::static_pointer_cast(font.getTypeface())->getCTFont(); + ctFont = CTFontCreateCopyWithAttributes(ctFont, static_cast(font.getSize()), nullptr, + nullptr); + if (font.isFauxItalic()) { + CGContextSetTextMatrix(cgContext, CGAffineTransformMake(1, 0, -ITALIC_SKEW, 1, 0, 0)); + } + CGContextTranslateCTM(cgContext, 0.f, static_cast(height())); + CGContextScaleCTM(cgContext, 1.f, -1.f); + CGContextConcatCTM(cgContext, MatrixToCGAffineTransform(matrix)); + auto point = CGPointZero; + for (size_t i = 0; i < blob->getGlyphIDs().size(); ++i) { + auto position = blob->getPositions()[i]; + CGContextSaveGState(cgContext); + CGContextTranslateCTM(cgContext, position.x, position.y); + CGContextScaleCTM(cgContext, 1.f, -1.f); + CTFontDrawGlyphs(ctFont, static_cast(&blob->getGlyphIDs()[i]), &point, 1, + cgContext); + CGContextRestoreGState(cgContext); + } + CFRelease(ctFont); + CGContextRelease(cgContext); + pixelRef->unlockPixels(); + return true; +} +} // namespace tgfx diff --git a/src/vectors/coregraphics/CGMask.h b/src/vectors/coregraphics/CGMask.h new file mode 100644 index 00000000..91850274 --- /dev/null +++ b/src/vectors/coregraphics/CGMask.h @@ -0,0 +1,34 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "core/PixelRefMask.h" + +namespace tgfx { +class CGMask : public PixelRefMask { + public: + explicit CGMask(std::shared_ptr pixelRef) : PixelRefMask(std::move(pixelRef)) { + } + + protected: + void onFillPath(const Path& path, const Matrix& matrix, bool needsGammaCorrection) override; + + bool onFillText(const TextBlob* textBlob, const Stroke* stroke, const Matrix& matrix) override; +}; +} // namespace tgfx diff --git a/src/vectors/coregraphics/CGScalerContext.cpp b/src/vectors/coregraphics/CGScalerContext.cpp new file mode 100644 index 00000000..e4e5a1b3 --- /dev/null +++ b/src/vectors/coregraphics/CGScalerContext.cpp @@ -0,0 +1,312 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "vectors/coregraphics/CGScalerContext.h" +#include "CGUtil.h" +#include "platform/apple/BitmapContextUtil.h" +#include "tgfx/core/PathEffect.h" +#include "utils/Log.h" +#include "utils/MathExtra.h" + +namespace tgfx { +inline float Interpolate(const float& a, const float& b, const float& t) { + return a + (b - a) * t; +} + +/** + * Interpolate along the function described by (keys[length], values[length]) + * for the passed searchKey. SearchKeys outside the range keys[0]-keys[Length] + * clamp to the min or max value. This function assumes the number of pairs + * (length) will be small and a linear search is used. + * + * Repeated keys are allowed for discontinuous functions (so long as keys is + * monotonically increasing). If key is the value of a repeated scalar in + * keys the first one will be used. + */ +float FloatInterpFunc(float searchKey, const float keys[], const float values[], int length) { + int right = 0; + while (right < length && keys[right] < searchKey) { + ++right; + } + // Could use sentinel values to eliminate conditionals, but since the + // tables are taken as input, a simpler format is better. 
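+  // Illustrative example using the fake-bold table below: with keys {9, 36} and values
+  // {1/24, 1/32}, a searchKey of 22.5 lies halfway between the keys, so the result is
+  // halfway between the values, roughly 0.0365.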
+ if (right == length) { + return values[length - 1]; + } + if (right == 0) { + return values[0]; + } + // Otherwise, interpolate between right - 1 and right. + float leftKey = keys[right - 1]; + float rightKey = keys[right]; + float t = (searchKey - leftKey) / (rightKey - leftKey); + return Interpolate(values[right - 1], values[right], t); +} + +static const float kStdFakeBoldInterpKeys[] = { + 9.f, + 36.f, +}; +static const float kStdFakeBoldInterpValues[] = { + 1.f / 24.f, + 1.f / 32.f, +}; + +std::unique_ptr CGScalerContext::Make(std::shared_ptr typeface, + float size, bool fauxBold, bool fauxItalic, + bool verticalText) { + if (typeface == nullptr) { + return nullptr; + } + CGScalerContextRec rec; + rec.textSize = size; + if (fauxBold) { + auto fauxBoldScale = FloatInterpFunc(size, kStdFakeBoldInterpKeys, kStdFakeBoldInterpValues, 2); + rec.fauxBoldSize = size * fauxBoldScale; + } + rec.skewX = fauxItalic ? ITALIC_SKEW : 0; + rec.verticalText = verticalText; + return std::unique_ptr(new CGScalerContext(std::move(typeface), rec)); +} + +CGScalerContext::CGScalerContext(std::shared_ptr typeface, CGScalerContextRec rec) + : _typeface(std::move(typeface)), rec(rec) { + CTFontRef font = std::static_pointer_cast(_typeface)->ctFont; + auto matrix = Matrix::I(); + matrix.postSkew(rec.skewX, 0.f); + transform = MatrixToCGAffineTransform(matrix); + ctFont = + CTFontCreateCopyWithAttributes(font, static_cast(rec.textSize), nullptr, nullptr); +} + +CGScalerContext::~CGScalerContext() { + if (ctFont) { + CFRelease(ctFont); + } +} + +FontMetrics CGScalerContext::generateFontMetrics() { + FontMetrics metrics; + auto theBounds = CTFontGetBoundingBox(ctFont); + metrics.top = static_cast(-CGRectGetMaxY(theBounds)); + metrics.ascent = static_cast(-CTFontGetAscent(ctFont)); + metrics.descent = static_cast(CTFontGetDescent(ctFont)); + metrics.bottom = static_cast(-CGRectGetMinY(theBounds)); + metrics.leading = static_cast(CTFontGetLeading(ctFont)); + metrics.xMin = static_cast(CGRectGetMinX(theBounds)); + metrics.xMax = static_cast(CGRectGetMaxX(theBounds)); + metrics.xHeight = static_cast(CTFontGetXHeight(ctFont)); + metrics.capHeight = static_cast(CTFontGetCapHeight(ctFont)); + metrics.underlineThickness = static_cast(CTFontGetUnderlineThickness(ctFont)); + metrics.underlinePosition = -static_cast(CTFontGetUnderlinePosition(ctFont)); + return metrics; +} + +GlyphMetrics CGScalerContext::generateGlyphMetrics(GlyphID glyphID) { + GlyphMetrics glyph; + const auto cgGlyph = static_cast(glyphID); + // The following block produces cgAdvance in CG units (pixels, y up). + CGSize cgAdvance; + if (rec.verticalText) { + CTFontGetAdvancesForGlyphs(ctFont, kCTFontOrientationVertical, &cgGlyph, &cgAdvance, 1); + // Vertical advances are returned as widths instead of heights. + std::swap(cgAdvance.width, cgAdvance.height); + } else { + CTFontGetAdvancesForGlyphs(ctFont, kCTFontOrientationHorizontal, &cgGlyph, &cgAdvance, 1); + } + cgAdvance = CGSizeApplyAffineTransform(cgAdvance, transform); + glyph.advanceX = static_cast(cgAdvance.width); + glyph.advanceY = static_cast(cgAdvance.height); + // Glyphs are always drawn from the horizontal origin. The caller must manually use the result + // of CTFontGetVerticalTranslationsForGlyphs to calculate where to draw the glyph for vertical + // glyphs. As a result, always get the horizontal bounds of a glyph and translate it if the + // glyph is vertical. This avoids any disagreement between the various means of retrieving + // vertical metrics. 
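+  // For example, a CG bounding rect with origin.y == -2 and height == 10 (y up) maps to a
+  // glyph rect whose top is -8 in the y-down glyph space computed below.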
+ // CTFontGetBoundingRectsForGlyphs produces cgBounds in CG units (pixels, y up). + CGRect cgBounds; + CTFontGetBoundingRectsForGlyphs(ctFont, kCTFontOrientationHorizontal, &cgGlyph, &cgBounds, 1); + cgBounds = CGRectApplyAffineTransform(cgBounds, transform); + if (CGRectIsEmpty(cgBounds)) { + return glyph; + } + // Convert cgBounds to Glyph units (pixels, y down). + auto rect = Rect::MakeXYWH(static_cast(cgBounds.origin.x), + static_cast(-cgBounds.origin.y - cgBounds.size.height), + static_cast(cgBounds.size.width), + static_cast(cgBounds.size.height)); + + // We're trying to pack left and top into int16_t, + // and width and height into uint16_t + if (!Rect::MakeXYWH(-32767, -32767, 65535, 65535).contains(rect)) { + return glyph; + } + if (rec.fauxBoldSize != 0) { + rect.outset(rec.fauxBoldSize, rec.fauxBoldSize); + } + rect.roundOut(); + rect.outset(1.f, 1.f); + glyph.left = rect.left; + glyph.top = rect.top; + glyph.width = rect.width(); + glyph.height = rect.height(); + return glyph; +} + +Point CGScalerContext::getVerticalOffset(GlyphID glyphID) const { + // CTFontGetVerticalTranslationsForGlyphs produces cgVertOffset in CG units (pixels, y up). + CGSize cgVertOffset; + CTFontGetVerticalTranslationsForGlyphs(ctFont, &glyphID, &cgVertOffset, 1); + cgVertOffset = CGSizeApplyAffineTransform(cgVertOffset, transform); + Point vertOffset = {static_cast(cgVertOffset.width), + static_cast(cgVertOffset.height)}; + // From CG units (pixels, y up) to Glyph units (pixels, y down). + vertOffset.y = -vertOffset.y; + return vertOffset; +} + +class CTPathGeometrySink { + public: + Path path() { + return _path; + } + + static void ApplyElement(void* ctx, const CGPathElement* element) { + CTPathGeometrySink& self = *static_cast(ctx); + CGPoint* points = element->points; + switch (element->type) { + case kCGPathElementMoveToPoint: + self.started = false; + self.current = points[0]; + break; + case kCGPathElementAddLineToPoint: + if (self.currentIsNot(points[0])) { + self.goingTo(points[0]); + self._path.lineTo(static_cast(points[0].x), -static_cast(points[0].y)); + } + break; + case kCGPathElementAddQuadCurveToPoint: + if (self.currentIsNot(points[0]) || self.currentIsNot(points[1])) { + self.goingTo(points[1]); + self._path.quadTo(static_cast(points[0].x), -static_cast(points[0].y), + static_cast(points[1].x), -static_cast(points[1].y)); + } + break; + case kCGPathElementAddCurveToPoint: + if (self.currentIsNot(points[0]) || self.currentIsNot(points[1]) || + self.currentIsNot(points[2])) { + self.goingTo(points[2]); + self._path.cubicTo(static_cast(points[0].x), -static_cast(points[0].y), + static_cast(points[1].x), -static_cast(points[1].y), + static_cast(points[2].x), -static_cast(points[2].y)); + } + break; + case kCGPathElementCloseSubpath: + if (self.started) { + self._path.close(); + } + break; + default: + LOGE("Unknown path element!"); + break; + } + } + + private: + void goingTo(const CGPoint pt) { + if (!started) { + started = true; + _path.moveTo(static_cast(current.x), -static_cast(current.y)); + } + current = pt; + } + + bool currentIsNot(const CGPoint pt) const { + return current.x != pt.x || current.y != pt.y; + } + + Path _path = {}; + bool started = false; + CGPoint current = {0, 0}; +}; + +bool CGScalerContext::generatePath(GlyphID glyphID, Path* path) { + auto fontFormat = CTFontCopyAttribute(ctFont, kCTFontFormatAttribute); + if (!fontFormat) { + return false; + } + SInt16 format; + CFNumberGetValue(static_cast(fontFormat), kCFNumberSInt16Type, &format); + CFRelease(fontFormat); + 
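+  // Bitmap-only fonts (color emoji, for instance) carry no outline data, so there is no path
+  // to extract for them; the same goes for formats Core Text does not recognize.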
if (format == kCTFontFormatUnrecognized || format == kCTFontFormatBitmap) { + return false; + } + CGAffineTransform xform = transform; + auto cgGlyph = static_cast(glyphID); + auto cgPath = CTFontCreatePathForGlyph(ctFont, cgGlyph, &xform); + if (cgPath) { + CTPathGeometrySink sink; + CGPathApply(cgPath, &sink, CTPathGeometrySink::ApplyElement); + *path = sink.path(); + CFRelease(cgPath); + if (rec.fauxBoldSize != 0) { + auto strokePath = *path; + Stroke stroke(rec.fauxBoldSize); + auto pathEffect = PathEffect::MakeStroke(&stroke); + pathEffect->applyTo(&strokePath); + path->addPath(strokePath, PathOp::Union); + } + } else { + path->reset(); + } + return true; +} + +std::shared_ptr CGScalerContext::generateImage(GlyphID glyphID, Matrix* matrix) { + CGRect cgBounds; + CTFontGetBoundingRectsForGlyphs(ctFont, kCTFontOrientationHorizontal, &glyphID, &cgBounds, 1); + cgBounds = CGRectApplyAffineTransform(cgBounds, transform); + CTFontSymbolicTraits traits = CTFontGetSymbolicTraits(ctFont); + auto hasColor = static_cast(traits & kCTFontTraitColorGlyphs); + auto pixelBuffer = PixelBuffer::Make(static_cast(cgBounds.size.width), + static_cast(cgBounds.size.height), !hasColor); + if (pixelBuffer == nullptr) { + return nullptr; + } + auto pixels = pixelBuffer->lockPixels(); + auto cgContext = CreateBitmapContext(pixelBuffer->info(), pixels); + if (cgContext == nullptr) { + pixelBuffer->unlockPixels(); + return nullptr; + } + CGContextClearRect(cgContext, CGRectMake(0, 0, cgBounds.size.width, cgBounds.size.height)); + CGContextSetTextMatrix(cgContext, transform); + CGContextSetBlendMode(cgContext, kCGBlendModeCopy); + CGContextSetTextDrawingMode(cgContext, kCGTextFill); + CGContextSetShouldAntialias(cgContext, true); + CGContextSetShouldSmoothFonts(cgContext, true); + auto point = CGPointMake(-cgBounds.origin.x, -cgBounds.origin.y); + CTFontDrawGlyphs(ctFont, &glyphID, &point, 1, cgContext); + CGContextRelease(cgContext); + pixelBuffer->unlockPixels(); + if (matrix) { + matrix->setTranslate(static_cast(cgBounds.origin.x), + static_cast(-cgBounds.origin.y - cgBounds.size.height)); + } + return pixelBuffer; +} +} // namespace tgfx diff --git a/src/vectors/coregraphics/CGScalerContext.h b/src/vectors/coregraphics/CGScalerContext.h new file mode 100644 index 00000000..46dc37dc --- /dev/null +++ b/src/vectors/coregraphics/CGScalerContext.h @@ -0,0 +1,59 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "CGTypeface.h" +#include "core/PixelBuffer.h" +#include "tgfx/core/ImageInfo.h" + +namespace tgfx { +class CGScalerContextRec { + public: + float textSize = 12.f; + float skewX = 0.f; + float fauxBoldSize = 0.f; + bool verticalText = false; +}; + +class CGScalerContext { + public: + static std::unique_ptr Make(std::shared_ptr typeface, float size, + bool fauxBold = false, bool fauxItalic = false, + bool verticalText = false); + ~CGScalerContext(); + + FontMetrics generateFontMetrics(); + + GlyphMetrics generateGlyphMetrics(GlyphID glyphID); + + Point getVerticalOffset(GlyphID glyphID) const; + + bool generatePath(GlyphID glyphID, Path* path); + + std::shared_ptr generateImage(GlyphID glyphID, Matrix* matrix); + + private: + CGScalerContext(std::shared_ptr typeface, CGScalerContextRec); + + std::shared_ptr _typeface; + CGScalerContextRec rec; + CTFontRef ctFont = nullptr; + CGAffineTransform transform = CGAffineTransformIdentity; +}; +} // namespace tgfx diff --git a/src/vectors/coregraphics/CGTypeface.cpp b/src/vectors/coregraphics/CGTypeface.cpp new file mode 100644 index 00000000..d8cafcf6 --- /dev/null +++ b/src/vectors/coregraphics/CGTypeface.cpp @@ -0,0 +1,276 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "CGTypeface.h" +#include "CGScalerContext.h" +#include "tgfx/utils/UTF.h" +#include "utils/UniqueID.h" + +namespace tgfx { +std::string StringFromCFString(CFStringRef src) { + static const CFIndex kCStringSize = 128; + char temporaryCString[kCStringSize]; + bzero(temporaryCString, kCStringSize); + CFStringGetCString(src, temporaryCString, kCStringSize, kCFStringEncodingUTF8); + return {temporaryCString}; +} + +std::shared_ptr Typeface::MakeFromName(const std::string& fontFamily, + const std::string& fontStyle) { + CFMutableDictionaryRef cfAttributes = CFDictionaryCreateMutable( + kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); + if (!fontFamily.empty()) { + auto cfFontName = + CFStringCreateWithCString(kCFAllocatorDefault, fontFamily.c_str(), kCFStringEncodingUTF8); + if (cfFontName) { + CFDictionaryAddValue(cfAttributes, kCTFontFamilyNameAttribute, cfFontName); + CFRelease(cfFontName); + } + } + if (!fontStyle.empty()) { + auto cfStyleName = + CFStringCreateWithCString(kCFAllocatorDefault, fontStyle.c_str(), kCFStringEncodingUTF8); + if (cfStyleName) { + CFDictionaryAddValue(cfAttributes, kCTFontStyleNameAttribute, cfStyleName); + CFRelease(cfStyleName); + } + } + std::shared_ptr typeface; + auto cfDesc = CTFontDescriptorCreateWithAttributes(cfAttributes); + if (cfDesc) { + auto ctFont = CTFontCreateWithFontDescriptor(cfDesc, 0, nullptr); + if (ctFont) { + typeface = CGTypeface::Make(ctFont); + CFRelease(ctFont); + } + CFRelease(cfDesc); + } + CFRelease(cfAttributes); + return typeface; +} + +std::shared_ptr Typeface::MakeFromPath(const std::string& fontPath, int) { + CGDataProviderRef cgDataProvider = CGDataProviderCreateWithFilename(fontPath.c_str()); + if (cgDataProvider == nullptr) { + return nullptr; + } + std::shared_ptr typeface; + auto cgFont = CGFontCreateWithDataProvider(cgDataProvider); + if (cgFont) { + auto ctFont = CTFontCreateWithGraphicsFont(cgFont, 0, nullptr, nullptr); + if (ctFont) { + typeface = CGTypeface::Make(ctFont); + CFRelease(ctFont); + } + CFRelease(cgFont); + } + CGDataProviderRelease(cgDataProvider); + return typeface; +} + +std::shared_ptr Typeface::MakeFromBytes(const void* data, size_t length, int) { + auto copyData = Data::MakeWithCopy(data, length); + if (copyData->size() == 0) { + return nullptr; + } + auto cfData = + CFDataCreateWithBytesNoCopy(kCFAllocatorNull, static_cast(copyData->data()), + static_cast(copyData->size()), kCFAllocatorNull); + if (cfData == nullptr) { + return nullptr; + } + std::shared_ptr typeface; + auto cfDesc = CTFontManagerCreateFontDescriptorFromData(cfData); + if (cfDesc) { + auto ctFont = CTFontCreateWithFontDescriptor(cfDesc, 0, nullptr); + if (ctFont) { + typeface = CGTypeface::Make(ctFont, std::move(copyData)); + CFRelease(ctFont); + } + CFRelease(cfDesc); + } + CFRelease(cfData); + return typeface; +} + +std::shared_ptr Typeface::MakeDefault() { + static auto typeface = Typeface::MakeFromName("Lucida Sans", ""); + return typeface; +} + +std::shared_ptr CGTypeface::Make(CTFontRef ctFont, std::shared_ptr data) { + if (ctFont == nullptr) { + return nullptr; + } + auto typeface = std::shared_ptr(new CGTypeface(ctFont, std::move(data))); + typeface->weakThis = typeface; + return typeface; +} + +CGTypeface::CGTypeface(CTFontRef ctFont, std::shared_ptr data) + : _uniqueID(UniqueID::Next()), ctFont(ctFont), data(std::move(data)) { + CFRetain(ctFont); +} + 
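+// Illustrative usage of the factory methods above (the family name is only an example and may
+// not be installed on every system):
+//
+//   auto typeface = Typeface::MakeFromName("PingFang SC", "Regular");
+//   if (typeface != nullptr) {
+//     auto glyphID = typeface->getGlyphID("A");
+//   }
+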
+CGTypeface::~CGTypeface() { + if (ctFont) { + CFRelease(ctFont); + } +} + +std::string CGTypeface::fontFamily() const { + std::string familyName; + auto ctFamilyName = CTFontCopyName(ctFont, kCTFontFamilyNameKey); + if (ctFamilyName != nullptr) { + familyName = StringFromCFString(ctFamilyName); + CFRelease(ctFamilyName); + } + return familyName; +} + +std::string CGTypeface::fontStyle() const { + std::string styleName; + auto ctStyleName = CTFontCopyName(ctFont, kCTFontStyleNameKey); + if (ctStyleName != nullptr) { + styleName = StringFromCFString(ctStyleName); + CFRelease(ctStyleName); + } + return styleName; +} + +int CGTypeface::unitsPerEm() const { + auto cgFont = CTFontCopyGraphicsFont(ctFont, nullptr); + auto ret = CGFontGetUnitsPerEm(cgFont); + CGFontRelease(cgFont); + return ret; +} + +bool CGTypeface::hasColor() const { + CTFontSymbolicTraits traits = CTFontGetSymbolicTraits(ctFont); + return static_cast(traits & kCTFontTraitColorGlyphs); +} + +static size_t ToUTF16(int32_t uni, uint16_t utf16[2]) { + if ((uint32_t)uni > 0x10FFFF) { + return 0; + } + int extra = (uni > 0xFFFF); + if (utf16) { + if (extra) { + utf16[0] = (uint16_t)((0xD800 - 64) + (uni >> 10)); + utf16[1] = (uint16_t)(0xDC00 | (uni & 0x3FF)); + } else { + utf16[0] = (uint16_t)uni; + } + } + return 1 + extra; +} + +GlyphID CGTypeface::getGlyphID(const std::string& name) const { + const char* start = &(name[0]); + auto uni = UTF::NextUTF8(&start, start + name.size()); + UniChar utf16[2] = {0, 0}; + auto srcCount = ToUTF16(uni, utf16); + GlyphID macGlyphs[2] = {0, 0}; + CTFontGetGlyphsForCharacters(ctFont, utf16, macGlyphs, static_cast(srcCount)); + return macGlyphs[0]; +} + +std::shared_ptr CGTypeface::getBytes() const { + return data; +} + +std::shared_ptr CGTypeface::copyTableData(FontTableTag tag) const { + auto cfData = + CTFontCopyTable(ctFont, static_cast(tag), kCTFontTableOptionNoOptions); + if (cfData == nullptr) { + auto cgFont = CTFontCopyGraphicsFont(ctFont, nullptr); + cfData = CGFontCopyTableForTag(cgFont, tag); + CGFontRelease(cgFont); + } + if (cfData == nullptr) { + return nullptr; + } + const auto* bytePtr = CFDataGetBytePtr(cfData); + auto length = CFDataGetLength(cfData); + return Data::MakeAdopted( + bytePtr, length, [](const void*, void* context) { CFRelease((CFDataRef)context); }, + (void*)cfData); +} + +FontMetrics CGTypeface::getMetrics(float size) const { + auto scalerContext = CGScalerContext::Make(weakThis.lock(), size); + if (scalerContext == nullptr) { + return {}; + } + return scalerContext->generateFontMetrics(); +} + +Rect CGTypeface::getBounds(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic) const { + auto scalerContext = CGScalerContext::Make(weakThis.lock(), size, fauxBold, fauxItalic); + if (scalerContext == nullptr) { + return Rect::MakeEmpty(); + } + auto metrics = scalerContext->generateGlyphMetrics(glyphID); + auto bounds = Rect::MakeXYWH(metrics.left, metrics.top, metrics.width, metrics.height); + auto advance = metrics.advanceX; + if (bounds.isEmpty() && advance > 0) { + auto fontMetrics = scalerContext->generateFontMetrics(); + bounds.setLTRB(0, fontMetrics.ascent, advance, fontMetrics.descent); + } + return bounds; +} + +float CGTypeface::getAdvance(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + bool verticalText) const { + auto scalerContext = + CGScalerContext::Make(weakThis.lock(), size, fauxBold, fauxItalic, verticalText); + if (scalerContext == nullptr) { + return 0; + } + auto glyphMetrics = scalerContext->generateGlyphMetrics(glyphID); + 
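+  // Vertical layouts advance along the y axis and horizontal layouts along x, so return the
+  // matching component of the glyph metrics.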
return verticalText ? glyphMetrics.advanceY : glyphMetrics.advanceX; +} + +bool CGTypeface::getPath(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + Path* path) const { + auto scalerContext = CGScalerContext::Make(weakThis.lock(), size, fauxBold, fauxItalic); + if (scalerContext == nullptr) { + return false; + } + return scalerContext->generatePath(glyphID, path); +} + +std::shared_ptr CGTypeface::getGlyphImage(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic, Matrix* matrix) const { + auto scalerContext = CGScalerContext::Make(weakThis.lock(), size, fauxBold, fauxItalic); + if (scalerContext == nullptr) { + return nullptr; + } + return scalerContext->generateImage(glyphID, matrix); +} + +Point CGTypeface::getVerticalOffset(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic) const { + auto scalerContext = CGScalerContext::Make(weakThis.lock(), size, fauxBold, fauxItalic); + if (scalerContext == nullptr) { + return Point::Zero(); + } + return scalerContext->getVerticalOffset(glyphID); +} +} // namespace tgfx diff --git a/src/vectors/coregraphics/CGTypeface.h b/src/vectors/coregraphics/CGTypeface.h new file mode 100644 index 00000000..003ba9e4 --- /dev/null +++ b/src/vectors/coregraphics/CGTypeface.h @@ -0,0 +1,84 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/Typeface.h" + +namespace tgfx { +class CGTypeface : public Typeface { + public: + static std::shared_ptr Make(CTFontRef ctFont, std::shared_ptr data = nullptr); + + ~CGTypeface() override; + + CTFontRef getCTFont() const { + return ctFont; + } + + uint32_t uniqueID() const override { + return _uniqueID; + } + + std::string fontFamily() const override; + + std::string fontStyle() const override; + + int glyphsCount() const override { + return static_cast(CTFontGetGlyphCount(ctFont)); + } + + int unitsPerEm() const override; + + bool hasColor() const override; + + GlyphID getGlyphID(const std::string& name) const override; + + std::shared_ptr getBytes() const override; + + std::shared_ptr copyTableData(FontTableTag tag) const override; + + protected: + Rect getBounds(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic) const override; + + FontMetrics getMetrics(float size) const override; + + Point getVerticalOffset(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic) const override; + + bool getPath(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + Path* path) const override; + + std::shared_ptr getGlyphImage(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic, Matrix* matrix) const override; + + float getAdvance(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + bool verticalText) const override; + + private: + CGTypeface(CTFontRef ctFont, std::shared_ptr data); + + uint32_t _uniqueID = 0; + CTFontRef ctFont = nullptr; + std::shared_ptr data; + std::weak_ptr weakThis; + + friend class CGScalerContext; +}; +} // namespace tgfx diff --git a/src/vectors/coregraphics/CGUtil.cpp b/src/vectors/coregraphics/CGUtil.cpp new file mode 100644 index 00000000..fb83908c --- /dev/null +++ b/src/vectors/coregraphics/CGUtil.cpp @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "CGUtil.h" + +namespace tgfx { +CGAffineTransform MatrixToCGAffineTransform(const Matrix& matrix) { + return CGAffineTransformMake( + static_cast(matrix.getScaleX()), -static_cast(matrix.getSkewY()), + -static_cast(matrix.getSkewX()), static_cast(matrix.getScaleY()), + static_cast(matrix.getTranslateX()), static_cast(matrix.getTranslateY())); +} +} // namespace tgfx diff --git a/src/vectors/coregraphics/CGUtil.h b/src/vectors/coregraphics/CGUtil.h new file mode 100644 index 00000000..55f47981 --- /dev/null +++ b/src/vectors/coregraphics/CGUtil.h @@ -0,0 +1,27 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "CoreGraphics/CGAffineTransform.h" +#include "tgfx/core/Matrix.h" + +namespace tgfx { +static constexpr float ITALIC_SKEW = -0.20f; +CGAffineTransform MatrixToCGAffineTransform(const Matrix& matrix); +} // namespace tgfx diff --git a/src/vectors/freetype/FTFace.cpp b/src/vectors/freetype/FTFace.cpp new file mode 100644 index 00000000..13c1089c --- /dev/null +++ b/src/vectors/freetype/FTFace.cpp @@ -0,0 +1,83 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "FTFace.h" +#include "FTUtil.h" + +namespace tgfx { +std::mutex& FTMutex() { + static std::mutex& mutex = *new std::mutex; + return mutex; +} + +static FTLibrary* gFTLibrary; +static int gFTCount = 0; + +static bool RefFTLibrary() { + if (0 == gFTCount) { + gFTLibrary = new FTLibrary; + } + ++gFTCount; + return gFTLibrary->library() != nullptr; +} + +// Caller must lock f_t_mutex() before calling this function. 
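+// ("f_t_mutex()" refers to the FTMutex() defined at the top of this file. The FT_Library is
+// shared by every FTFace, so it is reference counted and released when the last face is gone.)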
+static void UnrefFTLibrary() { + --gFTCount; + if (0 == gFTCount) { + delete gFTLibrary; + gFTLibrary = nullptr; + } +} + +FTFace::FTFace() { + RefFTLibrary(); +} + +FTFace::~FTFace() { + if (face) { + FT_Done_Face(face); + } + UnrefFTLibrary(); +} + +std::unique_ptr FTFace::Make(const FTFontData& data) { + std::lock_guard lockGuard(FTMutex()); + FT_Open_Args args; + memset(&args, 0, sizeof(args)); + if (data.data) { + args.flags = FT_OPEN_MEMORY; + args.memory_base = static_cast(data.data->data()); + args.memory_size = static_cast(data.data->size()); + } else if (!data.path.empty()) { + args.flags = FT_OPEN_PATHNAME; + args.pathname = const_cast(data.path.c_str()); + } else { + return nullptr; + } + auto face = std::make_unique(); + auto err = FT_Open_Face(gFTLibrary->library(), &args, data.ttcIndex, &face->face); + if (err || !face->face->family_name) { + return nullptr; + } + if (!face->face->charmap) { + FT_Select_Charmap(face->face, FT_ENCODING_MS_SYMBOL); + } + return face; +} +} // namespace tgfx diff --git a/src/vectors/freetype/FTFace.h b/src/vectors/freetype/FTFace.h new file mode 100644 index 00000000..59f6735c --- /dev/null +++ b/src/vectors/freetype/FTFace.h @@ -0,0 +1,40 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include "FTFontData.h" +#include "ft2build.h" +#include FT_FREETYPE_H + +namespace tgfx { +std::mutex& FTMutex(); + +class FTFace { + public: + static std::unique_ptr Make(const FTFontData& data); + + FTFace(); + + ~FTFace(); + + FT_Face face = nullptr; +}; +} // namespace tgfx diff --git a/src/vectors/freetype/FTFontData.h b/src/vectors/freetype/FTFontData.h new file mode 100644 index 00000000..d74f66a1 --- /dev/null +++ b/src/vectors/freetype/FTFontData.h @@ -0,0 +1,37 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "tgfx/core/Data.h" + +namespace tgfx { +struct FTFontData { + FTFontData(std::string path, int ttcIndex) : path(std::move(path)), ttcIndex(ttcIndex) { + } + + FTFontData(const void* data, size_t length, int ttcIndex) + : data(Data::MakeWithCopy(data, length)), ttcIndex(ttcIndex) { + } + + std::string path; + std::shared_ptr data; + int ttcIndex = -1; +}; +} // namespace tgfx diff --git a/src/vectors/freetype/FTMask.cpp b/src/vectors/freetype/FTMask.cpp new file mode 100644 index 00000000..a96329b6 --- /dev/null +++ b/src/vectors/freetype/FTMask.cpp @@ -0,0 +1,141 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "FTMask.h" +#include "FTPath.h" +#include "tgfx/core/Pixmap.h" + +namespace tgfx { +static const FTLibrary& GetLibrary() { + static const auto& library = *new FTLibrary; + return library; +} + +static void Iterator(PathVerb verb, const Point points[4], void* info) { + auto path = reinterpret_cast(info); + switch (verb) { + case PathVerb::Move: + path->moveTo(points[0]); + break; + case PathVerb::Line: + path->lineTo(points[1]); + break; + case PathVerb::Quad: + path->quadTo(points[1], points[2]); + break; + case PathVerb::Cubic: + path->cubicTo(points[1], points[2], points[3]); + break; + case PathVerb::Close: + path->close(); + break; + } +} + +std::shared_ptr Mask::Make(int width, int height, bool tryHardware) { + auto pixelRef = PixelRef::Make(width, height, true, tryHardware); + if (pixelRef == nullptr) { + return nullptr; + } + pixelRef->clear(); + return std::make_shared(std::move(pixelRef)); +} + +struct RasterTarget { + unsigned char* origin; + int pitch; + const uint8_t* gammaTable; +}; + +static void SpanFunc(int y, int, const FT_Span* spans, void* user) { + auto* target = reinterpret_cast(user); + auto* q = target->origin - target->pitch * y + spans->x; + auto c = spans->coverage; + if (target->gammaTable) { + c = target->gammaTable[c]; + } + auto aCount = spans->len; + /** + * For small-spans it is faster to do it by ourselves than calling memset. + * This is mainly due to the cost of the function call. 
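+   * The cases below intentionally fall through: each label stores one coverage byte, so a
+   * span of length N (N <= 7) takes exactly N stores, while longer spans fall back to memset.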
+ */ + switch (aCount) { + case 7: + *q++ = c; + case 6: + *q++ = c; + case 5: + *q++ = c; + case 4: + *q++ = c; + case 3: + *q++ = c; + case 2: + *q++ = c; + case 1: + *q = c; + case 0: + break; + default: + memset(q, c, aCount); + } +} + +void FTMask::onFillPath(const Path& path, const Matrix& matrix, bool needsGammaCorrection) { + if (path.isEmpty()) { + return; + } + auto pixels = pixelRef->lockWritablePixels(); + if (pixels == nullptr) { + return; + } + const auto& info = pixelRef->info(); + auto finalPath = path; + auto totalMatrix = matrix; + totalMatrix.postScale(1, -1); + totalMatrix.postTranslate(0, static_cast(pixelRef->height())); + finalPath.transform(totalMatrix); + auto bounds = finalPath.getBounds(); + bounds.roundOut(); + markContentDirty(bounds, true); + FTPath ftPath = {}; + finalPath.decompose(Iterator, &ftPath); + ftPath.setFillType(path.getFillType()); + auto outlines = ftPath.getOutlines(); + auto ftLibrary = GetLibrary().library(); + auto buffer = static_cast(pixels); + int rows = info.height(); + int pitch = static_cast(info.rowBytes()); + RasterTarget target{}; + target.origin = buffer + (rows - 1) * pitch; + target.pitch = pitch; + if (needsGammaCorrection) { + target.gammaTable = PixelRefMask::GammaTable().data(); + } else { + target.gammaTable = nullptr; + } + FT_Raster_Params params; + params.flags = FT_RASTER_FLAG_AA | FT_RASTER_FLAG_DIRECT; + params.gray_spans = SpanFunc; + params.user = ⌖ + for (auto& outline : outlines) { + FT_Outline_Render(ftLibrary, &(outline->outline), ¶ms); + } + pixelRef->unlockPixels(); +} +} // namespace tgfx diff --git a/src/vectors/freetype/FTMask.h b/src/vectors/freetype/FTMask.h new file mode 100644 index 00000000..243bcc1c --- /dev/null +++ b/src/vectors/freetype/FTMask.h @@ -0,0 +1,32 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "core/PixelRefMask.h" + +namespace tgfx { +class FTMask : public PixelRefMask { + public: + explicit FTMask(std::shared_ptr pixelRef) : PixelRefMask(std::move(pixelRef)) { + } + + protected: + void onFillPath(const Path& path, const Matrix& matrix, bool needsGammaCorrection) override; +}; +} // namespace tgfx diff --git a/src/vectors/freetype/FTPath.cpp b/src/vectors/freetype/FTPath.cpp new file mode 100644 index 00000000..11ec95ff --- /dev/null +++ b/src/vectors/freetype/FTPath.cpp @@ -0,0 +1,130 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "FTPath.h" + +namespace tgfx { +bool FTPath::isEmpty() const { + return points.empty(); +} + +void FTPath::moveTo(const Point& point) { + if (!points.empty()) { + contours.push_back(static_cast(points.size()) - 1); + } + verbs.push_back(PathVerb::Move); + points.push_back({FloatToFDot6(point.x), FloatToFDot6(point.y)}); + tags.push_back(FT_CURVE_TAG_ON); +} + +void FTPath::lineTo(const Point& point) { + verbs.push_back(PathVerb::Line); + points.push_back({FloatToFDot6(point.x), FloatToFDot6(point.y)}); + tags.push_back(FT_CURVE_TAG_ON); +} + +void FTPath::quadTo(const Point& control, const Point& end) { + verbs.push_back(PathVerb::Quad); + points.push_back({FloatToFDot6(control.x), FloatToFDot6(control.y)}); + tags.push_back(FT_CURVE_TAG_CONIC); + points.push_back({FloatToFDot6(end.x), FloatToFDot6(end.y)}); + tags.push_back(FT_CURVE_TAG_ON); +} + +void FTPath::cubicTo(const Point& control1, const Point& control2, const Point& end) { + verbs.push_back(PathVerb::Cubic); + points.push_back({FloatToFDot6(control1.x), FloatToFDot6(control1.y)}); + tags.push_back(FT_CURVE_TAG_CUBIC); + points.push_back({FloatToFDot6(control2.x), FloatToFDot6(control2.y)}); + tags.push_back(FT_CURVE_TAG_CUBIC); + points.push_back({FloatToFDot6(end.x), FloatToFDot6(end.y)}); + tags.push_back(FT_CURVE_TAG_ON); +} + +void FTPath::close() { + if (!verbs.empty() && verbs[verbs.size() - 1] == PathVerb::Close) { + return; + } + auto lastContourPointIndex = contours.empty() ? -1 : contours[contours.size() - 1]; + if (points.size() - lastContourPointIndex < 2) { + return; + } + auto startPoint = points[lastContourPointIndex + 1]; + auto endPoint = points[points.size() - 1]; + if (startPoint.x != endPoint.x || startPoint.y != endPoint.y) { + verbs.push_back(PathVerb::Line); + points.push_back(startPoint); + tags.push_back(FT_CURVE_TAG_ON); + } + verbs.push_back(PathVerb::Close); +} + +std::vector> FTPath::getOutlines() const { + if (points.empty()) { + return {}; + } + std::vector> outlines = {}; + auto outline = std::make_shared(); + auto contourCount = contours.size() + 1; + auto startPointIndex = 0; + for (size_t i = 0; i < contourCount; i++) { + auto contourPointIndex = contours.size() > i ? 
contours[i] : points.size() - 1; + if (contourPointIndex - startPointIndex >= FT_OUTLINE_POINTS_MAX) { + if (!finalizeOutline(outline.get(), startPointIndex)) { + return {}; + } + outlines.push_back(outline); + outline = std::make_shared(); + startPointIndex = contours[i - 1] + 1; + } + outline->contours.push_back(static_cast(contourPointIndex - startPointIndex)); + } + if (finalizeOutline(outline.get(), startPointIndex)) { + outlines.push_back(outline); + } + return outlines; +} + +bool FTPath::finalizeOutline(FreetypeOutline* outline, int startPointIndex) const { + if (outline->contours.empty()) { + return false; + } + auto endPointIndex = outline->contours[outline->contours.size() - 1]; + outline->outline.points = const_cast(&(points[startPointIndex])); + outline->outline.tags = const_cast(&(tags[startPointIndex])); + outline->outline.contours = const_cast(&(outline->contours[0])); + outline->outline.n_points = static_cast(endPointIndex + 1); + outline->outline.n_contours = static_cast(outline->contours.size()); + switch (getFillType()) { + case PathFillType::EvenOdd: + outline->outline.flags = FT_OUTLINE_EVEN_ODD_FILL; + break; + case PathFillType::InverseEvenOdd: + outline->outline.flags = FT_OUTLINE_EVEN_ODD_FILL | FT_OUTLINE_REVERSE_FILL; + break; + case PathFillType::Winding: + outline->outline.flags = FT_OUTLINE_NONE; + break; + case PathFillType::InverseWinding: + outline->outline.flags = FT_OUTLINE_REVERSE_FILL; + break; + } + return true; +} + +} // namespace tgfx \ No newline at end of file diff --git a/src/vectors/freetype/FTPath.h b/src/vectors/freetype/FTPath.h new file mode 100644 index 00000000..ebc7ca80 --- /dev/null +++ b/src/vectors/freetype/FTPath.h @@ -0,0 +1,58 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "FTUtil.h" +#include "tgfx/core/Path.h" +#include FT_STROKER_H + +namespace tgfx { +struct FreetypeOutline { + FT_Outline outline = {}; + std::vector contours = {}; +}; + +class FTPath { + public: + PathFillType getFillType() const { + return fillType; + } + + void setFillType(PathFillType type) { + fillType = type; + } + + bool isEmpty() const; + void moveTo(const Point& point); + void lineTo(const Point& point); + void quadTo(const Point& control, const Point& point); + void cubicTo(const Point& control1, const Point& control2, const Point& point); + void close(); + std::vector> getOutlines() const; + + private: + bool finalizeOutline(FreetypeOutline* outline, int startPointIndex) const; + + std::vector points = {}; + std::vector verbs = {}; + std::vector tags = {}; + std::vector contours = {}; + PathFillType fillType = PathFillType::Winding; +}; +} // namespace tgfx diff --git a/src/vectors/freetype/FTScalerContext.cpp b/src/vectors/freetype/FTScalerContext.cpp new file mode 100644 index 00000000..7180135a --- /dev/null +++ b/src/vectors/freetype/FTScalerContext.cpp @@ -0,0 +1,674 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "FTScalerContext.h" +#include +#include "ft2build.h" +#include FT_BITMAP_H +#include FT_OUTLINE_H +#include FT_SIZES_H +#include FT_TRUETYPE_TABLES_H +#include "FTUtil.h" +#include "skcms.h" +#include "tgfx/core/Pixmap.h" +#include "utils/Log.h" +#include "utils/MathExtra.h" + +namespace tgfx { +static float FTFixedToFloat(FT_Fixed x) { + return static_cast(x) * 1.52587890625e-5f; +} + +static FT_Fixed FloatToFTFixed(float x) { + static constexpr float MaxS32FitsInFloat = 2147483520.f; + static constexpr float MinS32FitsInFloat = -MaxS32FitsInFloat; + x = x < MaxS32FitsInFloat ? x : MaxS32FitsInFloat; + x = x > MinS32FitsInFloat ? x : MinS32FitsInFloat; + return static_cast(x * (1 << 16)); +} + +static void ComputeGivensRotation(const Point& h, Matrix* G) { + const auto& a = h.x; + const auto& b = h.y; + float c, s; + if (0 == b) { + c = copysignf(1.f, a); + s = 0; + } else if (0 == a) { + c = 0; + s = -copysignf(1.f, b); + } else if (fabsf(b) > fabsf(a)) { + auto t = a / b; + auto u = copysignf(sqrtf(1.f + t * t), b); + s = -1.f / u; + c = -s * t; + } else { + auto t = b / a; + auto u = copysignf(sqrtf(1.f + t * t), a); + c = 1.f / u; + s = -c * t; + } + + G->setSinCos(s, c); +} + +bool FTScalerContextRec::computeMatrices(Point* s, Matrix* sA) const { + // A is the 'total' matrix. 
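+  // computeMatrices() factors A into a pure scale s (fed to FreeType via FT_Set_Char_Size or
+  // FT_Select_Size by the caller) and a residual matrix sA with A = sA * Scale(s.x, s.y).
+  // A concrete example, derived from the code below: textSize = 12 with the faux-italic skew of
+  // -0.2 gives A with scaleX = scaleY = 12 and skewX = -2.4; h maps to (12, 0), so the Givens
+  // matrix G is the identity and GA = A; s becomes (12, 12) and sA is the pure skew
+  // [1, -0.2; 0, 1], which is later handed to FT_Set_Transform() through matrix22.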
+ auto A = Matrix::MakeScale(textSize, textSize); + if (skewX != 0) { + A.postSkew(skewX, 0); + } + + // GA is the matrix A with rotation removed. + Matrix GA; + bool skewedOrFlipped = + A.getSkewX() != 0 || A.getSkewY() != 0 || A.getScaleX() < 0 || A.getScaleY() < 0; + if (skewedOrFlipped) { + // QR by Givens rotations. G is Q^T and GA is R. G is rotational (no reflections). + // h is where A maps the horizontal baseline. + Point h = Point::Make(1, 0); + A.mapPoints(&h, 1); + + // G is the Givens Matrix for A (rotational matrix where GA[0][1] == 0). + Matrix G; + ComputeGivensRotation(h, &G); + + GA = G; + GA.preConcat(A); + } else { + GA = A; + } + + // If the 'total' matrix is singular, set the 'scale' to something finite and zero the matrices. + // All underlying ports have issues with zero text size, so use the matricies to zero. + // If one of the scale factors is less than 1/256 then an EM filling square will + // never affect any pixels. + // If there are any nonfinite numbers in the matrix, bail out and set the matrices to zero. + if (fabsf(GA.getScaleX()) <= FLOAT_NEARLY_ZERO || fabsf(GA.getScaleY()) <= FLOAT_NEARLY_ZERO || + !GA.isFinite()) { + s->x = 1.f; + s->y = 1.f; + sA->setScale(0, 0); + return false; + } + + // At this point, given GA, create s. + s->x = fabsf(GA.getScaleX()); + s->y = fabsf(GA.getScaleY()); + + // The 'remaining' matrix sA is the total matrix A without the scale. + if (!skewedOrFlipped) { + sA->reset(); + } else { + *sA = A; + sA->preScale(1.f / s->x, 1.f / s->y); + } + return true; +} + +static constexpr float ITALIC_SKEW = -0.20f; + +std::unique_ptr FTScalerContext::Make(std::shared_ptr typeface, + float size, bool fauxBold, bool fauxItalic, + bool verticalText) { + if (typeface == nullptr) { + return nullptr; + } + FTScalerContextRec rec; + rec.textSize = size; + rec.embolden = fauxBold; + rec.skewX = fauxItalic ? ITALIC_SKEW : 0; + rec.verticalText = verticalText; + auto scalerContext = + std::unique_ptr(new FTScalerContext(std::move(typeface), rec)); + if (!scalerContext->valid()) { + return nullptr; + } + return scalerContext; +} + +/** + * Returns the bitmap strike equal to or just larger than the requested size. + */ +static FT_Int ChooseBitmapStrike(FT_Face face, FT_F26Dot6 scaleY) { + if (face == nullptr) { + return -1; + } + + FT_Pos requestedPPEM = scaleY; // FT_Bitmap_Size::y_ppem is in 26.6 format. + FT_Int chosenStrikeIndex = -1; + FT_Pos chosenPPEM = 0; + for (FT_Int strikeIndex = 0; strikeIndex < face->num_fixed_sizes; ++strikeIndex) { + FT_Pos strikePPEM = face->available_sizes[strikeIndex].y_ppem; + if (strikePPEM == requestedPPEM) { + // exact match - our search stops here + return strikeIndex; + } else if (chosenPPEM < requestedPPEM) { + // attempt to increase chosenPPEM + if (chosenPPEM < strikePPEM) { + chosenPPEM = strikePPEM; + chosenStrikeIndex = strikeIndex; + } + } else { + // attempt to decrease chosenPPEM, but not below requestedPPEM + if (requestedPPEM < strikePPEM && strikePPEM < chosenPPEM) { + chosenPPEM = strikePPEM; + chosenStrikeIndex = strikeIndex; + } + } + } + return chosenStrikeIndex; +} + +FTScalerContext::FTScalerContext(std::shared_ptr typeFace, FTScalerContextRec rec) + : typeface(std::move(typeFace)), rec(rec) { + std::lock_guard lockGuard(FTMutex()); + _face = static_cast(typeface.get())->_face.get(); + loadGlyphFlags |= FT_LOAD_NO_BITMAP; + // Always using FT_LOAD_IGNORE_GLOBAL_ADVANCE_WIDTH to get correct + // advances, as fontconfig and cairo do. 
+ loadGlyphFlags |= FT_LOAD_IGNORE_GLOBAL_ADVANCE_WIDTH; + loadGlyphFlags |= FT_LOAD_TARGET_NORMAL; + if (FT_HAS_COLOR(_face->face)) { + loadGlyphFlags |= FT_LOAD_COLOR; + } + if (rec.verticalText) { + loadGlyphFlags |= FT_LOAD_VERTICAL_LAYOUT; + } + + auto err = FT_New_Size(_face->face, &ftSize); + if (err != FT_Err_Ok) { + LOGE("FT_New_Size(%s) failed.", _face->face->family_name); + return; + } + err = FT_Activate_Size(ftSize); + if (err != FT_Err_Ok) { + LOGE("FT_Activate_Size(%s) failed.", _face->face->family_name); + return; + } + rec.computeMatrices(&scale, &matrix22Scalar); + auto scaleX = FloatToFDot6(scale.x); + auto scaleY = FloatToFDot6(scale.y); + if (FT_IS_SCALABLE(_face->face)) { + err = FT_Set_Char_Size(_face->face, scaleX, scaleY, 72, 72); + if (err != FT_Err_Ok) { + LOGE("FT_Set_CharSize(%s, %f, %f) failed.", _face->face->family_name, scaleX, scaleY); + return; + } + // Adjust the matrix to reflect the actually chosen scale. + // FreeType currently does not allow requesting sizes less than 1, this allows for scaling. + // Don't do this at all sizes as that will interfere with hinting. + if (scale.x < 1 || scale.y < 1) { + auto unitsPerEm = static_cast(_face->face->units_per_EM); + const auto& metrics = _face->face->size->metrics; + auto xPpem = unitsPerEm * FTFixedToFloat(metrics.x_scale) / 64.0f; + auto yPpem = unitsPerEm * FTFixedToFloat(metrics.y_scale) / 64.0f; + matrix22Scalar.preScale(scale.x / xPpem, scale.y / yPpem); + } + } else if (FT_HAS_FIXED_SIZES(_face->face)) { + strikeIndex = ChooseBitmapStrike(_face->face, scaleY); + if (strikeIndex == -1) { + LOGE("No glyphs for font \"%s\" size %f.\n", _face->face->family_name, rec.textSize); + return; + } + + err = FT_Select_Size(_face->face, strikeIndex); + if (err != 0) { + LOGE("FT_Select_Size(%s, %d) failed.", _face->face->family_name, strikeIndex); + strikeIndex = -1; + return; + } + + // Adjust the matrix to reflect the actually chosen scale. + // It is likely that the ppem chosen was not the one requested, this allows for scaling. + matrix22Scalar.preScale(scale.x / static_cast(_face->face->size->metrics.x_ppem), + scale.y / static_cast(_face->face->size->metrics.y_ppem)); + + // FreeType documentation says: + // FT_LOAD_NO_BITMAP -- Ignore bitmap strikes when loading. + // Bitmap-only fonts ignore this flag. + // + // However, in FreeType 2.5.1 color bitmap only fonts do not ignore this flag. + // Force this flag off for bitmap only fonts. + loadGlyphFlags &= ~FT_LOAD_NO_BITMAP; + } else { + return; + } + matrix22.xx = FloatToFTFixed(matrix22Scalar.getScaleX()); + matrix22.xy = FloatToFTFixed(-matrix22Scalar.getSkewX()); + matrix22.yx = FloatToFTFixed(-matrix22Scalar.getSkewY()); + matrix22.yy = FloatToFTFixed(matrix22Scalar.getScaleY()); + FT_Set_Transform(_face->face, &matrix22, nullptr); +} + +FTScalerContext::~FTScalerContext() { + if (ftSize) { + std::lock_guard lockGuard(FTMutex()); + FT_Done_Size(ftSize); + } +} + +int FTScalerContext::setupSize() { + FT_Error err = FT_Activate_Size(ftSize); + if (err != 0) { + return err; + } + FT_Set_Transform(_face->face, &matrix22, nullptr); + return 0; +} + +FontMetrics FTScalerContext::generateFontMetrics() { + std::lock_guard lockGuard(FTMutex()); + FontMetrics metrics; + if (setupSize()) { + return metrics; + } + auto face = _face->face; + auto upem = static_cast(FTTypeface::GetUnitsPerEm(face)); + + // use the os/2 table as a source of reasonable defaults. 
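+  // sxHeight and sCapHeight are stored in font units, so the values below are converted to
+  // pixels by dividing by unitsPerEm and multiplying by the y scale.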
+ auto xHeight = 0.0f; + auto capHeight = 0.0f; + auto* os2 = static_cast(FT_Get_Sfnt_Table(face, FT_SFNT_OS2)); + if (os2) { + xHeight = static_cast(os2->sxHeight) / upem * scale.y; + if (os2->version != 0xFFFF && os2->version >= 2) { + capHeight = static_cast(os2->sCapHeight) / upem * scale.y; + } + } + + // pull from format-specific metrics as needed + float ascent; + float descent; + float leading; + float xmin; + float xmax; + float ymin; + float ymax; + float underlineThickness; + float underlinePosition; + if (face->face_flags & FT_FACE_FLAG_SCALABLE) { // scalable outline font + // FreeType will always use HHEA metrics if they're not zero. + // It completely ignores the OS/2 fsSelection::UseTypoMetrics bit. + // It also ignores the VDMX tables, which are also of interest here + // (and override everything else when they apply). + static const int kUseTypoMetricsMask = (1 << 7); + if (os2 && os2->version != 0xFFFF && (os2->fsSelection & kUseTypoMetricsMask)) { + ascent = -static_cast(os2->sTypoAscender) / upem; + descent = -static_cast(os2->sTypoDescender) / upem; + leading = static_cast(os2->sTypoLineGap) / upem; + } else { + ascent = -static_cast(face->ascender) / upem; + descent = -static_cast(face->descender) / upem; + leading = static_cast(face->height + (face->descender - face->ascender)) / upem; + } + xmin = static_cast(face->bbox.xMin) / upem; + xmax = static_cast(face->bbox.xMax) / upem; + ymin = -static_cast(face->bbox.yMin) / upem; + ymax = -static_cast(face->bbox.yMax) / upem; + underlineThickness = static_cast(face->underline_thickness) / upem; + underlinePosition = -(static_cast(face->underline_position) + + static_cast(face->underline_thickness) / 2) / + upem; + + // we may be able to synthesize x_height and cap_height from outline + if (xHeight == 0.f) { + FT_BBox bbox; + if (getCBoxForLetter('x', &bbox)) { + xHeight = static_cast(bbox.yMax) / 64.0f; + } + } + if (capHeight == 0.f) { + FT_BBox bbox; + if (getCBoxForLetter('H', &bbox)) { + capHeight = static_cast(bbox.yMax) / 64.0f; + } + } + } else if (strikeIndex != -1) { // bitmap strike metrics + auto xppem = static_cast(face->size->metrics.x_ppem); + auto yppem = static_cast(face->size->metrics.y_ppem); + ascent = -static_cast(face->size->metrics.ascender) / (yppem * 64.0f); + descent = -static_cast(face->size->metrics.descender) / (yppem * 64.0f); + leading = (static_cast(face->size->metrics.height) / (yppem * 64.0f)) + ascent - descent; + + xmin = 0.0f; + xmax = static_cast(face->available_sizes[strikeIndex].width) / xppem; + ymin = descent; + ymax = ascent; + + underlineThickness = 0; + underlinePosition = 0; + + auto* post = static_cast(FT_Get_Sfnt_Table(face, FT_SFNT_POST)); + if (post) { + underlineThickness = static_cast(post->underlineThickness) / upem; + underlinePosition = -static_cast(post->underlinePosition) / upem; + } + } else { + return metrics; + } + + // synthesize elements that were not provided by the os/2 table or format-specific metrics + if (xHeight == 0.f) { + xHeight = -ascent * scale.y; + } + if (capHeight == 0.f) { + capHeight = -ascent * scale.y; + } + + // disallow negative line spacing + if (leading < 0.0f) { + leading = 0.0f; + } + + metrics.top = ymax * scale.y; + metrics.ascent = ascent * scale.y; + metrics.descent = descent * scale.y; + metrics.bottom = ymin * scale.y; + metrics.leading = leading * scale.y; + metrics.xMin = xmin * scale.y; + metrics.xMax = xmax * scale.y; + metrics.xHeight = xHeight; + metrics.capHeight = capHeight; + metrics.underlineThickness = 
underlineThickness * scale.y; + metrics.underlinePosition = underlinePosition * scale.y; + return metrics; +} + +bool FTScalerContext::getCBoxForLetter(char letter, FT_BBox* bbox) { + auto face = _face->face; + const auto glyph_id = FT_Get_Char_Index(face, letter); + if (glyph_id == 0) { + return false; + } + if (FT_Load_Glyph(face, glyph_id, loadGlyphFlags) != FT_Err_Ok) { + return false; + } + emboldenIfNeeded(face, face->glyph, glyph_id); + FT_Outline_Get_CBox(&face->glyph->outline, bbox); + return true; +} + +static constexpr int kOutlineEmboldenDivisor = 24; +// static constexpr int kOutlineEmboldenDivisorAndroid = 34; +// See +// http://freetype.sourceforge.net/freetype2/docs/reference/ft2-bitmap_handling.html#FT_Bitmap_Embolden +// This value was chosen by eyeballing the result in Firefox and trying to match it. +static constexpr FT_Pos kBitmapEmboldenStrength = 1 << 6; +void FTScalerContext::emboldenIfNeeded(FT_Face face, FT_GlyphSlot glyph, GlyphID gid) const { + if (!rec.embolden) { + return; + } + switch (glyph->format) { + case FT_GLYPH_FORMAT_OUTLINE: + FT_Pos strength; + strength = + FT_MulFix(face->units_per_EM, face->size->metrics.y_scale) / kOutlineEmboldenDivisor; + FT_Outline_Embolden(&glyph->outline, strength); + break; + case FT_GLYPH_FORMAT_BITMAP: + if (!face->glyph->bitmap.buffer) { + FT_Load_Glyph(face, gid, loadGlyphFlags); + } + FT_GlyphSlot_Own_Bitmap(glyph); + FT_Bitmap_Embolden(glyph->library, &glyph->bitmap, kBitmapEmboldenStrength, 0); + break; + default: + LOGE("unknown glyph format"); + } +} + +class FTGeometrySink { + public: + explicit FTGeometrySink(Path* path) : path(path) { + } + + static int Move(const FT_Vector* pt, void* ctx) { + auto& self = *static_cast(ctx); + if (self.started) { + self.path->close(); + self.started = false; + } + self.current = *pt; + return 0; + } + + static int Line(const FT_Vector* pt, void* ctx) { + auto& self = *static_cast(ctx); + if (self.currentIsNot(pt)) { + self.goingTo(pt); + self.path->lineTo(FDot6ToFloat(pt->x), -FDot6ToFloat(pt->y)); + } + return 0; + } + + static int Conic(const FT_Vector* pt0, const FT_Vector* pt1, void* ctx) { + auto& self = *static_cast(ctx); + if (self.currentIsNot(pt0) || self.currentIsNot(pt1)) { + self.goingTo(pt1); + self.path->quadTo(FDot6ToFloat(pt0->x), -FDot6ToFloat(pt0->y), FDot6ToFloat(pt1->x), + -FDot6ToFloat(pt1->y)); + } + return 0; + } + + static int Cubic(const FT_Vector* pt0, const FT_Vector* pt1, const FT_Vector* pt2, void* ctx) { + auto& self = *static_cast(ctx); + if (self.currentIsNot(pt0) || self.currentIsNot(pt1) || self.currentIsNot(pt2)) { + self.goingTo(pt2); + self.path->cubicTo(FDot6ToFloat(pt0->x), -FDot6ToFloat(pt0->y), FDot6ToFloat(pt1->x), + -FDot6ToFloat(pt1->y), FDot6ToFloat(pt2->x), -FDot6ToFloat(pt2->y)); + } + return 0; + } + + private: + bool currentIsNot(const FT_Vector* pt) const { + return current.x != pt->x || current.y != pt->y; + } + + void goingTo(const FT_Vector* pt) { + if (!started) { + started = true; + path->moveTo(FDot6ToFloat(current.x), -FDot6ToFloat(current.y)); + } + current = *pt; + } + + Path* path; + bool started = false; + FT_Vector current = {0, 0}; +}; + +static bool GenerateGlyphPath(FT_Face face, Path* path) { + static constexpr FT_Outline_Funcs gFuncs{ + /*move_to =*/FTGeometrySink::Move, + /*line_to =*/FTGeometrySink::Line, + /*conic_to =*/FTGeometrySink::Conic, + /*cubic_to =*/FTGeometrySink::Cubic, + /*shift = */ 0, + /*delta =*/0, + }; + FTGeometrySink sink(path); + auto err = FT_Outline_Decompose(&face->glyph->outline, 
&gFuncs, &sink); + if (err != FT_Err_Ok) { + path->reset(); + return false; + } + path->close(); + return true; +} + +bool FTScalerContext::generatePath(GlyphID glyphID, Path* path) { + std::lock_guard lockGuard(FTMutex()); + auto face = _face->face; + // FT_IS_SCALABLE is documented to mean the face contains outline glyphs. + if (!FT_IS_SCALABLE(face) || this->setupSize()) { + path->reset(); + return false; + } + auto flags = loadGlyphFlags; + flags |= FT_LOAD_NO_BITMAP; // ignore embedded bitmaps so we're sure to get the outline + flags &= ~FT_LOAD_RENDER; // don't scan convert (we just want the outline) + + auto err = FT_Load_Glyph(face, glyphID, flags); + if (err != FT_Err_Ok || face->glyph->format != FT_GLYPH_FORMAT_OUTLINE) { + path->reset(); + return false; + } + emboldenIfNeeded(face, face->glyph, glyphID); + + if (!GenerateGlyphPath(face, path)) { + path->reset(); + return false; + } + return true; +} + +void FTScalerContext::getBBoxForCurrentGlyph(FT_BBox* bbox) { + auto face = _face->face; + FT_Outline_Get_CBox(&face->glyph->outline, bbox); + + // outset the box to integral boundaries + bbox->xMin &= ~63; + bbox->yMin &= ~63; + bbox->xMax = (bbox->xMax + 63) & ~63; + bbox->yMax = (bbox->yMax + 63) & ~63; +} + +GlyphMetrics FTScalerContext::generateGlyphMetrics(GlyphID glyphID) { + std::lock_guard lockGuard(FTMutex()); + GlyphMetrics glyph; + if (setupSize()) { + return glyph; + } + + auto face = _face->face; + auto err = FT_Load_Glyph(face, glyphID, + loadGlyphFlags | static_cast(FT_LOAD_BITMAP_METRICS_ONLY)); + if (err != FT_Err_Ok) { + return glyph; + } + emboldenIfNeeded(face, face->glyph, glyphID); + if (face->glyph->format == FT_GLYPH_FORMAT_OUTLINE) { + using FT_PosLimits = std::numeric_limits; + FT_BBox bounds = {FT_PosLimits::max(), FT_PosLimits::max(), FT_PosLimits::min(), + FT_PosLimits::min()}; + if (0 < face->glyph->outline.n_contours) { + getBBoxForCurrentGlyph(&bounds); + } else { + bounds = {0, 0, 0, 0}; + } + // Round out, no longer dot6. + bounds.xMin = FDot6Floor(bounds.xMin); + bounds.yMin = FDot6Floor(bounds.yMin); + bounds.xMax = FDot6Ceil(bounds.xMax); + bounds.yMax = FDot6Ceil(bounds.yMax); + + FT_Pos width = bounds.xMax - bounds.xMin; + FT_Pos height = bounds.yMax - bounds.yMin; + FT_Pos top = -bounds.yMax; // Freetype y-up, We y-down. 
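+    // e.g. an outline spanning y = -2..10 in FreeType's y-up space (after the rounding above)
+    // ends up with top = -10 and height = 12 in the y-down glyph metrics.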
+ FT_Pos left = bounds.xMin; + + glyph.width = static_cast(width); + glyph.height = static_cast(height); + glyph.top = static_cast(top); + glyph.left = static_cast(left); + } else if (face->glyph->format == FT_GLYPH_FORMAT_BITMAP) { + auto rect = Rect::MakeXYWH(static_cast(face->glyph->bitmap_left), + -static_cast(face->glyph->bitmap_top), + static_cast(face->glyph->bitmap.width), + static_cast(face->glyph->bitmap.rows)); + matrix22Scalar.mapRect(&rect); + rect.roundOut(); + glyph.width = static_cast(rect.width()); + glyph.height = static_cast(rect.height()); + glyph.top = static_cast(rect.top); + glyph.left = static_cast(rect.left); + } else { + LOGE("unknown glyph format"); + return glyph; + } + + glyph.advanceX = FDot6ToFloat(face->glyph->advance.x); + glyph.advanceY = FDot6ToFloat(face->glyph->advance.y); + return glyph; +} + +static gfx::skcms_PixelFormat ToPixelFormat(ColorType colorType) { + switch (colorType) { + case ColorType::ALPHA_8: + return gfx::skcms_PixelFormat_A_8; + case ColorType::BGRA_8888: + return gfx::skcms_PixelFormat_BGRA_8888; + default: + return gfx::skcms_PixelFormat_RGBA_8888; + } +} + +std::shared_ptr CopyFTBitmap(const FT_Bitmap& ftBitmap) { + auto alphaOnly = ftBitmap.pixel_mode == FT_PIXEL_MODE_GRAY; + Bitmap bitmap(static_cast(ftBitmap.width), static_cast(ftBitmap.rows), alphaOnly); + if (bitmap.isEmpty()) { + return nullptr; + } + auto width = static_cast(ftBitmap.width); + auto height = static_cast(ftBitmap.rows); + auto src = reinterpret_cast(ftBitmap.buffer); + // FT_Bitmap::pitch is an int and allowed to be negative. + auto srcRB = ftBitmap.pitch; + auto srcFormat = ftBitmap.pixel_mode == FT_PIXEL_MODE_GRAY ? gfx::skcms_PixelFormat_A_8 + : gfx::skcms_PixelFormat_BGRA_8888; + Pixmap bm(bitmap); + auto dst = static_cast(bm.writablePixels()); + auto dstRB = bm.rowBytes(); + auto dstFormat = ToPixelFormat(bm.colorType()); + for (int i = 0; i < height; i++) { + gfx::skcms_Transform(src, srcFormat, gfx::skcms_AlphaFormat_PremulAsEncoded, nullptr, dst, + dstFormat, gfx::skcms_AlphaFormat_PremulAsEncoded, nullptr, width); + src += srcRB; + dst += dstRB; + } + return bitmap.makeBuffer(); +} + +std::shared_ptr FTScalerContext::generateImage(GlyphID glyphId, Matrix* matrix) { + std::lock_guard lockGuard(FTMutex()); + if (setupSize()) { + return nullptr; + } + auto face = _face->face; + auto flags = loadGlyphFlags; + flags |= FT_LOAD_RENDER; + flags &= ~FT_LOAD_NO_BITMAP; + auto err = FT_Load_Glyph(face, glyphId, flags); + if (err != FT_Err_Ok || face->glyph->format != FT_GLYPH_FORMAT_BITMAP) { + return nullptr; + } + auto ftBitmap = face->glyph->bitmap; + if (ftBitmap.pixel_mode != FT_PIXEL_MODE_BGRA && ftBitmap.pixel_mode != FT_PIXEL_MODE_GRAY) { + return nullptr; + } + if (matrix) { + matrix->setTranslate(static_cast(face->glyph->bitmap_left), + -static_cast(face->glyph->bitmap_top)); + matrix->postConcat(matrix22Scalar); + } + return CopyFTBitmap(ftBitmap); +} +} // namespace tgfx diff --git a/src/vectors/freetype/FTScalerContext.h b/src/vectors/freetype/FTScalerContext.h new file mode 100644 index 00000000..9410a01c --- /dev/null +++ b/src/vectors/freetype/FTScalerContext.h @@ -0,0 +1,80 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. 
+// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "ft2build.h" +#include FT_FREETYPE_H +#include "FTFace.h" +#include "FTTypeface.h" +#include "core/PixelBuffer.h" + +namespace tgfx { +class FTScalerContextRec { + public: + bool computeMatrices(Point* scale, Matrix* remaining) const; + + float textSize = 12.f; + float skewX = 0.0f; + bool embolden = false; + bool verticalText = false; +}; + +class FTScalerContext { + public: + static std::unique_ptr Make(std::shared_ptr typeface, float size, + bool fauxBold = false, bool fauxItalic = false, + bool verticalText = false); + + ~FTScalerContext(); + + FontMetrics generateFontMetrics(); + + GlyphMetrics generateGlyphMetrics(GlyphID glyphID); + + bool generatePath(GlyphID glyphID, Path* path); + + std::shared_ptr generateImage(GlyphID glyphId, Matrix* matrix); + + private: + FTScalerContext(std::shared_ptr typeFace, FTScalerContextRec rec); + + bool valid() const { + return _face != nullptr && ftSize != nullptr; + } + + int setupSize(); + + bool getCBoxForLetter(char letter, FT_BBox* bbox); + + void emboldenIfNeeded(FT_Face face, FT_GlyphSlot glyph, GlyphID gid) const; + + void getBBoxForCurrentGlyph(FT_BBox* bbox); + + std::shared_ptr typeface; + FTScalerContextRec rec; + FTFace* _face = nullptr; + FT_Size ftSize = nullptr; + FT_Int strikeIndex = -1; // The bitmap strike for the face (or -1 if none). + Matrix matrix22Scalar = Matrix::I(); + FT_Matrix matrix22 = {}; + Point scale = Point::Make(1.f, 1.f); + FT_Int32 loadGlyphFlags = 0; +}; +} // namespace tgfx diff --git a/src/vectors/freetype/FTTypeface.cpp b/src/vectors/freetype/FTTypeface.cpp new file mode 100644 index 00000000..29ba822d --- /dev/null +++ b/src/vectors/freetype/FTTypeface.cpp @@ -0,0 +1,261 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "FTTypeface.h" + +#include FT_TRUETYPE_TABLES_H + +#include "FTScalerContext.h" +#include "SystemFont.h" +#include "tgfx/utils/UTF.h" +#include "utils/UniqueID.h" + +namespace tgfx { +class EmptyTypeface : public Typeface { + public: + uint32_t uniqueID() const override { + return _uniqueID; + } + + std::string fontFamily() const override { + return ""; + } + + std::string fontStyle() const override { + return ""; + } + + int glyphsCount() const override { + return 0; + } + + int unitsPerEm() const override { + return 0; + } + + bool hasColor() const override { + return false; + } + + GlyphID getGlyphID(const std::string&) const override { + return 0; + } + + std::shared_ptr getBytes() const override { + return nullptr; + } + + std::shared_ptr copyTableData(tgfx::FontTableTag) const override { + return nullptr; + } + + protected: + FontMetrics getMetrics(float) const override { + return {}; + } + + Rect getBounds(GlyphID, float, bool, bool) const override { + return {}; + } + + float getAdvance(GlyphID, float, bool, bool, bool) const override { + return 0; + } + + bool getPath(GlyphID, float, bool, bool, Path*) const override { + return false; + } + + std::shared_ptr getGlyphImage(GlyphID, float, bool, bool, Matrix*) const override { + return nullptr; + } + + Point getVerticalOffset(GlyphID, float, bool, bool) const override { + return Point::Zero(); + } + + private: + uint32_t _uniqueID = UniqueID::Next(); +}; + +std::shared_ptr Typeface::MakeFromName(const std::string& fontFamily, + const std::string& fontStyle) { + return SystemFont::MakeFromName(fontFamily, fontStyle); +} + +std::shared_ptr Typeface::MakeFromPath(const std::string& fontPath, int ttcIndex) { + return FTTypeface::Make(FTFontData(fontPath, ttcIndex)); +} + +std::shared_ptr Typeface::MakeFromBytes(const void* data, size_t length, int ttcIndex) { + return FTTypeface::Make(FTFontData(data, length, ttcIndex)); +} + +std::shared_ptr Typeface::MakeDefault() { + return std::make_shared(); +} + +std::shared_ptr FTTypeface::Make(FTFontData data) { + auto face = FTFace::Make(data); + if (face == nullptr) { + return nullptr; + } + auto typeface = std::shared_ptr(new FTTypeface(std::move(data), std::move(face))); + typeface->weakThis = typeface; + return typeface; +} + +FTTypeface::FTTypeface(FTFontData data, std::unique_ptr face) + : _uniqueID(UniqueID::Next()), data(std::move(data)), _face(std::move(face)) { +} + +FTTypeface::~FTTypeface() { + std::lock_guard lockGuard(FTMutex()); + _face = nullptr; +} + +int FTTypeface::GetUnitsPerEm(FT_Face face) { + auto unitsPerEm = face->units_per_EM; + // At least some versions of FreeType set face->units_per_EM to 0 for bitmap only fonts. 
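+  // In that case, fall back to the value recorded in the sfnt 'head' table, read below via
+  // FT_Get_Sfnt_Table(face, FT_SFNT_HEAD).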
+ if (unitsPerEm == 0) { + auto* ttHeader = static_cast(FT_Get_Sfnt_Table(face, FT_SFNT_HEAD)); + if (ttHeader) { + unitsPerEm = ttHeader->Units_Per_EM; + } + } + return unitsPerEm; +} + +int FTTypeface::glyphsCount() const { + return static_cast(_face->face->num_glyphs); +} + +int FTTypeface::unitsPerEm() const { + return GetUnitsPerEm(_face->face); +} + +bool FTTypeface::hasColor() const { + return FT_HAS_COLOR(_face->face); +} + +GlyphID FTTypeface::getGlyphID(const std::string& name) const { + if (name.empty()) { + return 0; + } + auto count = tgfx::UTF::CountUTF8(name.c_str(), name.size()); + if (count > 1 || count <= 0) { + return 0; + } + const char* start = &(name[0]); + auto unichar = tgfx::UTF::NextUTF8(&start, start + name.size()); + return FT_Get_Char_Index(_face->face, static_cast(unichar)); +} + +std::shared_ptr FTTypeface::getBytes() const { + return data.data; +} + +std::shared_ptr FTTypeface::copyTableData(FontTableTag tag) const { + std::lock_guard lockGuard(FTMutex()); + auto face = _face->face; + if (face == nullptr) { + return nullptr; + } + + FT_ULong tableLength = 0; + auto error = FT_Load_Sfnt_Table(face, tag, 0, nullptr, &tableLength); + if (error) { + return nullptr; + } + auto tableData = new (std::nothrow) uint8_t[tableLength]; + if (tableData == nullptr) { + return nullptr; + } + error = FT_Load_Sfnt_Table(face, tag, 0, reinterpret_cast(tableData), &tableLength); + if (error) { + delete[] tableData; + return nullptr; + } + return Data::MakeAdopted(tableData, tableLength); +} + +FontMetrics FTTypeface::getMetrics(float size) const { + auto scalerContext = FTScalerContext::Make(weakThis.lock(), size); + if (scalerContext == nullptr) { + return {}; + } + return scalerContext->generateFontMetrics(); +} + +Rect FTTypeface::getBounds(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic) const { + auto scalerContext = FTScalerContext::Make(weakThis.lock(), size, fauxBold, fauxItalic); + if (scalerContext == nullptr) { + return Rect::MakeEmpty(); + } + auto glyphMetrics = scalerContext->generateGlyphMetrics(glyphID); + auto bounds = + Rect::MakeXYWH(glyphMetrics.left, glyphMetrics.top, glyphMetrics.width, glyphMetrics.height); + auto advance = glyphMetrics.advanceX; + if (bounds.isEmpty() && advance > 0) { + auto fontMetrics = scalerContext->generateFontMetrics(); + bounds.setLTRB(0, fontMetrics.ascent, advance, fontMetrics.descent); + } + return bounds; +} + +float FTTypeface::getAdvance(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + bool verticalText) const { + auto scalerContext = + FTScalerContext::Make(weakThis.lock(), size, fauxBold, fauxItalic, verticalText); + if (scalerContext == nullptr) { + return 0; + } + auto glyphMetrics = scalerContext->generateGlyphMetrics(glyphID); + return verticalText ? 
glyphMetrics.advanceY : glyphMetrics.advanceX; +} + +bool FTTypeface::getPath(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + Path* path) const { + auto scalerContext = FTScalerContext::Make(weakThis.lock(), size, fauxBold, fauxItalic); + if (scalerContext == nullptr) { + return false; + } + return scalerContext->generatePath(glyphID, path); +} + +std::shared_ptr FTTypeface::getGlyphImage(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic, Matrix* matrix) const { + auto scalerContext = FTScalerContext::Make(weakThis.lock(), size, fauxBold, fauxItalic); + if (scalerContext == nullptr) { + return nullptr; + } + return scalerContext->generateImage(glyphID, matrix); +} + +Point FTTypeface::getVerticalOffset(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic) const { + auto scalerContext = FTScalerContext::Make(weakThis.lock(), size, fauxBold, fauxItalic); + if (scalerContext == nullptr) { + return Point::Zero(); + } + auto metrics = scalerContext->generateFontMetrics(); + auto offsetY = metrics.capHeight; + auto glyphMetrics = scalerContext->generateGlyphMetrics(glyphID); + return {-glyphMetrics.advanceX * 0.5f, offsetY}; +} +} // namespace tgfx diff --git a/src/vectors/freetype/FTTypeface.h b/src/vectors/freetype/FTTypeface.h new file mode 100644 index 00000000..de3e6e65 --- /dev/null +++ b/src/vectors/freetype/FTTypeface.h @@ -0,0 +1,89 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include "ft2build.h" +#include FT_FREETYPE_H + +#include "FTFace.h" +#include "FTFontData.h" +#include "tgfx/core/Font.h" +#include "tgfx/core/Typeface.h" + +namespace tgfx { +class FTTypeface : public Typeface { + public: + static std::shared_ptr Make(FTFontData data); + + ~FTTypeface() override; + + uint32_t uniqueID() const override { + return _uniqueID; + } + + std::string fontFamily() const override { + return _face->face->family_name ? _face->face->family_name : ""; + } + + std::string fontStyle() const override { + return _face->face->style_name ? 
_face->face->style_name : ""; + } + + static int GetUnitsPerEm(FT_Face face); + + int glyphsCount() const override; + + int unitsPerEm() const override; + + bool hasColor() const override; + + GlyphID getGlyphID(const std::string& name) const override; + + std::shared_ptr getBytes() const override; + + std::shared_ptr copyTableData(FontTableTag tag) const override; + + protected: + float getAdvance(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + bool verticalText) const override; + + FontMetrics getMetrics(float size) const override; + + bool getPath(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + Path* path) const override; + + std::shared_ptr getGlyphImage(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic, Matrix* matrix) const override; + + Rect getBounds(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic) const override; + + Point getVerticalOffset(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic) const override; + + private: + FTTypeface(FTFontData data, std::unique_ptr face); + + uint32_t _uniqueID = 0; + FTFontData data; + std::unique_ptr _face; + std::weak_ptr weakThis; + + friend class FTScalerContext; +}; +} // namespace tgfx diff --git a/src/vectors/freetype/FTUtil.cpp b/src/vectors/freetype/FTUtil.cpp new file mode 100644 index 00000000..9e974408 --- /dev/null +++ b/src/vectors/freetype/FTUtil.cpp @@ -0,0 +1,33 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "FTUtil.h" + +#include FT_MODULE_H + +namespace tgfx { +FTLibrary::FTLibrary() { + FT_Init_FreeType(&_library); +} + +FTLibrary::~FTLibrary() { + if (_library) { + FT_Done_Library(_library); + } +} +} // namespace tgfx diff --git a/src/vectors/freetype/FTUtil.h b/src/vectors/freetype/FTUtil.h new file mode 100644 index 00000000..fc1498e0 --- /dev/null +++ b/src/vectors/freetype/FTUtil.h @@ -0,0 +1,64 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. 
see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "ft2build.h" +#include FT_FREETYPE_H + +namespace tgfx { +inline FT_F26Dot6 FloatToFDot6(float x) { + return static_cast(x * 64.f); +} + +inline float FDot6ToFloat(FT_F26Dot6 x) { + return static_cast(x) * 0.015625f; +} + +inline FT_F26Dot6 FDot6Floor(FT_F26Dot6 x) { + return x >> 6; +} + +inline FT_F26Dot6 FDot6Ceil(FT_F26Dot6 x) { + return ((x) + 63) >> 6; +} + +inline FT_F26Dot6 FDot6Round(FT_F26Dot6 x) { + return (((x) + 32) >> 6); +} + +class FTLibrary { + public: + FTLibrary(); + + FTLibrary(const FTLibrary&) = delete; + + FTLibrary& operator=(const FTLibrary&) = delete; + + ~FTLibrary(); + + FT_Library library() const { + return _library; + } + + private: + FT_Library _library = nullptr; +}; + +} // namespace tgfx diff --git a/src/vectors/freetype/SystemFont.cpp b/src/vectors/freetype/SystemFont.cpp new file mode 100644 index 00000000..3ab92a11 --- /dev/null +++ b/src/vectors/freetype/SystemFont.cpp @@ -0,0 +1,243 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "SystemFont.h" + +#ifdef _WIN32 +#include +#include +#include +#endif + +#pragma clang diagnostic ignored "-Wunused-parameter" + +namespace tgfx { +#ifdef _WIN32 +struct DWriteFontStyle { + DWRITE_FONT_WEIGHT weight; + DWRITE_FONT_STRETCH stretch; + DWRITE_FONT_STYLE fontStyle; +}; + +static constexpr std::pair FontWeightPair[] = { + {"thin", DWRITE_FONT_WEIGHT_THIN}, + {"hairline", DWRITE_FONT_WEIGHT_THIN}, + {"w1", DWRITE_FONT_WEIGHT_THIN}, + {"extralight", DWRITE_FONT_WEIGHT_EXTRA_LIGHT}, + {"ultralight", DWRITE_FONT_WEIGHT_ULTRA_LIGHT}, + {"w2", DWRITE_FONT_WEIGHT_ULTRA_LIGHT}, + {"light", DWRITE_FONT_WEIGHT_LIGHT}, + {"w3", DWRITE_FONT_WEIGHT_LIGHT}, + {"book", DWRITE_FONT_WEIGHT_SEMI_LIGHT}, + {"normal", DWRITE_FONT_WEIGHT_NORMAL}, + {"plain", DWRITE_FONT_WEIGHT_NORMAL}, + {"roman", DWRITE_FONT_WEIGHT_NORMAL}, + {"standard", DWRITE_FONT_WEIGHT_NORMAL}, + {"w4", DWRITE_FONT_WEIGHT_NORMAL}, + {"r", DWRITE_FONT_WEIGHT_NORMAL}, + {"regular", DWRITE_FONT_WEIGHT_REGULAR}, + {"medium", DWRITE_FONT_WEIGHT_MEDIUM}, + {"w5", DWRITE_FONT_WEIGHT_MEDIUM}, + {"semi", DWRITE_FONT_WEIGHT_SEMI_BOLD}, + {"semibold", DWRITE_FONT_WEIGHT_SEMI_BOLD}, + {"demibold", DWRITE_FONT_WEIGHT_DEMI_BOLD}, + {"w6", DWRITE_FONT_WEIGHT_DEMI_BOLD}, + {"bold", DWRITE_FONT_WEIGHT_BOLD}, + {"w7", DWRITE_FONT_WEIGHT_BOLD}, + {"extra", DWRITE_FONT_WEIGHT_EXTRA_BOLD}, + {"extrabold", DWRITE_FONT_WEIGHT_EXTRA_BOLD}, + {"ultra", DWRITE_FONT_WEIGHT_ULTRA_BOLD}, + {"ultrabold", DWRITE_FONT_WEIGHT_ULTRA_BOLD}, + {"w8", DWRITE_FONT_WEIGHT_ULTRA_BOLD}, + {"black", DWRITE_FONT_WEIGHT_BLACK}, + {"heavy", DWRITE_FONT_WEIGHT_HEAVY}, + {"w9", DWRITE_FONT_WEIGHT_HEAVY}, + {"extrablack", DWRITE_FONT_WEIGHT_EXTRA_BLACK}, + {"ultrablack", DWRITE_FONT_WEIGHT_ULTRA_BLACK}, + {"extraheavy", DWRITE_FONT_WEIGHT_ULTRA_BLACK}, + {"ultraheavy", DWRITE_FONT_WEIGHT_ULTRA_BLACK}, + {"w10", DWRITE_FONT_WEIGHT_ULTRA_BLACK}}; + +static constexpr std::pair FontWidthPair[] = { + {"ultracondensed", DWRITE_FONT_STRETCH_ULTRA_CONDENSED}, + {"extracondensed", DWRITE_FONT_STRETCH_EXTRA_CONDENSED}, + {"condensed", DWRITE_FONT_STRETCH_CONDENSED}, + {"narrow", DWRITE_FONT_STRETCH_CONDENSED}, + {"semicondensed", DWRITE_FONT_STRETCH_SEMI_CONDENSED}, + {"normal", DWRITE_FONT_STRETCH_NORMAL}, + {"semiexpanded", DWRITE_FONT_STRETCH_SEMI_EXPANDED}, + {"expanded", DWRITE_FONT_STRETCH_EXPANDED}, + {"extraexpanded", DWRITE_FONT_STRETCH_EXTRA_EXPANDED}, + {"ultraexpanded", DWRITE_FONT_STRETCH_ULTRA_EXPANDED}}; + +static constexpr std::pair FontSlantPair[] = { + {"upright", DWRITE_FONT_STYLE_NORMAL}, + {"italic", DWRITE_FONT_STYLE_ITALIC}, + {"oblique", DWRITE_FONT_STYLE_OBLIQUE}}; + +DWriteFontStyle ToDWriteFontStyle(const std::string& fontStyle) { + std::string key; + key.resize(fontStyle.length()); + std::transform(fontStyle.begin(), fontStyle.end(), key.begin(), ::tolower); + DWriteFontStyle dWriteFontStyle{}; + dWriteFontStyle.weight = DWRITE_FONT_WEIGHT_NORMAL; + dWriteFontStyle.stretch = DWRITE_FONT_STRETCH_NORMAL; + dWriteFontStyle.fontStyle = DWRITE_FONT_STYLE_NORMAL; + + std::vector keys; + std::string::size_type pos1, pos2; + pos2 = key.find(' '); + pos1 = 0; + while (std::string::npos != pos2) { + keys.push_back(key.substr(pos1, pos2 - pos1)); + pos1 = pos2 + 1; + pos2 = key.find(' ', pos1); + } + if (pos1 != key.length()) { + keys.push_back(key.substr(pos1)); + } + + for (auto& styleKey : keys) { + for (const auto& pair : FontWeightPair) { + if 
(pair.first == styleKey) { + dWriteFontStyle.weight = pair.second; + break; + } + } + for (const auto& pair : FontWidthPair) { + if (pair.first == styleKey) { + dWriteFontStyle.stretch = pair.second; + break; + } + } + for (const auto& pair : FontSlantPair) { + if (pair.first == styleKey) { + dWriteFontStyle.fontStyle = pair.second; + break; + } + } + } + return dWriteFontStyle; +} + +std::wstring ToWstring(const std::string& input) { + std::wstring result; + int len = MultiByteToWideChar(CP_ACP, 0, input.c_str(), input.size(), nullptr, 0); + WCHAR* buffer = new (std::nothrow) wchar_t[len + 1]; + if (buffer == nullptr) { + return result; + } + MultiByteToWideChar(CP_ACP, 0, input.c_str(), input.size(), buffer, len); + buffer[len] = '\0'; + result.append(buffer); + delete[] buffer; + return result; +} + +std::string ToString(const std::wstring& wstring) { + std::string result; + int len = + WideCharToMultiByte(CP_ACP, 0, wstring.c_str(), wstring.size(), nullptr, 0, nullptr, nullptr); + char* buffer = new (std::nothrow) char[len + 1]; + if (buffer == nullptr) { + return result; + } + WideCharToMultiByte(CP_ACP, 0, wstring.c_str(), wstring.size(), buffer, len, nullptr, nullptr); + buffer[len] = '\0'; + result.append(buffer); + delete[] buffer; + return result; +} + +template +void SafeRelease(T** ppT) { + if (*ppT) { + (*ppT)->Release(); + *ppT = nullptr; + } +} + +std::shared_ptr MakeFromFontName(const std::string& fontFamily, + const std::string& fontStyle) { + if (fontFamily.empty()) { + return nullptr; + } + std::shared_ptr typeface = nullptr; + IDWriteFactory3* writeFactory = nullptr; + HRESULT hResult = DWriteCreateFactory(DWRITE_FACTORY_TYPE_SHARED, __uuidof(IDWriteFactory), + reinterpret_cast(&writeFactory)); + IDWriteFontSet* fontSet = nullptr; + if (SUCCEEDED(hResult)) { + hResult = writeFactory->GetSystemFontSet(&fontSet); + } + if (SUCCEEDED(hResult)) { + DWriteFontStyle dWriteFontStyle = ToDWriteFontStyle(fontStyle); + hResult = + fontSet->GetMatchingFonts(ToWstring(fontFamily).c_str(), dWriteFontStyle.weight, + dWriteFontStyle.stretch, dWriteFontStyle.fontStyle, &fontSet); + } + UINT32 fontCount = 0; + if (SUCCEEDED(hResult)) { + fontCount = fontSet->GetFontCount(); + } + if (fontCount > 0) { + IDWriteFontFaceReference* fontFaceReference = nullptr; + if (SUCCEEDED(hResult)) { + hResult = fontSet->GetFontFaceReference(0, &fontFaceReference); + } + IDWriteFontFile* fontFile = nullptr; + if (SUCCEEDED(hResult)) { + hResult = fontFaceReference->GetFontFile(&fontFile); + } + IDWriteLocalFontFileLoader* fontFileLoader = nullptr; + if (SUCCEEDED(hResult)) { + hResult = fontFile->GetLoader(reinterpret_cast(&fontFileLoader)); + } + + const void* referenceKey; + UINT32 fontFileReferenceKeySize = 0; + if (SUCCEEDED(hResult)) { + hResult = fontFile->GetReferenceKey(&referenceKey, &fontFileReferenceKeySize); + } + UINT32 filePathLength = 100; + WCHAR* wFilePath = new (std::nothrow) wchar_t[filePathLength]; + if (SUCCEEDED(hResult)) { + hResult = fontFileLoader->GetFilePathFromKey(referenceKey, fontFileReferenceKeySize, + wFilePath, filePathLength); + } + typeface = Typeface::MakeFromPath(ToString(wFilePath), 0); + + SafeRelease(&fontFileLoader); + SafeRelease(&fontFile); + SafeRelease(&fontFaceReference); + delete[] wFilePath; + } + SafeRelease(&fontSet); + SafeRelease(&writeFactory); + return typeface; +} +#endif + +std::shared_ptr SystemFont::MakeFromName(const std::string& fontFamily, + const std::string& fontStyle) { +#ifdef _WIN32 + return MakeFromFontName(fontFamily, fontStyle); 
+#endif + return nullptr; +} +} // namespace tgfx diff --git a/src/vectors/freetype/SystemFont.h b/src/vectors/freetype/SystemFont.h new file mode 100644 index 00000000..c66b3c35 --- /dev/null +++ b/src/vectors/freetype/SystemFont.h @@ -0,0 +1,28 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once +#include "tgfx/core/Typeface.h" + +namespace tgfx { +class SystemFont { + public: + static std::shared_ptr MakeFromName(const std::string& fontFamily, + const std::string& fontStyle); +}; +} // namespace tgfx \ No newline at end of file diff --git a/src/vectors/web/WebMask.cpp b/src/vectors/web/WebMask.cpp new file mode 100644 index 00000000..ba004168 --- /dev/null +++ b/src/vectors/web/WebMask.cpp @@ -0,0 +1,150 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. 
+// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "WebMask.h" +#include "WebTypeface.h" +#include "core/SimpleTextBlob.h" +#include "platform/web/WebImageBuffer.h" +#include "platform/web/WebImageStream.h" +#include "utils/Log.h" + +using namespace emscripten; + +namespace tgfx { +std::shared_ptr Mask::Make(int width, int height, bool) { + auto canvas = val::module_property("tgfx").call("createCanvas2D", width, height); + if (!canvas.as()) { + return nullptr; + } + auto buffer = WebImageBuffer::MakeAdopted(canvas); + auto webMaskClass = val::module_property("WebMask"); + if (!webMaskClass.as()) { + return nullptr; + } + auto webMask = webMaskClass.call("create", canvas); + if (!webMask.as()) { + return nullptr; + } + auto stream = WebImageStream::MakeFrom(canvas, width, height, true); + if (stream == nullptr) { + return nullptr; + } + return std::make_shared(std::move(buffer), std::move(stream), webMask); +} + +WebMask::WebMask(std::shared_ptr buffer, std::shared_ptr stream, + emscripten::val webMask) + : buffer(std::move(buffer)), stream(std::move(stream)), webMask(webMask) { +} + +void WebMask::clear() { + aboutToFill(); + stream->markContentDirty(Rect::MakeWH(width(), height())); + webMask.call("clear"); +} + +static void Iterator(PathVerb verb, const Point points[4], void* info) { + auto path2D = reinterpret_cast(info); + switch (verb) { + case PathVerb::Move: + path2D->call("moveTo", points[0].x, points[0].y); + break; + case PathVerb::Line: + path2D->call("lineTo", points[1].x, points[1].y); + break; + case PathVerb::Quad: + path2D->call("quadraticCurveTo", points[1].x, points[1].y, points[2].x, points[2].y); + break; + case PathVerb::Cubic: + path2D->call("bezierCurveTo", points[1].x, points[1].y, points[2].x, points[2].y, + points[3].x, points[3].y); + break; + case PathVerb::Close: + path2D->call("closePath"); + break; + } +} + +void WebMask::onFillPath(const Path& path, const Matrix& matrix, bool) { + if (path.isEmpty()) { + return; + } + auto path2DClass = val::global("Path2D"); + if (!path2DClass.as()) { + return; + } + aboutToFill(); + auto finalPath = path; + finalPath.transform(matrix); + stream->markContentDirty(finalPath.getBounds()); + auto path2D = path2DClass.new_(); + finalPath.decompose(Iterator, &path2D); + webMask.call("fillPath", path2D, path.getFillType()); +} + +static void GetTextsAndPositions(const SimpleTextBlob* textBlob, std::vector* texts, + std::vector* points) { + auto& font = textBlob->getFont(); + auto& glyphIDs = textBlob->getGlyphIDs(); + auto& positions = textBlob->getPositions(); + auto typeface = std::static_pointer_cast(font.getTypeface()); + for (size_t i = 0; i < glyphIDs.size(); ++i) { + texts->push_back(typeface->getText(glyphIDs[i])); + points->push_back(positions[i]); + } +} + +bool WebMask::onFillText(const TextBlob* textBlob, const Stroke* stroke, const Matrix& matrix) { + aboutToFill(); + auto bounds = textBlob->getBounds(stroke); + matrix.mapRect(&bounds); + stream->markContentDirty(bounds); + std::vector texts = {}; + std::vector points = {}; + auto blob = static_cast(textBlob); + GetTextsAndPositions(blob, &texts, &points); + const auto& font = blob->getFont(); + const auto* typeFace = static_cast(font.getTypeface().get()); + auto webFont = val::object(); + webFont.set("name", typeFace->fontFamily()); + webFont.set("style", typeFace->fontStyle()); + webFont.set("size", font.getSize()); + webFont.set("bold", font.isFauxBold()); + webFont.set("italic", font.isFauxItalic()); 
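+  // Hand the font description, per-glyph texts, and positions to the JS-side WebMask:
+  // strokeText() when a stroke is provided, fillText() otherwise.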
+ if (stroke) { + webMask.call("strokeText", webFont, *stroke, texts, points, matrix); + } else { + webMask.call("fillText", webFont, texts, points, matrix); + } + return true; +} + +void WebMask::aboutToFill() { + if (buffer.unique()) { + return; + } + auto canvas = val::module_property("tgfx").call("createCanvas2D", width(), height()); + if (!canvas.as()) { + ABORT("WebMask::aboutToFill() : Failed to create new Canvas2D!"); + return; + } + buffer = WebImageBuffer::MakeAdopted(canvas); + webMask.call("updateCanvas", canvas); + stream->source = canvas; +} +} // namespace tgfx diff --git a/src/vectors/web/WebMask.h b/src/vectors/web/WebMask.h new file mode 100644 index 00000000..71571f47 --- /dev/null +++ b/src/vectors/web/WebMask.h @@ -0,0 +1,65 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include "platform/web/WebImageStream.h" +#include "tgfx/core/Mask.h" + +namespace tgfx { +class WebMask : public Mask { + public: + WebMask(std::shared_ptr buffer, std::shared_ptr stream, + emscripten::val webMask); + + int width() const override { + return stream->width(); + } + + int height() const override { + return stream->height(); + } + + bool isHardwareBacked() const override { + return stream->width(); + } + + void clear() override; + + std::shared_ptr makeBuffer() const override { + return buffer; + } + + protected: + std::shared_ptr getImageStream() const override { + return stream; + } + + void onFillPath(const Path& path, const Matrix& matrix, bool) override; + + bool onFillText(const TextBlob* textBlob, const Stroke* stroke, const Matrix& matrix) override; + + private: + std::shared_ptr buffer = nullptr; + std::shared_ptr stream = nullptr; + emscripten::val webMask = emscripten::val::null(); + + void aboutToFill(); +}; +} // namespace tgfx diff --git a/src/vectors/web/WebTypeface.cpp b/src/vectors/web/WebTypeface.cpp new file mode 100644 index 00000000..40924720 --- /dev/null +++ b/src/vectors/web/WebTypeface.cpp @@ -0,0 +1,164 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#include "WebTypeface.h" +#include +#include "platform/web/WebImageBuffer.h" +#include "utils/UniqueID.h" + +using namespace emscripten; + +namespace tgfx { +std::shared_ptr Typeface::MakeFromName(const std::string& name, + const std::string& style) { + return WebTypeface::Make(name, style); +} + +std::shared_ptr Typeface::MakeFromPath(const std::string&, int) { + return nullptr; +} + +std::shared_ptr Typeface::MakeFromBytes(const void*, size_t, int) { + return nullptr; +} + +std::shared_ptr Typeface::MakeDefault() { + return WebTypeface::Make("Arial"); +} + +std::shared_ptr WebTypeface::Make(const std::string& name, const std::string& style) { + auto scalerContextClass = val::module_property("ScalerContext"); + if (!scalerContextClass.as()) { + return nullptr; + } + auto webTypeface = std::shared_ptr(new WebTypeface(name, style)); + webTypeface->scalerContextClass = scalerContextClass; + return webTypeface; +} + +WebTypeface::WebTypeface(std::string name, std::string style) + : _uniqueID(UniqueID::Next()), name(std::move(name)), style(std::move(style)) { + webFontFamily = this->name; + if (!this->style.empty()) { + webFontFamily += " " + this->style; + } +} + +WebTypeface::~WebTypeface() { + delete fontMetricsMap; +} + +bool WebTypeface::hasColor() const { + auto emojiName = name; + std::transform(emojiName.begin(), emojiName.end(), emojiName.begin(), ::tolower); + return emojiName.find("emoji") != std::string::npos; +} + +// The web side does not involve multithreading and does not require locking. 
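+// Each web font family owns a grow-only list of UTF-8 glyph strings. A GlyphID is simply the
+// 1-based index of a string in that list, so 0 stays reserved as the invalid glyph and the
+// table is capped at UINT16_MAX entries. The map is allocated with new and never destroyed,
+// so it stays valid for the lifetime of the program.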
+static std::unordered_map>& GlyphsMap() { + static auto& glyphs = *new std::unordered_map>; + return glyphs; +} + +GlyphID WebTypeface::getGlyphID(const std::string& text) const { + if (!hasColor() && scalerContextClass.call("isEmoji", text)) { + return 0; + } + auto& glyphs = GlyphsMap()[webFontFamily]; + auto iter = std::find(glyphs.begin(), glyphs.end(), text); + if (iter != glyphs.end()) { + return iter - glyphs.begin() + 1; + } + if (glyphs.size() >= UINT16_MAX) { + return 0; + } + glyphs.push_back(text); + return glyphs.size(); +} + +std::shared_ptr WebTypeface::getBytes() const { + return nullptr; +} + +std::string WebTypeface::getText(GlyphID glyphID) const { + if (glyphID == 0 || GlyphsMap().find(webFontFamily) == GlyphsMap().end()) { + return ""; + } + const auto& glyphs = GlyphsMap()[webFontFamily]; + if (glyphID > glyphs.size()) { + return ""; + } + return glyphs.at(glyphID - 1); +} + +Point WebTypeface::getVerticalOffset(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic) const { + if (glyphID == 0) { + return Point::Zero(); + } + auto metrics = getMetrics(size); + auto advance = getAdvance(glyphID, size, fauxBold, fauxItalic, true); + return {-advance * 0.5f, metrics.capHeight}; +} + +float WebTypeface::getAdvance(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + bool) const { + if (glyphID == 0) { + return 0; + } + auto scalerContext = scalerContextClass.new_(name, style, size, fauxBold, fauxItalic); + return scalerContext.call("getTextAdvance", getText(glyphID)); +} + +FontMetrics WebTypeface::getMetrics(float size) const { + auto iter = fontMetricsMap->find(size); + if (iter != fontMetricsMap->end()) { + return iter->second; + } + auto scalerContext = scalerContextClass.new_(name, style, size); + auto metrics = scalerContext.call("generateFontMetrics"); + fontMetricsMap->insert(std::make_pair(size, metrics)); + return metrics; +} + +bool WebTypeface::getPath(GlyphID, float, bool, bool, Path*) const { + return false; +} + +Rect WebTypeface::getBounds(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic) const { + if (glyphID == 0) { + return Rect::MakeEmpty(); + } + auto scalerContext = scalerContextClass.new_(name, style, size, fauxBold, fauxItalic); + return scalerContext.call("getTextBounds", getText(glyphID)); +} + +std::shared_ptr WebTypeface::getGlyphImage(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic, Matrix* matrix) const { + if (glyphID == 0) { + return nullptr; + } + auto scalerContext = scalerContextClass.new_(name, style, size, fauxBold, fauxItalic); + auto bounds = scalerContext.call("getTextBounds", getText(glyphID)); + auto buffer = scalerContext.call("generateImage", getText(glyphID), bounds); + if (matrix) { + matrix->setTranslate(bounds.left, bounds.top); + } + return WebImageBuffer::MakeAdopted(std::move(buffer)); +} +} // namespace tgfx diff --git a/src/vectors/web/WebTypeface.h b/src/vectors/web/WebTypeface.h new file mode 100644 index 00000000..161649db --- /dev/null +++ b/src/vectors/web/WebTypeface.h @@ -0,0 +1,93 @@ +///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// + +#pragma once + +#include +#include +#include +#include "tgfx/core/Font.h" +#include "tgfx/core/Typeface.h" + +namespace tgfx { +class WebTypeface : public Typeface { + public: + static std::shared_ptr Make(const std::string& name, const std::string& style = ""); + + ~WebTypeface() override; + + uint32_t uniqueID() const override { + return _uniqueID; + } + + std::string fontFamily() const override { + return name; + } + + std::string fontStyle() const override { + return style; + } + + int glyphsCount() const override { + return 0; + } + + int unitsPerEm() const override { + return 0; + } + + bool hasColor() const override; + + std::string getText(GlyphID glyphID) const; + + GlyphID getGlyphID(const std::string&) const override; + + std::shared_ptr getBytes() const override; + + std::shared_ptr copyTableData(FontTableTag) const override { + return nullptr; + } + + protected: + Point getVerticalOffset(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic) const override; + + float getAdvance(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + bool verticalText) const override; + + FontMetrics getMetrics(float size) const override; + + bool getPath(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic, + Path* path) const override; + Rect getBounds(GlyphID glyphID, float size, bool fauxBold, bool fauxItalic) const override; + + std::shared_ptr getGlyphImage(GlyphID glyphID, float size, bool fauxBold, + bool fauxItalic, Matrix* matrix) const override; + + private: + explicit WebTypeface(std::string name, std::string style); + + uint32_t _uniqueID; + std::unordered_map* fontMetricsMap = + new std::unordered_map; + emscripten::val scalerContextClass = emscripten::val::null(); + std::string name; + std::string style; + std::string webFontFamily; +}; +} // namespace tgfx diff --git a/sync_deps.sh b/sync_deps.sh new file mode 100755 index 00000000..04cd2491 --- /dev/null +++ b/sync_deps.sh @@ -0,0 +1,33 @@ +#!/bin/bash -e +cd $(dirname $0) + +if [[ `uname` == 'Darwin' ]]; then + MAC_REQUIRED_TOOLS="node cmake ninja yasm git-lfs emcc" + for TOOL in ${MAC_REQUIRED_TOOLS[@]}; do + if [ ! $(which $TOOL) ]; then + if [ ! $(which brew) ]; then + echo "Homebrew not found. Trying to install..." + /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" || + exit 1 + fi + if [ $TOOL == 'emcc' ]; then + echo "emscripten not found. Trying to install..." + sh ./web/script/install-emscripten.sh || exit 1 + else + echo "$TOOL not found. Trying to install..." + brew install $TOOL || exit 1 + fi + fi + done +fi + +NODE_REQUIRED_TOOLS="depsync" + +for TOOL in ${NODE_REQUIRED_TOOLS[@]}; do + if [ ! $(which $TOOL) ]; then + echo "$TOOL not found. Trying to install..." 
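+    # Install the missing npm tool globally (depsync is the only one listed); stdout is discarded.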
+ npm install -g $TOOL > /dev/null + fi +done + +depsync \ No newline at end of file diff --git a/update_baseline.sh b/update_baseline.sh new file mode 100755 index 00000000..d7706cd3 --- /dev/null +++ b/update_baseline.sh @@ -0,0 +1,58 @@ +#!/bin/sh +{ + CACHE_VERSION_FILE=./test/baseline/.cache/version.json + if [ -f "$CACHE_VERSION_FILE" ]; then + HAS_DIFF=$(git diff --name-only origin/main:test/baseline/version.json $CACHE_VERSION_FILE) + if [[ ${HAS_DIFF} == "" ]]; then + exit 0 + fi + fi + echo "~~~~~~~~~~~~~~~~~~~Update Baseline Start~~~~~~~~~~~~~~~~~~~~~" + CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD) + STASH_LIST_BEFORE=$(git stash list) + git stash push --quiet + STASH_LIST_AFTER=$(git stash list) + git switch main --quiet + + if [[ $1 == "1" ]]; then + BUILD_DIR=build + else + BUILD_DIR=cmake-build-debug + fi + + if [ ! -d "./${BUILD_DIR}" ]; then + mkdir ${BUILD_DIR} + fi + cd ${BUILD_DIR} + + if [ -f "./CMakeCache.txt" ]; then + TEXT=$(cat ./CMakeCache.txt) + TEXT=${TEXT#*CMAKE_COMMAND:INTERNAL=} + for line in ${TEXT}; do + CMAKE_COMMAND=$line + break + done + fi + if [ ! -f "$CMAKE_COMMAND" ]; then + CMAKE_COMMAND="cmake" + fi + echo $CMAKE_COMMAND + + if [[ $1 == "1" ]]; then + $CMAKE_COMMAND -DCMAKE_CXX_FLAGS="-fprofile-arcs -ftest-coverage -g -O0" -DTGFX_USE_SWIFTSHADER=ON -DCMAKE_BUILD_TYPE=Debug ../ + else + $CMAKE_COMMAND -DCMAKE_BUILD_TYPE=Debug ../ + fi + + $CMAKE_COMMAND --build . --target TGFXBaseline -- -j 12 + ./TGFXBaseline + cd .. + + git switch $CURRENT_BRANCH --quiet + if [[ $STASH_LIST_BEFORE != "$STASH_LIST_AFTER" ]]; then + git stash pop --index --quiet + fi + + echo "~~~~~~~~~~~~~~~~~~~Update Baseline END~~~~~~~~~~~~~~~~~~~~~" + exit +} diff --git a/vendor.json b/vendor.json new file mode 100644 index 00000000..246e9518 --- /dev/null +++ b/vendor.json @@ -0,0 +1,161 @@ +{ + "source": "third_party", + "out": "third_party/out", + "vendors": [ + { + "name": "skcms", + "cmake": { + "targets": [ + "skcms" + ] + } + }, + { + "name": "pathkit", + "cmake": { + "targets": [ + "pathkit" + ] + } + }, + { + "name": "zlib", + "cmake": { + "targets": [ + "zlibstatic" + ], + "includes": [ + "${SOURCE_DIR}/zlib.h", + "${BUILD_DIR}/zconf.h" + ] + } + }, + { + "name": "libwebp", + "cmake": { + "targets": [ + "webp", + "webpdemux" + ] + } + }, + { + "name": "libwebp", + "cmake": { + "targets": [ + "webp", + "webpdemux" + ], + "arguments": [ + "-DWEBP_BUILD_WEBP_JS=ON" + ], + "platforms": [ + "web" + ] + } + }, + { + "name": "libjpeg-turbo", + "cmake": { + "targets": [ + "turbojpeg-static" + ], + "includes": [ + "${SOURCE_DIR}/jmorecfg.h", + "${SOURCE_DIR}/jpeglib.h", + "${SOURCE_DIR}/jerror.h", + "${BUILD_DIR}/jconfig.h" + ] + } + }, + { + "name": "libpng", + "deps": { + "ZLIB": "zlib" + }, + "cmake": { + "targets": [ + "png_static" + ], + "arguments": [ + "-DPNG_BUILD_ZLIB=ON" + ], + "includes": [ + "${SOURCE_DIR}/png.h", + "${SOURCE_DIR}/pngconf.h", + "${BUILD_DIR}/pnglibconf.h" + ], + "platforms": [ + "android", + "win", + "linux" + ] + } + }, + { + "name": "libpng", + "deps": { + "ZLIB": "zlib" + }, + "cmake": { + "targets": [ + "png_static" + ], + "arguments": [ + "-DPNG_BUILD_ZLIB=ON", + "-DPNG_ARM_NEON=on" + ], + "includes": [ + "${SOURCE_DIR}/png.h", + "${SOURCE_DIR}/pngconf.h", + "${BUILD_DIR}/pnglibconf.h" + ], + "platforms": [ + "ios", + "mac" + ] + } + }, + { + "name": "freetype", + "deps": { + "ZLIB": "zlib", + "PNG": "libpng" + }, + "cmake": { + "targets": [ + "freetype" + ], + "arguments": [ + "-DCMAKE_DISABLE_FIND_PACKAGE_BZip2=TRUE", + 
"-DCMAKE_DISABLE_FIND_PACKAGE_HarfBuzz=TRUE" + ] + } + }, + { + "name": "swiftshader", + "cmake": { + "targets": [ + "libEGL", + "libGLESv2" + ], + "platforms": [ + "linux" + ] + } + }, + { + "name": "googletest", + "cmake": { + "targets": [ + "gtest" + ], + "platforms": [ + "mac", + "win", + "linux" + ] + } + } + ] +} \ No newline at end of file diff --git a/vendor/angle/include/CL/.clang-format b/vendor/angle/include/CL/.clang-format new file mode 100644 index 00000000..06147100 --- /dev/null +++ b/vendor/angle/include/CL/.clang-format @@ -0,0 +1,3 @@ + + +DisableFormat: true diff --git a/vendor/angle/include/CL/cl.h b/vendor/angle/include/CL/cl.h new file mode 100644 index 00000000..d4d7ae08 --- /dev/null +++ b/vendor/angle/include/CL/cl.h @@ -0,0 +1,1929 @@ +/******************************************************************************* + * Copyright (c) 2008-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +#ifndef __OPENCL_CL_H +#define __OPENCL_CL_H + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/******************************************************************************/ + +typedef struct _cl_platform_id * cl_platform_id; +typedef struct _cl_device_id * cl_device_id; +typedef struct _cl_context * cl_context; +typedef struct _cl_command_queue * cl_command_queue; +typedef struct _cl_mem * cl_mem; +typedef struct _cl_program * cl_program; +typedef struct _cl_kernel * cl_kernel; +typedef struct _cl_event * cl_event; +typedef struct _cl_sampler * cl_sampler; + +typedef cl_uint cl_bool; /* WARNING! Unlike cl_ types in cl_platform.h, cl_bool is not guaranteed to be the same size as the bool in kernels. 
*/ +typedef cl_ulong cl_bitfield; +typedef cl_ulong cl_properties; +typedef cl_bitfield cl_device_type; +typedef cl_uint cl_platform_info; +typedef cl_uint cl_device_info; +typedef cl_bitfield cl_device_fp_config; +typedef cl_uint cl_device_mem_cache_type; +typedef cl_uint cl_device_local_mem_type; +typedef cl_bitfield cl_device_exec_capabilities; +#ifdef CL_VERSION_2_0 +typedef cl_bitfield cl_device_svm_capabilities; +#endif +typedef cl_bitfield cl_command_queue_properties; +#ifdef CL_VERSION_1_2 +typedef intptr_t cl_device_partition_property; +typedef cl_bitfield cl_device_affinity_domain; +#endif + +typedef intptr_t cl_context_properties; +typedef cl_uint cl_context_info; +#ifdef CL_VERSION_2_0 +typedef cl_properties cl_queue_properties; +#endif +typedef cl_uint cl_command_queue_info; +typedef cl_uint cl_channel_order; +typedef cl_uint cl_channel_type; +typedef cl_bitfield cl_mem_flags; +#ifdef CL_VERSION_2_0 +typedef cl_bitfield cl_svm_mem_flags; +#endif +typedef cl_uint cl_mem_object_type; +typedef cl_uint cl_mem_info; +#ifdef CL_VERSION_1_2 +typedef cl_bitfield cl_mem_migration_flags; +#endif +typedef cl_uint cl_image_info; +#ifdef CL_VERSION_1_1 +typedef cl_uint cl_buffer_create_type; +#endif +typedef cl_uint cl_addressing_mode; +typedef cl_uint cl_filter_mode; +typedef cl_uint cl_sampler_info; +typedef cl_bitfield cl_map_flags; +#ifdef CL_VERSION_2_0 +typedef intptr_t cl_pipe_properties; +typedef cl_uint cl_pipe_info; +#endif +typedef cl_uint cl_program_info; +typedef cl_uint cl_program_build_info; +#ifdef CL_VERSION_1_2 +typedef cl_uint cl_program_binary_type; +#endif +typedef cl_int cl_build_status; +typedef cl_uint cl_kernel_info; +#ifdef CL_VERSION_1_2 +typedef cl_uint cl_kernel_arg_info; +typedef cl_uint cl_kernel_arg_address_qualifier; +typedef cl_uint cl_kernel_arg_access_qualifier; +typedef cl_bitfield cl_kernel_arg_type_qualifier; +#endif +typedef cl_uint cl_kernel_work_group_info; +#ifdef CL_VERSION_2_1 +typedef cl_uint cl_kernel_sub_group_info; +#endif +typedef cl_uint cl_event_info; +typedef cl_uint cl_command_type; +typedef cl_uint cl_profiling_info; +#ifdef CL_VERSION_2_0 +typedef cl_properties cl_sampler_properties; +typedef cl_uint cl_kernel_exec_info; +#endif +#ifdef CL_VERSION_3_0 +typedef cl_bitfield cl_device_atomic_capabilities; +typedef cl_bitfield cl_device_device_enqueue_capabilities; +typedef cl_uint cl_khronos_vendor_id; +typedef cl_properties cl_mem_properties; +typedef cl_uint cl_version; +#endif + +typedef struct _cl_image_format { + cl_channel_order image_channel_order; + cl_channel_type image_channel_data_type; +} cl_image_format; + +#ifdef CL_VERSION_1_2 + +typedef struct _cl_image_desc { + cl_mem_object_type image_type; + size_t image_width; + size_t image_height; + size_t image_depth; + size_t image_array_size; + size_t image_row_pitch; + size_t image_slice_pitch; + cl_uint num_mip_levels; + cl_uint num_samples; +#ifdef CL_VERSION_2_0 +#if defined(__GNUC__) + __extension__ /* Prevents warnings about anonymous union in -pedantic builds */ +#endif +#if defined(_MSC_VER) && !defined(__STDC__) +#pragma warning( push ) +#pragma warning( disable : 4201 ) /* Prevents warning about nameless struct/union in /W4 builds */ +#endif +#if defined(_MSC_VER) && defined(__STDC__) + /* Anonymous unions are not supported in /Za builds */ +#else + union { +#endif +#endif + cl_mem buffer; +#ifdef CL_VERSION_2_0 +#if defined(_MSC_VER) && defined(__STDC__) + /* Anonymous unions are not supported in /Za builds */ +#else + cl_mem mem_object; + }; +#endif +#if 
defined(_MSC_VER) && !defined(__STDC__) +#pragma warning( pop ) +#endif +#endif +} cl_image_desc; + +#endif + +#ifdef CL_VERSION_1_1 + +typedef struct _cl_buffer_region { + size_t origin; + size_t size; +} cl_buffer_region; + +#endif + +#ifdef CL_VERSION_3_0 + +#define CL_NAME_VERSION_MAX_NAME_SIZE 64 + +typedef struct _cl_name_version { + cl_version version; + char name[CL_NAME_VERSION_MAX_NAME_SIZE]; +} cl_name_version; + +#endif + +/******************************************************************************/ + +/* Error Codes */ +#define CL_SUCCESS 0 +#define CL_DEVICE_NOT_FOUND -1 +#define CL_DEVICE_NOT_AVAILABLE -2 +#define CL_COMPILER_NOT_AVAILABLE -3 +#define CL_MEM_OBJECT_ALLOCATION_FAILURE -4 +#define CL_OUT_OF_RESOURCES -5 +#define CL_OUT_OF_HOST_MEMORY -6 +#define CL_PROFILING_INFO_NOT_AVAILABLE -7 +#define CL_MEM_COPY_OVERLAP -8 +#define CL_IMAGE_FORMAT_MISMATCH -9 +#define CL_IMAGE_FORMAT_NOT_SUPPORTED -10 +#define CL_BUILD_PROGRAM_FAILURE -11 +#define CL_MAP_FAILURE -12 +#ifdef CL_VERSION_1_1 +#define CL_MISALIGNED_SUB_BUFFER_OFFSET -13 +#define CL_EXEC_STATUS_ERROR_FOR_EVENTS_IN_WAIT_LIST -14 +#endif +#ifdef CL_VERSION_1_2 +#define CL_COMPILE_PROGRAM_FAILURE -15 +#define CL_LINKER_NOT_AVAILABLE -16 +#define CL_LINK_PROGRAM_FAILURE -17 +#define CL_DEVICE_PARTITION_FAILED -18 +#define CL_KERNEL_ARG_INFO_NOT_AVAILABLE -19 +#endif + +#define CL_INVALID_VALUE -30 +#define CL_INVALID_DEVICE_TYPE -31 +#define CL_INVALID_PLATFORM -32 +#define CL_INVALID_DEVICE -33 +#define CL_INVALID_CONTEXT -34 +#define CL_INVALID_QUEUE_PROPERTIES -35 +#define CL_INVALID_COMMAND_QUEUE -36 +#define CL_INVALID_HOST_PTR -37 +#define CL_INVALID_MEM_OBJECT -38 +#define CL_INVALID_IMAGE_FORMAT_DESCRIPTOR -39 +#define CL_INVALID_IMAGE_SIZE -40 +#define CL_INVALID_SAMPLER -41 +#define CL_INVALID_BINARY -42 +#define CL_INVALID_BUILD_OPTIONS -43 +#define CL_INVALID_PROGRAM -44 +#define CL_INVALID_PROGRAM_EXECUTABLE -45 +#define CL_INVALID_KERNEL_NAME -46 +#define CL_INVALID_KERNEL_DEFINITION -47 +#define CL_INVALID_KERNEL -48 +#define CL_INVALID_ARG_INDEX -49 +#define CL_INVALID_ARG_VALUE -50 +#define CL_INVALID_ARG_SIZE -51 +#define CL_INVALID_KERNEL_ARGS -52 +#define CL_INVALID_WORK_DIMENSION -53 +#define CL_INVALID_WORK_GROUP_SIZE -54 +#define CL_INVALID_WORK_ITEM_SIZE -55 +#define CL_INVALID_GLOBAL_OFFSET -56 +#define CL_INVALID_EVENT_WAIT_LIST -57 +#define CL_INVALID_EVENT -58 +#define CL_INVALID_OPERATION -59 +#define CL_INVALID_GL_OBJECT -60 +#define CL_INVALID_BUFFER_SIZE -61 +#define CL_INVALID_MIP_LEVEL -62 +#define CL_INVALID_GLOBAL_WORK_SIZE -63 +#ifdef CL_VERSION_1_1 +#define CL_INVALID_PROPERTY -64 +#endif +#ifdef CL_VERSION_1_2 +#define CL_INVALID_IMAGE_DESCRIPTOR -65 +#define CL_INVALID_COMPILER_OPTIONS -66 +#define CL_INVALID_LINKER_OPTIONS -67 +#define CL_INVALID_DEVICE_PARTITION_COUNT -68 +#endif +#ifdef CL_VERSION_2_0 +#define CL_INVALID_PIPE_SIZE -69 +#define CL_INVALID_DEVICE_QUEUE -70 +#endif +#ifdef CL_VERSION_2_2 +#define CL_INVALID_SPEC_ID -71 +#define CL_MAX_SIZE_RESTRICTION_EXCEEDED -72 +#endif + + +/* cl_bool */ +#define CL_FALSE 0 +#define CL_TRUE 1 +#ifdef CL_VERSION_1_2 +#define CL_BLOCKING CL_TRUE +#define CL_NON_BLOCKING CL_FALSE +#endif + +/* cl_platform_info */ +#define CL_PLATFORM_PROFILE 0x0900 +#define CL_PLATFORM_VERSION 0x0901 +#define CL_PLATFORM_NAME 0x0902 +#define CL_PLATFORM_VENDOR 0x0903 +#define CL_PLATFORM_EXTENSIONS 0x0904 +#ifdef CL_VERSION_2_1 +#define CL_PLATFORM_HOST_TIMER_RESOLUTION 0x0905 +#endif +#ifdef CL_VERSION_3_0 +#define 
CL_PLATFORM_NUMERIC_VERSION 0x0906 +#define CL_PLATFORM_EXTENSIONS_WITH_VERSION 0x0907 +#endif + +/* cl_device_type - bitfield */ +#define CL_DEVICE_TYPE_DEFAULT (1 << 0) +#define CL_DEVICE_TYPE_CPU (1 << 1) +#define CL_DEVICE_TYPE_GPU (1 << 2) +#define CL_DEVICE_TYPE_ACCELERATOR (1 << 3) +#ifdef CL_VERSION_1_2 +#define CL_DEVICE_TYPE_CUSTOM (1 << 4) +#endif +#define CL_DEVICE_TYPE_ALL 0xFFFFFFFF + +/* cl_device_info */ +#define CL_DEVICE_TYPE 0x1000 +#define CL_DEVICE_VENDOR_ID 0x1001 +#define CL_DEVICE_MAX_COMPUTE_UNITS 0x1002 +#define CL_DEVICE_MAX_WORK_ITEM_DIMENSIONS 0x1003 +#define CL_DEVICE_MAX_WORK_GROUP_SIZE 0x1004 +#define CL_DEVICE_MAX_WORK_ITEM_SIZES 0x1005 +#define CL_DEVICE_PREFERRED_VECTOR_WIDTH_CHAR 0x1006 +#define CL_DEVICE_PREFERRED_VECTOR_WIDTH_SHORT 0x1007 +#define CL_DEVICE_PREFERRED_VECTOR_WIDTH_INT 0x1008 +#define CL_DEVICE_PREFERRED_VECTOR_WIDTH_LONG 0x1009 +#define CL_DEVICE_PREFERRED_VECTOR_WIDTH_FLOAT 0x100A +#define CL_DEVICE_PREFERRED_VECTOR_WIDTH_DOUBLE 0x100B +#define CL_DEVICE_MAX_CLOCK_FREQUENCY 0x100C +#define CL_DEVICE_ADDRESS_BITS 0x100D +#define CL_DEVICE_MAX_READ_IMAGE_ARGS 0x100E +#define CL_DEVICE_MAX_WRITE_IMAGE_ARGS 0x100F +#define CL_DEVICE_MAX_MEM_ALLOC_SIZE 0x1010 +#define CL_DEVICE_IMAGE2D_MAX_WIDTH 0x1011 +#define CL_DEVICE_IMAGE2D_MAX_HEIGHT 0x1012 +#define CL_DEVICE_IMAGE3D_MAX_WIDTH 0x1013 +#define CL_DEVICE_IMAGE3D_MAX_HEIGHT 0x1014 +#define CL_DEVICE_IMAGE3D_MAX_DEPTH 0x1015 +#define CL_DEVICE_IMAGE_SUPPORT 0x1016 +#define CL_DEVICE_MAX_PARAMETER_SIZE 0x1017 +#define CL_DEVICE_MAX_SAMPLERS 0x1018 +#define CL_DEVICE_MEM_BASE_ADDR_ALIGN 0x1019 +#define CL_DEVICE_MIN_DATA_TYPE_ALIGN_SIZE 0x101A +#define CL_DEVICE_SINGLE_FP_CONFIG 0x101B +#define CL_DEVICE_GLOBAL_MEM_CACHE_TYPE 0x101C +#define CL_DEVICE_GLOBAL_MEM_CACHELINE_SIZE 0x101D +#define CL_DEVICE_GLOBAL_MEM_CACHE_SIZE 0x101E +#define CL_DEVICE_GLOBAL_MEM_SIZE 0x101F +#define CL_DEVICE_MAX_CONSTANT_BUFFER_SIZE 0x1020 +#define CL_DEVICE_MAX_CONSTANT_ARGS 0x1021 +#define CL_DEVICE_LOCAL_MEM_TYPE 0x1022 +#define CL_DEVICE_LOCAL_MEM_SIZE 0x1023 +#define CL_DEVICE_ERROR_CORRECTION_SUPPORT 0x1024 +#define CL_DEVICE_PROFILING_TIMER_RESOLUTION 0x1025 +#define CL_DEVICE_ENDIAN_LITTLE 0x1026 +#define CL_DEVICE_AVAILABLE 0x1027 +#define CL_DEVICE_COMPILER_AVAILABLE 0x1028 +#define CL_DEVICE_EXECUTION_CAPABILITIES 0x1029 +#define CL_DEVICE_QUEUE_PROPERTIES 0x102A /* deprecated */ +#ifdef CL_VERSION_2_0 +#define CL_DEVICE_QUEUE_ON_HOST_PROPERTIES 0x102A +#endif +#define CL_DEVICE_NAME 0x102B +#define CL_DEVICE_VENDOR 0x102C +#define CL_DRIVER_VERSION 0x102D +#define CL_DEVICE_PROFILE 0x102E +#define CL_DEVICE_VERSION 0x102F +#define CL_DEVICE_EXTENSIONS 0x1030 +#define CL_DEVICE_PLATFORM 0x1031 +#ifdef CL_VERSION_1_2 +#define CL_DEVICE_DOUBLE_FP_CONFIG 0x1032 +#endif +/* 0x1033 reserved for CL_DEVICE_HALF_FP_CONFIG which is already defined in "cl_ext.h" */ +#ifdef CL_VERSION_1_1 +#define CL_DEVICE_PREFERRED_VECTOR_WIDTH_HALF 0x1034 +#define CL_DEVICE_HOST_UNIFIED_MEMORY 0x1035 /* deprecated */ +#define CL_DEVICE_NATIVE_VECTOR_WIDTH_CHAR 0x1036 +#define CL_DEVICE_NATIVE_VECTOR_WIDTH_SHORT 0x1037 +#define CL_DEVICE_NATIVE_VECTOR_WIDTH_INT 0x1038 +#define CL_DEVICE_NATIVE_VECTOR_WIDTH_LONG 0x1039 +#define CL_DEVICE_NATIVE_VECTOR_WIDTH_FLOAT 0x103A +#define CL_DEVICE_NATIVE_VECTOR_WIDTH_DOUBLE 0x103B +#define CL_DEVICE_NATIVE_VECTOR_WIDTH_HALF 0x103C +#define CL_DEVICE_OPENCL_C_VERSION 0x103D +#endif +#ifdef CL_VERSION_1_2 +#define CL_DEVICE_LINKER_AVAILABLE 0x103E +#define CL_DEVICE_BUILT_IN_KERNELS 
0x103F +#define CL_DEVICE_IMAGE_MAX_BUFFER_SIZE 0x1040 +#define CL_DEVICE_IMAGE_MAX_ARRAY_SIZE 0x1041 +#define CL_DEVICE_PARENT_DEVICE 0x1042 +#define CL_DEVICE_PARTITION_MAX_SUB_DEVICES 0x1043 +#define CL_DEVICE_PARTITION_PROPERTIES 0x1044 +#define CL_DEVICE_PARTITION_AFFINITY_DOMAIN 0x1045 +#define CL_DEVICE_PARTITION_TYPE 0x1046 +#define CL_DEVICE_REFERENCE_COUNT 0x1047 +#define CL_DEVICE_PREFERRED_INTEROP_USER_SYNC 0x1048 +#define CL_DEVICE_PRINTF_BUFFER_SIZE 0x1049 +#endif +#ifdef CL_VERSION_2_0 +#define CL_DEVICE_IMAGE_PITCH_ALIGNMENT 0x104A +#define CL_DEVICE_IMAGE_BASE_ADDRESS_ALIGNMENT 0x104B +#define CL_DEVICE_MAX_READ_WRITE_IMAGE_ARGS 0x104C +#define CL_DEVICE_MAX_GLOBAL_VARIABLE_SIZE 0x104D +#define CL_DEVICE_QUEUE_ON_DEVICE_PROPERTIES 0x104E +#define CL_DEVICE_QUEUE_ON_DEVICE_PREFERRED_SIZE 0x104F +#define CL_DEVICE_QUEUE_ON_DEVICE_MAX_SIZE 0x1050 +#define CL_DEVICE_MAX_ON_DEVICE_QUEUES 0x1051 +#define CL_DEVICE_MAX_ON_DEVICE_EVENTS 0x1052 +#define CL_DEVICE_SVM_CAPABILITIES 0x1053 +#define CL_DEVICE_GLOBAL_VARIABLE_PREFERRED_TOTAL_SIZE 0x1054 +#define CL_DEVICE_MAX_PIPE_ARGS 0x1055 +#define CL_DEVICE_PIPE_MAX_ACTIVE_RESERVATIONS 0x1056 +#define CL_DEVICE_PIPE_MAX_PACKET_SIZE 0x1057 +#define CL_DEVICE_PREFERRED_PLATFORM_ATOMIC_ALIGNMENT 0x1058 +#define CL_DEVICE_PREFERRED_GLOBAL_ATOMIC_ALIGNMENT 0x1059 +#define CL_DEVICE_PREFERRED_LOCAL_ATOMIC_ALIGNMENT 0x105A +#endif +#ifdef CL_VERSION_2_1 +#define CL_DEVICE_IL_VERSION 0x105B +#define CL_DEVICE_MAX_NUM_SUB_GROUPS 0x105C +#define CL_DEVICE_SUB_GROUP_INDEPENDENT_FORWARD_PROGRESS 0x105D +#endif +#ifdef CL_VERSION_3_0 +#define CL_DEVICE_NUMERIC_VERSION 0x105E +#define CL_DEVICE_EXTENSIONS_WITH_VERSION 0x1060 +#define CL_DEVICE_ILS_WITH_VERSION 0x1061 +#define CL_DEVICE_BUILT_IN_KERNELS_WITH_VERSION 0x1062 +#define CL_DEVICE_ATOMIC_MEMORY_CAPABILITIES 0x1063 +#define CL_DEVICE_ATOMIC_FENCE_CAPABILITIES 0x1064 +#define CL_DEVICE_NON_UNIFORM_WORK_GROUP_SUPPORT 0x1065 +#define CL_DEVICE_OPENCL_C_ALL_VERSIONS 0x1066 +#define CL_DEVICE_PREFERRED_WORK_GROUP_SIZE_MULTIPLE 0x1067 +#define CL_DEVICE_WORK_GROUP_COLLECTIVE_FUNCTIONS_SUPPORT 0x1068 +#define CL_DEVICE_GENERIC_ADDRESS_SPACE_SUPPORT 0x1069 +/* 0x106A to 0x106E - Reserved for upcoming KHR extension */ +#define CL_DEVICE_OPENCL_C_FEATURES 0x106F +#define CL_DEVICE_DEVICE_ENQUEUE_CAPABILITIES 0x1070 +#define CL_DEVICE_PIPE_SUPPORT 0x1071 +#define CL_DEVICE_LATEST_CONFORMANCE_VERSION_PASSED 0x1072 +#endif + +/* cl_device_fp_config - bitfield */ +#define CL_FP_DENORM (1 << 0) +#define CL_FP_INF_NAN (1 << 1) +#define CL_FP_ROUND_TO_NEAREST (1 << 2) +#define CL_FP_ROUND_TO_ZERO (1 << 3) +#define CL_FP_ROUND_TO_INF (1 << 4) +#define CL_FP_FMA (1 << 5) +#ifdef CL_VERSION_1_1 +#define CL_FP_SOFT_FLOAT (1 << 6) +#endif +#ifdef CL_VERSION_1_2 +#define CL_FP_CORRECTLY_ROUNDED_DIVIDE_SQRT (1 << 7) +#endif + +/* cl_device_mem_cache_type */ +#define CL_NONE 0x0 +#define CL_READ_ONLY_CACHE 0x1 +#define CL_READ_WRITE_CACHE 0x2 + +/* cl_device_local_mem_type */ +#define CL_LOCAL 0x1 +#define CL_GLOBAL 0x2 + +/* cl_device_exec_capabilities - bitfield */ +#define CL_EXEC_KERNEL (1 << 0) +#define CL_EXEC_NATIVE_KERNEL (1 << 1) + +/* cl_command_queue_properties - bitfield */ +#define CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE (1 << 0) +#define CL_QUEUE_PROFILING_ENABLE (1 << 1) +#ifdef CL_VERSION_2_0 +#define CL_QUEUE_ON_DEVICE (1 << 2) +#define CL_QUEUE_ON_DEVICE_DEFAULT (1 << 3) +#endif + +/* cl_context_info */ +#define CL_CONTEXT_REFERENCE_COUNT 0x1080 +#define CL_CONTEXT_DEVICES 0x1081 +#define 
CL_CONTEXT_PROPERTIES 0x1082 +#ifdef CL_VERSION_1_1 +#define CL_CONTEXT_NUM_DEVICES 0x1083 +#endif + +/* cl_context_properties */ +#define CL_CONTEXT_PLATFORM 0x1084 +#ifdef CL_VERSION_1_2 +#define CL_CONTEXT_INTEROP_USER_SYNC 0x1085 +#endif + +#ifdef CL_VERSION_1_2 + +/* cl_device_partition_property */ +#define CL_DEVICE_PARTITION_EQUALLY 0x1086 +#define CL_DEVICE_PARTITION_BY_COUNTS 0x1087 +#define CL_DEVICE_PARTITION_BY_COUNTS_LIST_END 0x0 +#define CL_DEVICE_PARTITION_BY_AFFINITY_DOMAIN 0x1088 + +#endif + +#ifdef CL_VERSION_1_2 + +/* cl_device_affinity_domain */ +#define CL_DEVICE_AFFINITY_DOMAIN_NUMA (1 << 0) +#define CL_DEVICE_AFFINITY_DOMAIN_L4_CACHE (1 << 1) +#define CL_DEVICE_AFFINITY_DOMAIN_L3_CACHE (1 << 2) +#define CL_DEVICE_AFFINITY_DOMAIN_L2_CACHE (1 << 3) +#define CL_DEVICE_AFFINITY_DOMAIN_L1_CACHE (1 << 4) +#define CL_DEVICE_AFFINITY_DOMAIN_NEXT_PARTITIONABLE (1 << 5) + +#endif + +#ifdef CL_VERSION_2_0 + +/* cl_device_svm_capabilities */ +#define CL_DEVICE_SVM_COARSE_GRAIN_BUFFER (1 << 0) +#define CL_DEVICE_SVM_FINE_GRAIN_BUFFER (1 << 1) +#define CL_DEVICE_SVM_FINE_GRAIN_SYSTEM (1 << 2) +#define CL_DEVICE_SVM_ATOMICS (1 << 3) + +#endif + +/* cl_command_queue_info */ +#define CL_QUEUE_CONTEXT 0x1090 +#define CL_QUEUE_DEVICE 0x1091 +#define CL_QUEUE_REFERENCE_COUNT 0x1092 +#define CL_QUEUE_PROPERTIES 0x1093 +#ifdef CL_VERSION_2_0 +#define CL_QUEUE_SIZE 0x1094 +#endif +#ifdef CL_VERSION_2_1 +#define CL_QUEUE_DEVICE_DEFAULT 0x1095 +#endif +#ifdef CL_VERSION_3_0 +#define CL_QUEUE_PROPERTIES_ARRAY 0x1098 +#endif + +/* cl_mem_flags and cl_svm_mem_flags - bitfield */ +#define CL_MEM_READ_WRITE (1 << 0) +#define CL_MEM_WRITE_ONLY (1 << 1) +#define CL_MEM_READ_ONLY (1 << 2) +#define CL_MEM_USE_HOST_PTR (1 << 3) +#define CL_MEM_ALLOC_HOST_PTR (1 << 4) +#define CL_MEM_COPY_HOST_PTR (1 << 5) +/* reserved (1 << 6) */ +#ifdef CL_VERSION_1_2 +#define CL_MEM_HOST_WRITE_ONLY (1 << 7) +#define CL_MEM_HOST_READ_ONLY (1 << 8) +#define CL_MEM_HOST_NO_ACCESS (1 << 9) +#endif +#ifdef CL_VERSION_2_0 +#define CL_MEM_SVM_FINE_GRAIN_BUFFER (1 << 10) /* used by cl_svm_mem_flags only */ +#define CL_MEM_SVM_ATOMICS (1 << 11) /* used by cl_svm_mem_flags only */ +#define CL_MEM_KERNEL_READ_AND_WRITE (1 << 12) +#endif + +#ifdef CL_VERSION_1_2 + +/* cl_mem_migration_flags - bitfield */ +#define CL_MIGRATE_MEM_OBJECT_HOST (1 << 0) +#define CL_MIGRATE_MEM_OBJECT_CONTENT_UNDEFINED (1 << 1) + +#endif + +/* cl_channel_order */ +#define CL_R 0x10B0 +#define CL_A 0x10B1 +#define CL_RG 0x10B2 +#define CL_RA 0x10B3 +#define CL_RGB 0x10B4 +#define CL_RGBA 0x10B5 +#define CL_BGRA 0x10B6 +#define CL_ARGB 0x10B7 +#define CL_INTENSITY 0x10B8 +#define CL_LUMINANCE 0x10B9 +#ifdef CL_VERSION_1_1 +#define CL_Rx 0x10BA +#define CL_RGx 0x10BB +#define CL_RGBx 0x10BC +#endif +#ifdef CL_VERSION_1_2 +#define CL_DEPTH 0x10BD +#define CL_DEPTH_STENCIL 0x10BE +#endif +#ifdef CL_VERSION_2_0 +#define CL_sRGB 0x10BF +#define CL_sRGBx 0x10C0 +#define CL_sRGBA 0x10C1 +#define CL_sBGRA 0x10C2 +#define CL_ABGR 0x10C3 +#endif + +/* cl_channel_type */ +#define CL_SNORM_INT8 0x10D0 +#define CL_SNORM_INT16 0x10D1 +#define CL_UNORM_INT8 0x10D2 +#define CL_UNORM_INT16 0x10D3 +#define CL_UNORM_SHORT_565 0x10D4 +#define CL_UNORM_SHORT_555 0x10D5 +#define CL_UNORM_INT_101010 0x10D6 +#define CL_SIGNED_INT8 0x10D7 +#define CL_SIGNED_INT16 0x10D8 +#define CL_SIGNED_INT32 0x10D9 +#define CL_UNSIGNED_INT8 0x10DA +#define CL_UNSIGNED_INT16 0x10DB +#define CL_UNSIGNED_INT32 0x10DC +#define CL_HALF_FLOAT 0x10DD +#define CL_FLOAT 0x10DE +#ifdef CL_VERSION_1_2 
+#define CL_UNORM_INT24 0x10DF +#endif +#ifdef CL_VERSION_2_1 +#define CL_UNORM_INT_101010_2 0x10E0 +#endif + +/* cl_mem_object_type */ +#define CL_MEM_OBJECT_BUFFER 0x10F0 +#define CL_MEM_OBJECT_IMAGE2D 0x10F1 +#define CL_MEM_OBJECT_IMAGE3D 0x10F2 +#ifdef CL_VERSION_1_2 +#define CL_MEM_OBJECT_IMAGE2D_ARRAY 0x10F3 +#define CL_MEM_OBJECT_IMAGE1D 0x10F4 +#define CL_MEM_OBJECT_IMAGE1D_ARRAY 0x10F5 +#define CL_MEM_OBJECT_IMAGE1D_BUFFER 0x10F6 +#endif +#ifdef CL_VERSION_2_0 +#define CL_MEM_OBJECT_PIPE 0x10F7 +#endif + +/* cl_mem_info */ +#define CL_MEM_TYPE 0x1100 +#define CL_MEM_FLAGS 0x1101 +#define CL_MEM_SIZE 0x1102 +#define CL_MEM_HOST_PTR 0x1103 +#define CL_MEM_MAP_COUNT 0x1104 +#define CL_MEM_REFERENCE_COUNT 0x1105 +#define CL_MEM_CONTEXT 0x1106 +#ifdef CL_VERSION_1_1 +#define CL_MEM_ASSOCIATED_MEMOBJECT 0x1107 +#define CL_MEM_OFFSET 0x1108 +#endif +#ifdef CL_VERSION_2_0 +#define CL_MEM_USES_SVM_POINTER 0x1109 +#endif +#ifdef CL_VERSION_3_0 +#define CL_MEM_PROPERTIES 0x110A +#endif + +/* cl_image_info */ +#define CL_IMAGE_FORMAT 0x1110 +#define CL_IMAGE_ELEMENT_SIZE 0x1111 +#define CL_IMAGE_ROW_PITCH 0x1112 +#define CL_IMAGE_SLICE_PITCH 0x1113 +#define CL_IMAGE_WIDTH 0x1114 +#define CL_IMAGE_HEIGHT 0x1115 +#define CL_IMAGE_DEPTH 0x1116 +#ifdef CL_VERSION_1_2 +#define CL_IMAGE_ARRAY_SIZE 0x1117 +#define CL_IMAGE_BUFFER 0x1118 +#define CL_IMAGE_NUM_MIP_LEVELS 0x1119 +#define CL_IMAGE_NUM_SAMPLES 0x111A +#endif + + +/* cl_pipe_info */ +#ifdef CL_VERSION_2_0 +#define CL_PIPE_PACKET_SIZE 0x1120 +#define CL_PIPE_MAX_PACKETS 0x1121 +#endif +#ifdef CL_VERSION_3_0 +#define CL_PIPE_PROPERTIES 0x1122 +#endif + +/* cl_addressing_mode */ +#define CL_ADDRESS_NONE 0x1130 +#define CL_ADDRESS_CLAMP_TO_EDGE 0x1131 +#define CL_ADDRESS_CLAMP 0x1132 +#define CL_ADDRESS_REPEAT 0x1133 +#ifdef CL_VERSION_1_1 +#define CL_ADDRESS_MIRRORED_REPEAT 0x1134 +#endif + +/* cl_filter_mode */ +#define CL_FILTER_NEAREST 0x1140 +#define CL_FILTER_LINEAR 0x1141 + +/* cl_sampler_info */ +#define CL_SAMPLER_REFERENCE_COUNT 0x1150 +#define CL_SAMPLER_CONTEXT 0x1151 +#define CL_SAMPLER_NORMALIZED_COORDS 0x1152 +#define CL_SAMPLER_ADDRESSING_MODE 0x1153 +#define CL_SAMPLER_FILTER_MODE 0x1154 +#ifdef CL_VERSION_2_0 +/* These enumerants are for the cl_khr_mipmap_image extension. + They have since been added to cl_ext.h with an appropriate + KHR suffix, but are left here for backwards compatibility. 
*/ +#define CL_SAMPLER_MIP_FILTER_MODE 0x1155 +#define CL_SAMPLER_LOD_MIN 0x1156 +#define CL_SAMPLER_LOD_MAX 0x1157 +#endif +#ifdef CL_VERSION_3_0 +#define CL_SAMPLER_PROPERTIES 0x1158 +#endif + +/* cl_map_flags - bitfield */ +#define CL_MAP_READ (1 << 0) +#define CL_MAP_WRITE (1 << 1) +#ifdef CL_VERSION_1_2 +#define CL_MAP_WRITE_INVALIDATE_REGION (1 << 2) +#endif + +/* cl_program_info */ +#define CL_PROGRAM_REFERENCE_COUNT 0x1160 +#define CL_PROGRAM_CONTEXT 0x1161 +#define CL_PROGRAM_NUM_DEVICES 0x1162 +#define CL_PROGRAM_DEVICES 0x1163 +#define CL_PROGRAM_SOURCE 0x1164 +#define CL_PROGRAM_BINARY_SIZES 0x1165 +#define CL_PROGRAM_BINARIES 0x1166 +#ifdef CL_VERSION_1_2 +#define CL_PROGRAM_NUM_KERNELS 0x1167 +#define CL_PROGRAM_KERNEL_NAMES 0x1168 +#endif +#ifdef CL_VERSION_2_1 +#define CL_PROGRAM_IL 0x1169 +#endif +#ifdef CL_VERSION_2_2 +#define CL_PROGRAM_SCOPE_GLOBAL_CTORS_PRESENT 0x116A +#define CL_PROGRAM_SCOPE_GLOBAL_DTORS_PRESENT 0x116B +#endif + +/* cl_program_build_info */ +#define CL_PROGRAM_BUILD_STATUS 0x1181 +#define CL_PROGRAM_BUILD_OPTIONS 0x1182 +#define CL_PROGRAM_BUILD_LOG 0x1183 +#ifdef CL_VERSION_1_2 +#define CL_PROGRAM_BINARY_TYPE 0x1184 +#endif +#ifdef CL_VERSION_2_0 +#define CL_PROGRAM_BUILD_GLOBAL_VARIABLE_TOTAL_SIZE 0x1185 +#endif + +#ifdef CL_VERSION_1_2 + +/* cl_program_binary_type */ +#define CL_PROGRAM_BINARY_TYPE_NONE 0x0 +#define CL_PROGRAM_BINARY_TYPE_COMPILED_OBJECT 0x1 +#define CL_PROGRAM_BINARY_TYPE_LIBRARY 0x2 +#define CL_PROGRAM_BINARY_TYPE_EXECUTABLE 0x4 + +#endif + +/* cl_build_status */ +#define CL_BUILD_SUCCESS 0 +#define CL_BUILD_NONE -1 +#define CL_BUILD_ERROR -2 +#define CL_BUILD_IN_PROGRESS -3 + +/* cl_kernel_info */ +#define CL_KERNEL_FUNCTION_NAME 0x1190 +#define CL_KERNEL_NUM_ARGS 0x1191 +#define CL_KERNEL_REFERENCE_COUNT 0x1192 +#define CL_KERNEL_CONTEXT 0x1193 +#define CL_KERNEL_PROGRAM 0x1194 +#ifdef CL_VERSION_1_2 +#define CL_KERNEL_ATTRIBUTES 0x1195 +#endif + +#ifdef CL_VERSION_1_2 + +/* cl_kernel_arg_info */ +#define CL_KERNEL_ARG_ADDRESS_QUALIFIER 0x1196 +#define CL_KERNEL_ARG_ACCESS_QUALIFIER 0x1197 +#define CL_KERNEL_ARG_TYPE_NAME 0x1198 +#define CL_KERNEL_ARG_TYPE_QUALIFIER 0x1199 +#define CL_KERNEL_ARG_NAME 0x119A + +#endif + +#ifdef CL_VERSION_1_2 + +/* cl_kernel_arg_address_qualifier */ +#define CL_KERNEL_ARG_ADDRESS_GLOBAL 0x119B +#define CL_KERNEL_ARG_ADDRESS_LOCAL 0x119C +#define CL_KERNEL_ARG_ADDRESS_CONSTANT 0x119D +#define CL_KERNEL_ARG_ADDRESS_PRIVATE 0x119E + +#endif + +#ifdef CL_VERSION_1_2 + +/* cl_kernel_arg_access_qualifier */ +#define CL_KERNEL_ARG_ACCESS_READ_ONLY 0x11A0 +#define CL_KERNEL_ARG_ACCESS_WRITE_ONLY 0x11A1 +#define CL_KERNEL_ARG_ACCESS_READ_WRITE 0x11A2 +#define CL_KERNEL_ARG_ACCESS_NONE 0x11A3 + +#endif + +#ifdef CL_VERSION_1_2 + +/* cl_kernel_arg_type_qualifier */ +#define CL_KERNEL_ARG_TYPE_NONE 0 +#define CL_KERNEL_ARG_TYPE_CONST (1 << 0) +#define CL_KERNEL_ARG_TYPE_RESTRICT (1 << 1) +#define CL_KERNEL_ARG_TYPE_VOLATILE (1 << 2) +#ifdef CL_VERSION_2_0 +#define CL_KERNEL_ARG_TYPE_PIPE (1 << 3) +#endif + +#endif + +/* cl_kernel_work_group_info */ +#define CL_KERNEL_WORK_GROUP_SIZE 0x11B0 +#define CL_KERNEL_COMPILE_WORK_GROUP_SIZE 0x11B1 +#define CL_KERNEL_LOCAL_MEM_SIZE 0x11B2 +#define CL_KERNEL_PREFERRED_WORK_GROUP_SIZE_MULTIPLE 0x11B3 +#define CL_KERNEL_PRIVATE_MEM_SIZE 0x11B4 +#ifdef CL_VERSION_1_2 +#define CL_KERNEL_GLOBAL_WORK_SIZE 0x11B5 +#endif + +#ifdef CL_VERSION_2_1 + +/* cl_kernel_sub_group_info */ +#define CL_KERNEL_MAX_SUB_GROUP_SIZE_FOR_NDRANGE 0x2033 +#define 
CL_KERNEL_SUB_GROUP_COUNT_FOR_NDRANGE 0x2034 +#define CL_KERNEL_LOCAL_SIZE_FOR_SUB_GROUP_COUNT 0x11B8 +#define CL_KERNEL_MAX_NUM_SUB_GROUPS 0x11B9 +#define CL_KERNEL_COMPILE_NUM_SUB_GROUPS 0x11BA + +#endif + +#ifdef CL_VERSION_2_0 + +/* cl_kernel_exec_info */ +#define CL_KERNEL_EXEC_INFO_SVM_PTRS 0x11B6 +#define CL_KERNEL_EXEC_INFO_SVM_FINE_GRAIN_SYSTEM 0x11B7 + +#endif + +/* cl_event_info */ +#define CL_EVENT_COMMAND_QUEUE 0x11D0 +#define CL_EVENT_COMMAND_TYPE 0x11D1 +#define CL_EVENT_REFERENCE_COUNT 0x11D2 +#define CL_EVENT_COMMAND_EXECUTION_STATUS 0x11D3 +#ifdef CL_VERSION_1_1 +#define CL_EVENT_CONTEXT 0x11D4 +#endif + +/* cl_command_type */ +#define CL_COMMAND_NDRANGE_KERNEL 0x11F0 +#define CL_COMMAND_TASK 0x11F1 +#define CL_COMMAND_NATIVE_KERNEL 0x11F2 +#define CL_COMMAND_READ_BUFFER 0x11F3 +#define CL_COMMAND_WRITE_BUFFER 0x11F4 +#define CL_COMMAND_COPY_BUFFER 0x11F5 +#define CL_COMMAND_READ_IMAGE 0x11F6 +#define CL_COMMAND_WRITE_IMAGE 0x11F7 +#define CL_COMMAND_COPY_IMAGE 0x11F8 +#define CL_COMMAND_COPY_IMAGE_TO_BUFFER 0x11F9 +#define CL_COMMAND_COPY_BUFFER_TO_IMAGE 0x11FA +#define CL_COMMAND_MAP_BUFFER 0x11FB +#define CL_COMMAND_MAP_IMAGE 0x11FC +#define CL_COMMAND_UNMAP_MEM_OBJECT 0x11FD +#define CL_COMMAND_MARKER 0x11FE +#define CL_COMMAND_ACQUIRE_GL_OBJECTS 0x11FF +#define CL_COMMAND_RELEASE_GL_OBJECTS 0x1200 +#ifdef CL_VERSION_1_1 +#define CL_COMMAND_READ_BUFFER_RECT 0x1201 +#define CL_COMMAND_WRITE_BUFFER_RECT 0x1202 +#define CL_COMMAND_COPY_BUFFER_RECT 0x1203 +#define CL_COMMAND_USER 0x1204 +#endif +#ifdef CL_VERSION_1_2 +#define CL_COMMAND_BARRIER 0x1205 +#define CL_COMMAND_MIGRATE_MEM_OBJECTS 0x1206 +#define CL_COMMAND_FILL_BUFFER 0x1207 +#define CL_COMMAND_FILL_IMAGE 0x1208 +#endif +#ifdef CL_VERSION_2_0 +#define CL_COMMAND_SVM_FREE 0x1209 +#define CL_COMMAND_SVM_MEMCPY 0x120A +#define CL_COMMAND_SVM_MEMFILL 0x120B +#define CL_COMMAND_SVM_MAP 0x120C +#define CL_COMMAND_SVM_UNMAP 0x120D +#endif +#ifdef CL_VERSION_3_0 +#define CL_COMMAND_SVM_MIGRATE_MEM 0x120E +#endif + +/* command execution status */ +#define CL_COMPLETE 0x0 +#define CL_RUNNING 0x1 +#define CL_SUBMITTED 0x2 +#define CL_QUEUED 0x3 + +/* cl_buffer_create_type */ +#ifdef CL_VERSION_1_1 +#define CL_BUFFER_CREATE_TYPE_REGION 0x1220 +#endif + +/* cl_profiling_info */ +#define CL_PROFILING_COMMAND_QUEUED 0x1280 +#define CL_PROFILING_COMMAND_SUBMIT 0x1281 +#define CL_PROFILING_COMMAND_START 0x1282 +#define CL_PROFILING_COMMAND_END 0x1283 +#ifdef CL_VERSION_2_0 +#define CL_PROFILING_COMMAND_COMPLETE 0x1284 +#endif + +/* cl_device_atomic_capabilities - bitfield */ +#ifdef CL_VERSION_3_0 +#define CL_DEVICE_ATOMIC_ORDER_RELAXED (1 << 0) +#define CL_DEVICE_ATOMIC_ORDER_ACQ_REL (1 << 1) +#define CL_DEVICE_ATOMIC_ORDER_SEQ_CST (1 << 2) +#define CL_DEVICE_ATOMIC_SCOPE_WORK_ITEM (1 << 3) +#define CL_DEVICE_ATOMIC_SCOPE_WORK_GROUP (1 << 4) +#define CL_DEVICE_ATOMIC_SCOPE_DEVICE (1 << 5) +#define CL_DEVICE_ATOMIC_SCOPE_ALL_DEVICES (1 << 6) +#endif + +/* cl_device_device_enqueue_capabilities - bitfield */ +#ifdef CL_VERSION_3_0 +#define CL_DEVICE_QUEUE_SUPPORTED (1 << 0) +#define CL_DEVICE_QUEUE_REPLACEABLE_DEFAULT (1 << 1) +#endif + +/* cl_khronos_vendor_id */ +#define CL_KHRONOS_VENDOR_ID_CODEPLAY 0x10004 + +#ifdef CL_VERSION_3_0 + +/* cl_version */ +#define CL_VERSION_MAJOR_BITS (10) +#define CL_VERSION_MINOR_BITS (10) +#define CL_VERSION_PATCH_BITS (12) + +#define CL_VERSION_MAJOR_MASK ((1 << CL_VERSION_MAJOR_BITS) - 1) +#define CL_VERSION_MINOR_MASK ((1 << CL_VERSION_MINOR_BITS) - 1) +#define CL_VERSION_PATCH_MASK ((1 << 
CL_VERSION_PATCH_BITS) - 1) + +#define CL_VERSION_MAJOR(version) \ + ((version) >> (CL_VERSION_MINOR_BITS + CL_VERSION_PATCH_BITS)) + +#define CL_VERSION_MINOR(version) \ + (((version) >> CL_VERSION_PATCH_BITS) & CL_VERSION_MINOR_MASK) + +#define CL_VERSION_PATCH(version) ((version) & CL_VERSION_PATCH_MASK) + +#define CL_MAKE_VERSION(major, minor, patch) \ + ((((major) & CL_VERSION_MAJOR_MASK) \ + << (CL_VERSION_MINOR_BITS + CL_VERSION_PATCH_BITS)) | \ + (((minor) & CL_VERSION_MINOR_MASK) << CL_VERSION_PATCH_BITS) | \ + ((patch) & CL_VERSION_PATCH_MASK)) + +#endif + +/********************************************************************************************************/ + +/* Platform API */ +extern CL_API_ENTRY cl_int CL_API_CALL +clGetPlatformIDs(cl_uint num_entries, + cl_platform_id * platforms, + cl_uint * num_platforms) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetPlatformInfo(cl_platform_id platform, + cl_platform_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +/* Device APIs */ +extern CL_API_ENTRY cl_int CL_API_CALL +clGetDeviceIDs(cl_platform_id platform, + cl_device_type device_type, + cl_uint num_entries, + cl_device_id * devices, + cl_uint * num_devices) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetDeviceInfo(cl_device_id device, + cl_device_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +extern CL_API_ENTRY cl_int CL_API_CALL +clCreateSubDevices(cl_device_id in_device, + const cl_device_partition_property * properties, + cl_uint num_devices, + cl_device_id * out_devices, + cl_uint * num_devices_ret) CL_API_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clRetainDevice(cl_device_id device) CL_API_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clReleaseDevice(cl_device_id device) CL_API_SUFFIX__VERSION_1_2; + +#endif + +#ifdef CL_VERSION_2_1 + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetDefaultDeviceCommandQueue(cl_context context, + cl_device_id device, + cl_command_queue command_queue) CL_API_SUFFIX__VERSION_2_1; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetDeviceAndHostTimer(cl_device_id device, + cl_ulong* device_timestamp, + cl_ulong* host_timestamp) CL_API_SUFFIX__VERSION_2_1; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetHostTimer(cl_device_id device, + cl_ulong * host_timestamp) CL_API_SUFFIX__VERSION_2_1; + +#endif + +/* Context APIs */ +extern CL_API_ENTRY cl_context CL_API_CALL +clCreateContext(const cl_context_properties * properties, + cl_uint num_devices, + const cl_device_id * devices, + void (CL_CALLBACK * pfn_notify)(const char * errinfo, + const void * private_info, + size_t cb, + void * user_data), + void * user_data, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_context CL_API_CALL +clCreateContextFromType(const cl_context_properties * properties, + cl_device_type device_type, + void (CL_CALLBACK * pfn_notify)(const char * errinfo, + const void * private_info, + size_t cb, + void * user_data), + void * user_data, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clRetainContext(cl_context context) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clReleaseContext(cl_context context) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL 
+clGetContextInfo(cl_context context, + cl_context_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_3_0 + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetContextDestructorCallback(cl_context context, + void (CL_CALLBACK* pfn_notify)(cl_context context, + void* user_data), + void* user_data) CL_API_SUFFIX__VERSION_3_0; + +#endif + +/* Command Queue APIs */ + +#ifdef CL_VERSION_2_0 + +extern CL_API_ENTRY cl_command_queue CL_API_CALL +clCreateCommandQueueWithProperties(cl_context context, + cl_device_id device, + const cl_queue_properties * properties, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_2_0; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clRetainCommandQueue(cl_command_queue command_queue) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clReleaseCommandQueue(cl_command_queue command_queue) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetCommandQueueInfo(cl_command_queue command_queue, + cl_command_queue_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +/* Memory Object APIs */ +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateBuffer(cl_context context, + cl_mem_flags flags, + size_t size, + void * host_ptr, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_1 + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateSubBuffer(cl_mem buffer, + cl_mem_flags flags, + cl_buffer_create_type buffer_create_type, + const void * buffer_create_info, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_1; + +#endif + +#ifdef CL_VERSION_1_2 + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateImage(cl_context context, + cl_mem_flags flags, + const cl_image_format * image_format, + const cl_image_desc * image_desc, + void * host_ptr, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +#endif + +#ifdef CL_VERSION_2_0 + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreatePipe(cl_context context, + cl_mem_flags flags, + cl_uint pipe_packet_size, + cl_uint pipe_max_packets, + const cl_pipe_properties * properties, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_2_0; + +#endif + +#ifdef CL_VERSION_3_0 + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateBufferWithProperties(cl_context context, + const cl_mem_properties * properties, + cl_mem_flags flags, + size_t size, + void * host_ptr, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_3_0; + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateImageWithProperties(cl_context context, + const cl_mem_properties * properties, + cl_mem_flags flags, + const cl_image_format * image_format, + const cl_image_desc * image_desc, + void * host_ptr, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_3_0; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clRetainMemObject(cl_mem memobj) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clReleaseMemObject(cl_mem memobj) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetSupportedImageFormats(cl_context context, + cl_mem_flags flags, + cl_mem_object_type image_type, + cl_uint num_entries, + cl_image_format * image_formats, + cl_uint * num_image_formats) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetMemObjectInfo(cl_mem memobj, + cl_mem_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL 
+clGetImageInfo(cl_mem image, + cl_image_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_2_0 + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetPipeInfo(cl_mem pipe, + cl_pipe_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_2_0; + +#endif + +#ifdef CL_VERSION_1_1 + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetMemObjectDestructorCallback(cl_mem memobj, + void (CL_CALLBACK * pfn_notify)(cl_mem memobj, + void * user_data), + void * user_data) CL_API_SUFFIX__VERSION_1_1; + +#endif + +/* SVM Allocation APIs */ + +#ifdef CL_VERSION_2_0 + +extern CL_API_ENTRY void * CL_API_CALL +clSVMAlloc(cl_context context, + cl_svm_mem_flags flags, + size_t size, + cl_uint alignment) CL_API_SUFFIX__VERSION_2_0; + +extern CL_API_ENTRY void CL_API_CALL +clSVMFree(cl_context context, + void * svm_pointer) CL_API_SUFFIX__VERSION_2_0; + +#endif + +/* Sampler APIs */ + +#ifdef CL_VERSION_2_0 + +extern CL_API_ENTRY cl_sampler CL_API_CALL +clCreateSamplerWithProperties(cl_context context, + const cl_sampler_properties * sampler_properties, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_2_0; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clRetainSampler(cl_sampler sampler) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clReleaseSampler(cl_sampler sampler) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetSamplerInfo(cl_sampler sampler, + cl_sampler_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +/* Program Object APIs */ +extern CL_API_ENTRY cl_program CL_API_CALL +clCreateProgramWithSource(cl_context context, + cl_uint count, + const char ** strings, + const size_t * lengths, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_program CL_API_CALL +clCreateProgramWithBinary(cl_context context, + cl_uint num_devices, + const cl_device_id * device_list, + const size_t * lengths, + const unsigned char ** binaries, + cl_int * binary_status, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +extern CL_API_ENTRY cl_program CL_API_CALL +clCreateProgramWithBuiltInKernels(cl_context context, + cl_uint num_devices, + const cl_device_id * device_list, + const char * kernel_names, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +#endif + +#ifdef CL_VERSION_2_1 + +extern CL_API_ENTRY cl_program CL_API_CALL +clCreateProgramWithIL(cl_context context, + const void* il, + size_t length, + cl_int* errcode_ret) CL_API_SUFFIX__VERSION_2_1; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clRetainProgram(cl_program program) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clReleaseProgram(cl_program program) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clBuildProgram(cl_program program, + cl_uint num_devices, + const cl_device_id * device_list, + const char * options, + void (CL_CALLBACK * pfn_notify)(cl_program program, + void * user_data), + void * user_data) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +extern CL_API_ENTRY cl_int CL_API_CALL +clCompileProgram(cl_program program, + cl_uint num_devices, + const cl_device_id * device_list, + const char * options, + cl_uint num_input_headers, + const cl_program * input_headers, + const char ** header_include_names, + void (CL_CALLBACK * pfn_notify)(cl_program 
program, + void * user_data), + void * user_data) CL_API_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_program CL_API_CALL +clLinkProgram(cl_context context, + cl_uint num_devices, + const cl_device_id * device_list, + const char * options, + cl_uint num_input_programs, + const cl_program * input_programs, + void (CL_CALLBACK * pfn_notify)(cl_program program, + void * user_data), + void * user_data, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +#endif + +#ifdef CL_VERSION_2_2 + +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_2_2_DEPRECATED cl_int CL_API_CALL +clSetProgramReleaseCallback(cl_program program, + void (CL_CALLBACK * pfn_notify)(cl_program program, + void * user_data), + void * user_data) CL_EXT_SUFFIX__VERSION_2_2_DEPRECATED; + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetProgramSpecializationConstant(cl_program program, + cl_uint spec_id, + size_t spec_size, + const void* spec_value) CL_API_SUFFIX__VERSION_2_2; + +#endif + +#ifdef CL_VERSION_1_2 + +extern CL_API_ENTRY cl_int CL_API_CALL +clUnloadPlatformCompiler(cl_platform_id platform) CL_API_SUFFIX__VERSION_1_2; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetProgramInfo(cl_program program, + cl_program_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetProgramBuildInfo(cl_program program, + cl_device_id device, + cl_program_build_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +/* Kernel Object APIs */ +extern CL_API_ENTRY cl_kernel CL_API_CALL +clCreateKernel(cl_program program, + const char * kernel_name, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clCreateKernelsInProgram(cl_program program, + cl_uint num_kernels, + cl_kernel * kernels, + cl_uint * num_kernels_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_2_1 + +extern CL_API_ENTRY cl_kernel CL_API_CALL +clCloneKernel(cl_kernel source_kernel, + cl_int* errcode_ret) CL_API_SUFFIX__VERSION_2_1; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clRetainKernel(cl_kernel kernel) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clReleaseKernel(cl_kernel kernel) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetKernelArg(cl_kernel kernel, + cl_uint arg_index, + size_t arg_size, + const void * arg_value) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_2_0 + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetKernelArgSVMPointer(cl_kernel kernel, + cl_uint arg_index, + const void * arg_value) CL_API_SUFFIX__VERSION_2_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetKernelExecInfo(cl_kernel kernel, + cl_kernel_exec_info param_name, + size_t param_value_size, + const void * param_value) CL_API_SUFFIX__VERSION_2_0; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetKernelInfo(cl_kernel kernel, + cl_kernel_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetKernelArgInfo(cl_kernel kernel, + cl_uint arg_indx, + cl_kernel_arg_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_2; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetKernelWorkGroupInfo(cl_kernel kernel, + cl_device_id device, + cl_kernel_work_group_info param_name, + 
size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_2_1 + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetKernelSubGroupInfo(cl_kernel kernel, + cl_device_id device, + cl_kernel_sub_group_info param_name, + size_t input_value_size, + const void* input_value, + size_t param_value_size, + void* param_value, + size_t* param_value_size_ret) CL_API_SUFFIX__VERSION_2_1; + +#endif + +/* Event Object APIs */ +extern CL_API_ENTRY cl_int CL_API_CALL +clWaitForEvents(cl_uint num_events, + const cl_event * event_list) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetEventInfo(cl_event event, + cl_event_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_1 + +extern CL_API_ENTRY cl_event CL_API_CALL +clCreateUserEvent(cl_context context, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_1; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clRetainEvent(cl_event event) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clReleaseEvent(cl_event event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_1 + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetUserEventStatus(cl_event event, + cl_int execution_status) CL_API_SUFFIX__VERSION_1_1; + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetEventCallback(cl_event event, + cl_int command_exec_callback_type, + void (CL_CALLBACK * pfn_notify)(cl_event event, + cl_int event_command_status, + void * user_data), + void * user_data) CL_API_SUFFIX__VERSION_1_1; + +#endif + +/* Profiling APIs */ +extern CL_API_ENTRY cl_int CL_API_CALL +clGetEventProfilingInfo(cl_event event, + cl_profiling_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +/* Flush and Finish APIs */ +extern CL_API_ENTRY cl_int CL_API_CALL +clFlush(cl_command_queue command_queue) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clFinish(cl_command_queue command_queue) CL_API_SUFFIX__VERSION_1_0; + +/* Enqueued Commands APIs */ +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueReadBuffer(cl_command_queue command_queue, + cl_mem buffer, + cl_bool blocking_read, + size_t offset, + size_t size, + void * ptr, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_1 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueReadBufferRect(cl_command_queue command_queue, + cl_mem buffer, + cl_bool blocking_read, + const size_t * buffer_origin, + const size_t * host_origin, + const size_t * region, + size_t buffer_row_pitch, + size_t buffer_slice_pitch, + size_t host_row_pitch, + size_t host_slice_pitch, + void * ptr, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_1; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueWriteBuffer(cl_command_queue command_queue, + cl_mem buffer, + cl_bool blocking_write, + size_t offset, + size_t size, + const void * ptr, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_1 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueWriteBufferRect(cl_command_queue command_queue, + cl_mem buffer, + cl_bool blocking_write, + const size_t * buffer_origin, + const size_t * host_origin, + const size_t * region, + size_t 
buffer_row_pitch, + size_t buffer_slice_pitch, + size_t host_row_pitch, + size_t host_slice_pitch, + const void * ptr, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_1; + +#endif + +#ifdef CL_VERSION_1_2 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueFillBuffer(cl_command_queue command_queue, + cl_mem buffer, + const void * pattern, + size_t pattern_size, + size_t offset, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_2; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueCopyBuffer(cl_command_queue command_queue, + cl_mem src_buffer, + cl_mem dst_buffer, + size_t src_offset, + size_t dst_offset, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_1 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueCopyBufferRect(cl_command_queue command_queue, + cl_mem src_buffer, + cl_mem dst_buffer, + const size_t * src_origin, + const size_t * dst_origin, + const size_t * region, + size_t src_row_pitch, + size_t src_slice_pitch, + size_t dst_row_pitch, + size_t dst_slice_pitch, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_1; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueReadImage(cl_command_queue command_queue, + cl_mem image, + cl_bool blocking_read, + const size_t * origin, + const size_t * region, + size_t row_pitch, + size_t slice_pitch, + void * ptr, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueWriteImage(cl_command_queue command_queue, + cl_mem image, + cl_bool blocking_write, + const size_t * origin, + const size_t * region, + size_t input_row_pitch, + size_t input_slice_pitch, + const void * ptr, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueFillImage(cl_command_queue command_queue, + cl_mem image, + const void * fill_color, + const size_t * origin, + const size_t * region, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_2; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueCopyImage(cl_command_queue command_queue, + cl_mem src_image, + cl_mem dst_image, + const size_t * src_origin, + const size_t * dst_origin, + const size_t * region, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueCopyImageToBuffer(cl_command_queue command_queue, + cl_mem src_image, + cl_mem dst_buffer, + const size_t * src_origin, + const size_t * region, + size_t dst_offset, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueCopyBufferToImage(cl_command_queue command_queue, + cl_mem src_buffer, + cl_mem dst_image, + size_t src_offset, + const size_t * dst_origin, + const size_t * region, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY void * CL_API_CALL +clEnqueueMapBuffer(cl_command_queue 
command_queue, + cl_mem buffer, + cl_bool blocking_map, + cl_map_flags map_flags, + size_t offset, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY void * CL_API_CALL +clEnqueueMapImage(cl_command_queue command_queue, + cl_mem image, + cl_bool blocking_map, + cl_map_flags map_flags, + const size_t * origin, + const size_t * region, + size_t * image_row_pitch, + size_t * image_slice_pitch, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueUnmapMemObject(cl_command_queue command_queue, + cl_mem memobj, + void * mapped_ptr, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueMigrateMemObjects(cl_command_queue command_queue, + cl_uint num_mem_objects, + const cl_mem * mem_objects, + cl_mem_migration_flags flags, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_2; + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueNDRangeKernel(cl_command_queue command_queue, + cl_kernel kernel, + cl_uint work_dim, + const size_t * global_work_offset, + const size_t * global_work_size, + const size_t * local_work_size, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueNativeKernel(cl_command_queue command_queue, + void (CL_CALLBACK * user_func)(void *), + void * args, + size_t cb_args, + cl_uint num_mem_objects, + const cl_mem * mem_list, + const void ** args_mem_loc, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueMarkerWithWaitList(cl_command_queue command_queue, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueBarrierWithWaitList(cl_command_queue command_queue, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_2; + +#endif + +#ifdef CL_VERSION_2_0 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueSVMFree(cl_command_queue command_queue, + cl_uint num_svm_pointers, + void * svm_pointers[], + void (CL_CALLBACK * pfn_free_func)(cl_command_queue queue, + cl_uint num_svm_pointers, + void * svm_pointers[], + void * user_data), + void * user_data, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_2_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueSVMMemcpy(cl_command_queue command_queue, + cl_bool blocking_copy, + void * dst_ptr, + const void * src_ptr, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_2_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueSVMMemFill(cl_command_queue command_queue, + void * svm_ptr, + const void * pattern, + size_t pattern_size, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_2_0; + +extern CL_API_ENTRY cl_int 
CL_API_CALL +clEnqueueSVMMap(cl_command_queue command_queue, + cl_bool blocking_map, + cl_map_flags flags, + void * svm_ptr, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_2_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueSVMUnmap(cl_command_queue command_queue, + void * svm_ptr, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_2_0; + +#endif + +#ifdef CL_VERSION_2_1 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueSVMMigrateMem(cl_command_queue command_queue, + cl_uint num_svm_pointers, + const void ** svm_pointers, + const size_t * sizes, + cl_mem_migration_flags flags, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_2_1; + +#endif + +#ifdef CL_VERSION_1_2 + +/* Extension function access + * + * Returns the extension function address for the given function name, + * or NULL if a valid function can not be found. The client must + * check to make sure the address is not NULL, before using or + * calling the returned function address. + */ +extern CL_API_ENTRY void * CL_API_CALL +clGetExtensionFunctionAddressForPlatform(cl_platform_id platform, + const char * func_name) CL_API_SUFFIX__VERSION_1_2; + +#endif + +#ifdef CL_USE_DEPRECATED_OPENCL_1_0_APIS + /* + * WARNING: + * This API introduces mutable state into the OpenCL implementation. It has been REMOVED + * to better facilitate thread safety. The 1.0 API is not thread safe. It is not tested by the + * OpenCL 1.1 conformance test, and consequently may not work or may not work dependably. + * It is likely to be non-performant. Use of this API is not advised. Use at your own risk. + * + * Software developers previously relying on this API are instructed to set the command queue + * properties when creating the queue, instead. 
+ */ + extern CL_API_ENTRY cl_int CL_API_CALL + clSetCommandQueueProperty(cl_command_queue command_queue, + cl_command_queue_properties properties, + cl_bool enable, + cl_command_queue_properties * old_properties) CL_EXT_SUFFIX__VERSION_1_0_DEPRECATED; +#endif /* CL_USE_DEPRECATED_OPENCL_1_0_APIS */ + +/* Deprecated OpenCL 1.1 APIs */ +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_mem CL_API_CALL +clCreateImage2D(cl_context context, + cl_mem_flags flags, + const cl_image_format * image_format, + size_t image_width, + size_t image_height, + size_t image_row_pitch, + void * host_ptr, + cl_int * errcode_ret) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_mem CL_API_CALL +clCreateImage3D(cl_context context, + cl_mem_flags flags, + const cl_image_format * image_format, + size_t image_width, + size_t image_height, + size_t image_depth, + size_t image_row_pitch, + size_t image_slice_pitch, + void * host_ptr, + cl_int * errcode_ret) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int CL_API_CALL +clEnqueueMarker(cl_command_queue command_queue, + cl_event * event) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int CL_API_CALL +clEnqueueWaitForEvents(cl_command_queue command_queue, + cl_uint num_events, + const cl_event * event_list) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int CL_API_CALL +clEnqueueBarrier(cl_command_queue command_queue) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int CL_API_CALL +clUnloadCompiler(void) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_1_DEPRECATED void * CL_API_CALL +clGetExtensionFunctionAddress(const char * func_name) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +/* Deprecated OpenCL 2.0 APIs */ +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_2_DEPRECATED cl_command_queue CL_API_CALL +clCreateCommandQueue(cl_context context, + cl_device_id device, + cl_command_queue_properties properties, + cl_int * errcode_ret) CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED; + +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_2_DEPRECATED cl_sampler CL_API_CALL +clCreateSampler(cl_context context, + cl_bool normalized_coords, + cl_addressing_mode addressing_mode, + cl_filter_mode filter_mode, + cl_int * errcode_ret) CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED; + +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_2_DEPRECATED cl_int CL_API_CALL +clEnqueueTask(cl_command_queue command_queue, + cl_kernel kernel, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED; + +#ifdef __cplusplus +} +#endif + +#endif /* __OPENCL_CL_H */ diff --git a/vendor/angle/include/CL/cl_d3d10.h b/vendor/angle/include/CL/cl_d3d10.h new file mode 100644 index 00000000..2b80d90c --- /dev/null +++ b/vendor/angle/include/CL/cl_d3d10.h @@ -0,0 +1,128 @@ +/******************************************************************************* + * Copyright (c) 2008-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +#ifndef __OPENCL_CL_D3D10_H +#define __OPENCL_CL_D3D10_H + +#if defined(_MSC_VER) +#if _MSC_VER >=1500 +#pragma warning( push ) +#pragma warning( disable : 4201 ) +#endif +#endif +#include +#if defined(_MSC_VER) +#if _MSC_VER >=1500 +#pragma warning( pop ) +#endif +#endif +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/****************************************************************************** + * cl_khr_d3d10_sharing */ +#define cl_khr_d3d10_sharing 1 + +typedef cl_uint cl_d3d10_device_source_khr; +typedef cl_uint cl_d3d10_device_set_khr; + +/******************************************************************************/ + +/* Error Codes */ +#define CL_INVALID_D3D10_DEVICE_KHR -1002 +#define CL_INVALID_D3D10_RESOURCE_KHR -1003 +#define CL_D3D10_RESOURCE_ALREADY_ACQUIRED_KHR -1004 +#define CL_D3D10_RESOURCE_NOT_ACQUIRED_KHR -1005 + +/* cl_d3d10_device_source_nv */ +#define CL_D3D10_DEVICE_KHR 0x4010 +#define CL_D3D10_DXGI_ADAPTER_KHR 0x4011 + +/* cl_d3d10_device_set_nv */ +#define CL_PREFERRED_DEVICES_FOR_D3D10_KHR 0x4012 +#define CL_ALL_DEVICES_FOR_D3D10_KHR 0x4013 + +/* cl_context_info */ +#define CL_CONTEXT_D3D10_DEVICE_KHR 0x4014 +#define CL_CONTEXT_D3D10_PREFER_SHARED_RESOURCES_KHR 0x402C + +/* cl_mem_info */ +#define CL_MEM_D3D10_RESOURCE_KHR 0x4015 + +/* cl_image_info */ +#define CL_IMAGE_D3D10_SUBRESOURCE_KHR 0x4016 + +/* cl_command_type */ +#define CL_COMMAND_ACQUIRE_D3D10_OBJECTS_KHR 0x4017 +#define CL_COMMAND_RELEASE_D3D10_OBJECTS_KHR 0x4018 + +/******************************************************************************/ + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clGetDeviceIDsFromD3D10KHR_fn)( + cl_platform_id platform, + cl_d3d10_device_source_khr d3d_device_source, + void * d3d_object, + cl_d3d10_device_set_khr d3d_device_set, + cl_uint num_entries, + cl_device_id * devices, + cl_uint * num_devices) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_mem (CL_API_CALL *clCreateFromD3D10BufferKHR_fn)( + cl_context context, + cl_mem_flags flags, + ID3D10Buffer * resource, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_mem (CL_API_CALL *clCreateFromD3D10Texture2DKHR_fn)( + cl_context context, + cl_mem_flags flags, + ID3D10Texture2D * resource, + UINT subresource, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_mem (CL_API_CALL *clCreateFromD3D10Texture3DKHR_fn)( + cl_context context, + cl_mem_flags flags, + ID3D10Texture3D * resource, + UINT subresource, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueAcquireD3D10ObjectsKHR_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueReleaseD3D10ObjectsKHR_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint 
num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef __cplusplus +} +#endif + +#endif /* __OPENCL_CL_D3D10_H */ + diff --git a/vendor/angle/include/CL/cl_d3d11.h b/vendor/angle/include/CL/cl_d3d11.h new file mode 100644 index 00000000..10023dde --- /dev/null +++ b/vendor/angle/include/CL/cl_d3d11.h @@ -0,0 +1,128 @@ +/******************************************************************************* + * Copyright (c) 2008-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +#ifndef __OPENCL_CL_D3D11_H +#define __OPENCL_CL_D3D11_H + +#if defined(_MSC_VER) +#if _MSC_VER >=1500 +#pragma warning( push ) +#pragma warning( disable : 4201 ) +#endif +#endif +#include +#if defined(_MSC_VER) +#if _MSC_VER >=1500 +#pragma warning( pop ) +#endif +#endif +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/****************************************************************************** + * cl_khr_d3d11_sharing */ +#define cl_khr_d3d11_sharing 1 + +typedef cl_uint cl_d3d11_device_source_khr; +typedef cl_uint cl_d3d11_device_set_khr; + +/******************************************************************************/ + +/* Error Codes */ +#define CL_INVALID_D3D11_DEVICE_KHR -1006 +#define CL_INVALID_D3D11_RESOURCE_KHR -1007 +#define CL_D3D11_RESOURCE_ALREADY_ACQUIRED_KHR -1008 +#define CL_D3D11_RESOURCE_NOT_ACQUIRED_KHR -1009 + +/* cl_d3d11_device_source */ +#define CL_D3D11_DEVICE_KHR 0x4019 +#define CL_D3D11_DXGI_ADAPTER_KHR 0x401A + +/* cl_d3d11_device_set */ +#define CL_PREFERRED_DEVICES_FOR_D3D11_KHR 0x401B +#define CL_ALL_DEVICES_FOR_D3D11_KHR 0x401C + +/* cl_context_info */ +#define CL_CONTEXT_D3D11_DEVICE_KHR 0x401D +#define CL_CONTEXT_D3D11_PREFER_SHARED_RESOURCES_KHR 0x402D + +/* cl_mem_info */ +#define CL_MEM_D3D11_RESOURCE_KHR 0x401E + +/* cl_image_info */ +#define CL_IMAGE_D3D11_SUBRESOURCE_KHR 0x401F + +/* cl_command_type */ +#define CL_COMMAND_ACQUIRE_D3D11_OBJECTS_KHR 0x4020 +#define CL_COMMAND_RELEASE_D3D11_OBJECTS_KHR 0x4021 + +/******************************************************************************/ + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clGetDeviceIDsFromD3D11KHR_fn)( + cl_platform_id platform, + cl_d3d11_device_source_khr d3d_device_source, + void * d3d_object, + cl_d3d11_device_set_khr d3d_device_set, + cl_uint num_entries, + cl_device_id * devices, + cl_uint * num_devices) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_mem (CL_API_CALL *clCreateFromD3D11BufferKHR_fn)( + cl_context context, + cl_mem_flags flags, + ID3D11Buffer * resource, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_mem (CL_API_CALL *clCreateFromD3D11Texture2DKHR_fn)( + cl_context context, + cl_mem_flags flags, + ID3D11Texture2D * resource, + UINT subresource, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_mem (CL_API_CALL 
*clCreateFromD3D11Texture3DKHR_fn)( + cl_context context, + cl_mem_flags flags, + ID3D11Texture3D * resource, + UINT subresource, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueAcquireD3D11ObjectsKHR_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueReleaseD3D11ObjectsKHR_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_2; + +#ifdef __cplusplus +} +#endif + +#endif /* __OPENCL_CL_D3D11_H */ + diff --git a/vendor/angle/include/CL/cl_dx9_media_sharing.h b/vendor/angle/include/CL/cl_dx9_media_sharing.h new file mode 100644 index 00000000..4e64b189 --- /dev/null +++ b/vendor/angle/include/CL/cl_dx9_media_sharing.h @@ -0,0 +1,229 @@ +/******************************************************************************* + * Copyright (c) 2008-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +#ifndef __OPENCL_CL_DX9_MEDIA_SHARING_H +#define __OPENCL_CL_DX9_MEDIA_SHARING_H + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/******************************************************************************/ +/* cl_khr_dx9_media_sharing */ +#define cl_khr_dx9_media_sharing 1 + +typedef cl_uint cl_dx9_media_adapter_type_khr; +typedef cl_uint cl_dx9_media_adapter_set_khr; + +#if defined(_WIN32) +#include +typedef struct _cl_dx9_surface_info_khr +{ + IDirect3DSurface9 *resource; + HANDLE shared_handle; +} cl_dx9_surface_info_khr; +#endif + + +/******************************************************************************/ + +/* Error Codes */ +#define CL_INVALID_DX9_MEDIA_ADAPTER_KHR -1010 +#define CL_INVALID_DX9_MEDIA_SURFACE_KHR -1011 +#define CL_DX9_MEDIA_SURFACE_ALREADY_ACQUIRED_KHR -1012 +#define CL_DX9_MEDIA_SURFACE_NOT_ACQUIRED_KHR -1013 + +/* cl_media_adapter_type_khr */ +#define CL_ADAPTER_D3D9_KHR 0x2020 +#define CL_ADAPTER_D3D9EX_KHR 0x2021 +#define CL_ADAPTER_DXVA_KHR 0x2022 + +/* cl_media_adapter_set_khr */ +#define CL_PREFERRED_DEVICES_FOR_DX9_MEDIA_ADAPTER_KHR 0x2023 +#define CL_ALL_DEVICES_FOR_DX9_MEDIA_ADAPTER_KHR 0x2024 + +/* cl_context_info */ +#define CL_CONTEXT_ADAPTER_D3D9_KHR 0x2025 +#define CL_CONTEXT_ADAPTER_D3D9EX_KHR 0x2026 +#define CL_CONTEXT_ADAPTER_DXVA_KHR 0x2027 + +/* cl_mem_info */ +#define CL_MEM_DX9_MEDIA_ADAPTER_TYPE_KHR 0x2028 +#define CL_MEM_DX9_MEDIA_SURFACE_INFO_KHR 0x2029 + +/* cl_image_info */ +#define CL_IMAGE_DX9_MEDIA_PLANE_KHR 0x202A + +/* cl_command_type */ +#define CL_COMMAND_ACQUIRE_DX9_MEDIA_SURFACES_KHR 0x202B +#define CL_COMMAND_RELEASE_DX9_MEDIA_SURFACES_KHR 0x202C + 
+/******************************************************************************/ + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clGetDeviceIDsFromDX9MediaAdapterKHR_fn)( + cl_platform_id platform, + cl_uint num_media_adapters, + cl_dx9_media_adapter_type_khr * media_adapter_type, + void * media_adapters, + cl_dx9_media_adapter_set_khr media_adapter_set, + cl_uint num_entries, + cl_device_id * devices, + cl_uint * num_devices) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_mem (CL_API_CALL *clCreateFromDX9MediaSurfaceKHR_fn)( + cl_context context, + cl_mem_flags flags, + cl_dx9_media_adapter_type_khr adapter_type, + void * surface_info, + cl_uint plane, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueAcquireDX9MediaSurfacesKHR_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueReleaseDX9MediaSurfacesKHR_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_2; + +/*************************************** +* cl_intel_dx9_media_sharing extension * +****************************************/ + +#define cl_intel_dx9_media_sharing 1 + +typedef cl_uint cl_dx9_device_source_intel; +typedef cl_uint cl_dx9_device_set_intel; + +/* error codes */ +#define CL_INVALID_DX9_DEVICE_INTEL -1010 +#define CL_INVALID_DX9_RESOURCE_INTEL -1011 +#define CL_DX9_RESOURCE_ALREADY_ACQUIRED_INTEL -1012 +#define CL_DX9_RESOURCE_NOT_ACQUIRED_INTEL -1013 + +/* cl_dx9_device_source_intel */ +#define CL_D3D9_DEVICE_INTEL 0x4022 +#define CL_D3D9EX_DEVICE_INTEL 0x4070 +#define CL_DXVA_DEVICE_INTEL 0x4071 + +/* cl_dx9_device_set_intel */ +#define CL_PREFERRED_DEVICES_FOR_DX9_INTEL 0x4024 +#define CL_ALL_DEVICES_FOR_DX9_INTEL 0x4025 + +/* cl_context_info */ +#define CL_CONTEXT_D3D9_DEVICE_INTEL 0x4026 +#define CL_CONTEXT_D3D9EX_DEVICE_INTEL 0x4072 +#define CL_CONTEXT_DXVA_DEVICE_INTEL 0x4073 + +/* cl_mem_info */ +#define CL_MEM_DX9_RESOURCE_INTEL 0x4027 +#define CL_MEM_DX9_SHARED_HANDLE_INTEL 0x4074 + +/* cl_image_info */ +#define CL_IMAGE_DX9_PLANE_INTEL 0x4075 + +/* cl_command_type */ +#define CL_COMMAND_ACQUIRE_DX9_OBJECTS_INTEL 0x402A +#define CL_COMMAND_RELEASE_DX9_OBJECTS_INTEL 0x402B +/******************************************************************************/ + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetDeviceIDsFromDX9INTEL( + cl_platform_id platform, + cl_dx9_device_source_intel dx9_device_source, + void* dx9_object, + cl_dx9_device_set_intel dx9_device_set, + cl_uint num_entries, + cl_device_id* devices, + cl_uint* num_devices) CL_EXT_SUFFIX__VERSION_1_1; + +typedef CL_API_ENTRY cl_int (CL_API_CALL* clGetDeviceIDsFromDX9INTEL_fn)( + cl_platform_id platform, + cl_dx9_device_source_intel dx9_device_source, + void* dx9_object, + cl_dx9_device_set_intel dx9_device_set, + cl_uint num_entries, + cl_device_id* devices, + cl_uint* num_devices) CL_EXT_SUFFIX__VERSION_1_1; + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateFromDX9MediaSurfaceINTEL( + cl_context context, + cl_mem_flags flags, + IDirect3DSurface9* resource, + HANDLE sharedHandle, + UINT plane, + cl_int* errcode_ret) CL_EXT_SUFFIX__VERSION_1_1; + +typedef CL_API_ENTRY cl_mem (CL_API_CALL *clCreateFromDX9MediaSurfaceINTEL_fn)( + 
cl_context context, + cl_mem_flags flags, + IDirect3DSurface9* resource, + HANDLE sharedHandle, + UINT plane, + cl_int* errcode_ret) CL_EXT_SUFFIX__VERSION_1_1; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueAcquireDX9ObjectsINTEL( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem* mem_objects, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event) CL_EXT_SUFFIX__VERSION_1_1; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueAcquireDX9ObjectsINTEL_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem* mem_objects, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event) CL_EXT_SUFFIX__VERSION_1_1; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueReleaseDX9ObjectsINTEL( + cl_command_queue command_queue, + cl_uint num_objects, + cl_mem* mem_objects, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event) CL_EXT_SUFFIX__VERSION_1_1; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueReleaseDX9ObjectsINTEL_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + cl_mem* mem_objects, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event) CL_EXT_SUFFIX__VERSION_1_1; + +#ifdef __cplusplus +} +#endif + +#endif /* __OPENCL_CL_DX9_MEDIA_SHARING_H */ + diff --git a/vendor/angle/include/CL/cl_dx9_media_sharing_intel.h b/vendor/angle/include/CL/cl_dx9_media_sharing_intel.h new file mode 100644 index 00000000..f6518d7f --- /dev/null +++ b/vendor/angle/include/CL/cl_dx9_media_sharing_intel.h @@ -0,0 +1,18 @@ +/******************************************************************************* + * Copyright (c) 2008-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +#include +#pragma message("The Intel DX9 media sharing extensions have been moved into cl_dx9_media_sharing.h. Please include cl_dx9_media_sharing.h directly.") diff --git a/vendor/angle/include/CL/cl_egl.h b/vendor/angle/include/CL/cl_egl.h new file mode 100644 index 00000000..c8bde80e --- /dev/null +++ b/vendor/angle/include/CL/cl_egl.h @@ -0,0 +1,120 @@ +/******************************************************************************* + * Copyright (c) 2008-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ + +#ifndef __OPENCL_CL_EGL_H +#define __OPENCL_CL_EGL_H + +#include + +#ifdef __cplusplus +extern "C" { +#endif + + +/* Command type for events created with clEnqueueAcquireEGLObjectsKHR */ +#define CL_COMMAND_EGL_FENCE_SYNC_OBJECT_KHR 0x202F +#define CL_COMMAND_ACQUIRE_EGL_OBJECTS_KHR 0x202D +#define CL_COMMAND_RELEASE_EGL_OBJECTS_KHR 0x202E + +/* Error type for clCreateFromEGLImageKHR */ +#define CL_INVALID_EGL_OBJECT_KHR -1093 +#define CL_EGL_RESOURCE_NOT_ACQUIRED_KHR -1092 + +/* CLeglImageKHR is an opaque handle to an EGLImage */ +typedef void* CLeglImageKHR; + +/* CLeglDisplayKHR is an opaque handle to an EGLDisplay */ +typedef void* CLeglDisplayKHR; + +/* CLeglSyncKHR is an opaque handle to an EGLSync object */ +typedef void* CLeglSyncKHR; + +/* properties passed to clCreateFromEGLImageKHR */ +typedef intptr_t cl_egl_image_properties_khr; + + +#define cl_khr_egl_image 1 + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateFromEGLImageKHR(cl_context context, + CLeglDisplayKHR egldisplay, + CLeglImageKHR eglimage, + cl_mem_flags flags, + const cl_egl_image_properties_khr * properties, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_mem (CL_API_CALL *clCreateFromEGLImageKHR_fn)( + cl_context context, + CLeglDisplayKHR egldisplay, + CLeglImageKHR eglimage, + cl_mem_flags flags, + const cl_egl_image_properties_khr * properties, + cl_int * errcode_ret); + + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueAcquireEGLObjectsKHR(cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueAcquireEGLObjectsKHR_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event); + + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueReleaseEGLObjectsKHR(cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueReleaseEGLObjectsKHR_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event); + + +#define cl_khr_egl_event 1 + +extern CL_API_ENTRY cl_event CL_API_CALL +clCreateEventFromEGLSyncKHR(cl_context context, + CLeglSyncKHR sync, + CLeglDisplayKHR display, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_event (CL_API_CALL *clCreateEventFromEGLSyncKHR_fn)( + cl_context context, + CLeglSyncKHR sync, + CLeglDisplayKHR display, + cl_int * errcode_ret); + +#ifdef __cplusplus +} +#endif + +#endif /* __OPENCL_CL_EGL_H */ diff --git a/vendor/angle/include/CL/cl_ext.h b/vendor/angle/include/CL/cl_ext.h new file mode 100644 index 00000000..a1087e12 --- /dev/null +++ b/vendor/angle/include/CL/cl_ext.h @@ -0,0 +1,1626 @@ +/******************************************************************************* + * Copyright (c) 2008-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +/* cl_ext.h contains OpenCL extensions which don't have external */ +/* (OpenGL, D3D) dependencies. */ + +#ifndef __CL_EXT_H +#define __CL_EXT_H + +#ifdef __cplusplus +extern "C" { +#endif + +#include + +/* cl_khr_fp64 extension - no extension #define since it has no functions */ +/* CL_DEVICE_DOUBLE_FP_CONFIG is defined in CL.h for OpenCL >= 120 */ + +#if CL_TARGET_OPENCL_VERSION <= 110 +#define CL_DEVICE_DOUBLE_FP_CONFIG 0x1032 +#endif + +/* cl_khr_fp16 extension - no extension #define since it has no functions */ +#define CL_DEVICE_HALF_FP_CONFIG 0x1033 + +/* Memory object destruction + * + * Apple extension for use to manage externally allocated buffers used with cl_mem objects with CL_MEM_USE_HOST_PTR + * + * Registers a user callback function that will be called when the memory object is deleted and its resources + * freed. Each call to clSetMemObjectCallbackFn registers the specified user callback function on a callback + * stack associated with memobj. The registered user callback functions are called in the reverse order in + * which they were registered. The user callback functions are called and then the memory object is deleted + * and its resources freed. This provides a mechanism for the application (and libraries) using memobj to be + * notified when the memory referenced by host_ptr, specified when the memory object is created and used as + * the storage bits for the memory object, can be reused or freed. + * + * The application may not call CL api's with the cl_mem object passed to the pfn_notify. + * + * Please check for the "cl_APPLE_SetMemObjectDestructor" extension using clGetDeviceInfo(CL_DEVICE_EXTENSIONS) + * before using. + */ +#define cl_APPLE_SetMemObjectDestructor 1 +cl_int CL_API_ENTRY clSetMemObjectDestructorAPPLE( cl_mem memobj, + void (* pfn_notify)(cl_mem memobj, void * user_data), + void * user_data) CL_EXT_SUFFIX__VERSION_1_0; + + +/* Context Logging Functions + * + * The next three convenience functions are intended to be used as the pfn_notify parameter to clCreateContext(). + * Please check for the "cl_APPLE_ContextLoggingFunctions" extension using clGetDeviceInfo(CL_DEVICE_EXTENSIONS) + * before using. 
+ * + * clLogMessagesToSystemLog forwards on all log messages to the Apple System Logger + */ +#define cl_APPLE_ContextLoggingFunctions 1 +extern void CL_API_ENTRY clLogMessagesToSystemLogAPPLE( const char * errstr, + const void * private_info, + size_t cb, + void * user_data) CL_EXT_SUFFIX__VERSION_1_0; + +/* clLogMessagesToStdout sends all log messages to the file descriptor stdout */ +extern void CL_API_ENTRY clLogMessagesToStdoutAPPLE( const char * errstr, + const void * private_info, + size_t cb, + void * user_data) CL_EXT_SUFFIX__VERSION_1_0; + +/* clLogMessagesToStderr sends all log messages to the file descriptor stderr */ +extern void CL_API_ENTRY clLogMessagesToStderrAPPLE( const char * errstr, + const void * private_info, + size_t cb, + void * user_data) CL_EXT_SUFFIX__VERSION_1_0; + + +/************************ +* cl_khr_icd extension * +************************/ +#define cl_khr_icd 1 + +/* cl_platform_info */ +#define CL_PLATFORM_ICD_SUFFIX_KHR 0x0920 + +/* Additional Error Codes */ +#define CL_PLATFORM_NOT_FOUND_KHR -1001 + +extern CL_API_ENTRY cl_int CL_API_CALL +clIcdGetPlatformIDsKHR(cl_uint num_entries, + cl_platform_id * platforms, + cl_uint * num_platforms); + +typedef CL_API_ENTRY cl_int +(CL_API_CALL *clIcdGetPlatformIDsKHR_fn)(cl_uint num_entries, + cl_platform_id * platforms, + cl_uint * num_platforms); + + +/******************************* + * cl_khr_il_program extension * + *******************************/ +#define cl_khr_il_program 1 + +/* New property to clGetDeviceInfo for retrieving supported intermediate + * languages + */ +#define CL_DEVICE_IL_VERSION_KHR 0x105B + +/* New property to clGetProgramInfo for retrieving for retrieving the IL of a + * program + */ +#define CL_PROGRAM_IL_KHR 0x1169 + +extern CL_API_ENTRY cl_program CL_API_CALL +clCreateProgramWithILKHR(cl_context context, + const void * il, + size_t length, + cl_int * errcode_ret); + +typedef CL_API_ENTRY cl_program +(CL_API_CALL *clCreateProgramWithILKHR_fn)(cl_context context, + const void * il, + size_t length, + cl_int * errcode_ret) CL_EXT_SUFFIX__VERSION_1_2; + +/* Extension: cl_khr_image2d_from_buffer + * + * This extension allows a 2D image to be created from a cl_mem buffer without + * a copy. The type associated with a 2D image created from a buffer in an + * OpenCL program is image2d_t. Both the sampler and sampler-less read_image + * built-in functions are supported for 2D images and 2D images created from + * a buffer. Similarly, the write_image built-ins are also supported for 2D + * images created from a buffer. + * + * When the 2D image from buffer is created, the client must specify the + * width, height, image format (i.e. channel order and channel data type) + * and optionally the row pitch. + * + * The pitch specified must be a multiple of + * CL_DEVICE_IMAGE_PITCH_ALIGNMENT_KHR pixels. + * The base address of the buffer must be aligned to + * CL_DEVICE_IMAGE_BASE_ADDRESS_ALIGNMENT_KHR pixels. 
+ */ + +#define CL_DEVICE_IMAGE_PITCH_ALIGNMENT_KHR 0x104A +#define CL_DEVICE_IMAGE_BASE_ADDRESS_ALIGNMENT_KHR 0x104B + + +/************************************** + * cl_khr_initialize_memory extension * + **************************************/ + +#define CL_CONTEXT_MEMORY_INITIALIZE_KHR 0x2030 + + +/************************************** + * cl_khr_terminate_context extension * + **************************************/ + +#define CL_CONTEXT_TERMINATED_KHR -1121 + +#define CL_DEVICE_TERMINATE_CAPABILITY_KHR 0x2031 +#define CL_CONTEXT_TERMINATE_KHR 0x2032 + +#define cl_khr_terminate_context 1 +extern CL_API_ENTRY cl_int CL_API_CALL +clTerminateContextKHR(cl_context context) CL_EXT_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int +(CL_API_CALL *clTerminateContextKHR_fn)(cl_context context) CL_EXT_SUFFIX__VERSION_1_2; + + +/* + * Extension: cl_khr_spir + * + * This extension adds support to create an OpenCL program object from a + * Standard Portable Intermediate Representation (SPIR) instance + */ + +#define CL_DEVICE_SPIR_VERSIONS 0x40E0 +#define CL_PROGRAM_BINARY_TYPE_INTERMEDIATE 0x40E1 + + +/***************************************** + * cl_khr_create_command_queue extension * + *****************************************/ +#define cl_khr_create_command_queue 1 + +typedef cl_properties cl_queue_properties_khr; + +extern CL_API_ENTRY cl_command_queue CL_API_CALL +clCreateCommandQueueWithPropertiesKHR(cl_context context, + cl_device_id device, + const cl_queue_properties_khr* properties, + cl_int* errcode_ret) CL_EXT_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_command_queue +(CL_API_CALL *clCreateCommandQueueWithPropertiesKHR_fn)(cl_context context, + cl_device_id device, + const cl_queue_properties_khr* properties, + cl_int* errcode_ret) CL_EXT_SUFFIX__VERSION_1_2; + + +/****************************************** +* cl_nv_device_attribute_query extension * +******************************************/ + +/* cl_nv_device_attribute_query extension - no extension #define since it has no functions */ +#define CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV 0x4000 +#define CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV 0x4001 +#define CL_DEVICE_REGISTERS_PER_BLOCK_NV 0x4002 +#define CL_DEVICE_WARP_SIZE_NV 0x4003 +#define CL_DEVICE_GPU_OVERLAP_NV 0x4004 +#define CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV 0x4005 +#define CL_DEVICE_INTEGRATED_MEMORY_NV 0x4006 + + +/********************************* +* cl_amd_device_attribute_query * +*********************************/ + +#define CL_DEVICE_PROFILING_TIMER_OFFSET_AMD 0x4036 +#define CL_DEVICE_TOPOLOGY_AMD 0x4037 +#define CL_DEVICE_BOARD_NAME_AMD 0x4038 +#define CL_DEVICE_GLOBAL_FREE_MEMORY_AMD 0x4039 +#define CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD 0x4040 +#define CL_DEVICE_SIMD_WIDTH_AMD 0x4041 +#define CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD 0x4042 +#define CL_DEVICE_WAVEFRONT_WIDTH_AMD 0x4043 +#define CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD 0x4044 +#define CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD 0x4045 +#define CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD 0x4046 +#define CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD 0x4047 +#define CL_DEVICE_LOCAL_MEM_BANKS_AMD 0x4048 +#define CL_DEVICE_THREAD_TRACE_SUPPORTED_AMD 0x4049 +#define CL_DEVICE_GFXIP_MAJOR_AMD 0x404A +#define CL_DEVICE_GFXIP_MINOR_AMD 0x404B +#define CL_DEVICE_AVAILABLE_ASYNC_QUEUES_AMD 0x404C +#define CL_DEVICE_PREFERRED_WORK_GROUP_SIZE_AMD 0x4030 +#define CL_DEVICE_MAX_WORK_GROUP_SIZE_AMD 0x4031 +#define CL_DEVICE_PREFERRED_CONSTANT_BUFFER_SIZE_AMD 0x4033 +#define CL_DEVICE_PCIE_ID_AMD 0x4034 + + 
+/********************************* +* cl_arm_printf extension +*********************************/ + +#define CL_PRINTF_CALLBACK_ARM 0x40B0 +#define CL_PRINTF_BUFFERSIZE_ARM 0x40B1 + + +/*********************************** +* cl_ext_device_fission extension +***********************************/ +#define cl_ext_device_fission 1 + +extern CL_API_ENTRY cl_int CL_API_CALL +clReleaseDeviceEXT(cl_device_id device) CL_EXT_SUFFIX__VERSION_1_1; + +typedef CL_API_ENTRY cl_int +(CL_API_CALL *clReleaseDeviceEXT_fn)(cl_device_id device) CL_EXT_SUFFIX__VERSION_1_1; + +extern CL_API_ENTRY cl_int CL_API_CALL +clRetainDeviceEXT(cl_device_id device) CL_EXT_SUFFIX__VERSION_1_1; + +typedef CL_API_ENTRY cl_int +(CL_API_CALL *clRetainDeviceEXT_fn)(cl_device_id device) CL_EXT_SUFFIX__VERSION_1_1; + +typedef cl_ulong cl_device_partition_property_ext; +extern CL_API_ENTRY cl_int CL_API_CALL +clCreateSubDevicesEXT(cl_device_id in_device, + const cl_device_partition_property_ext * properties, + cl_uint num_entries, + cl_device_id * out_devices, + cl_uint * num_devices) CL_EXT_SUFFIX__VERSION_1_1; + +typedef CL_API_ENTRY cl_int +(CL_API_CALL * clCreateSubDevicesEXT_fn)(cl_device_id in_device, + const cl_device_partition_property_ext * properties, + cl_uint num_entries, + cl_device_id * out_devices, + cl_uint * num_devices) CL_EXT_SUFFIX__VERSION_1_1; + +/* cl_device_partition_property_ext */ +#define CL_DEVICE_PARTITION_EQUALLY_EXT 0x4050 +#define CL_DEVICE_PARTITION_BY_COUNTS_EXT 0x4051 +#define CL_DEVICE_PARTITION_BY_NAMES_EXT 0x4052 +#define CL_DEVICE_PARTITION_BY_AFFINITY_DOMAIN_EXT 0x4053 + +/* clDeviceGetInfo selectors */ +#define CL_DEVICE_PARENT_DEVICE_EXT 0x4054 +#define CL_DEVICE_PARTITION_TYPES_EXT 0x4055 +#define CL_DEVICE_AFFINITY_DOMAINS_EXT 0x4056 +#define CL_DEVICE_REFERENCE_COUNT_EXT 0x4057 +#define CL_DEVICE_PARTITION_STYLE_EXT 0x4058 + +/* error codes */ +#define CL_DEVICE_PARTITION_FAILED_EXT -1057 +#define CL_INVALID_PARTITION_COUNT_EXT -1058 +#define CL_INVALID_PARTITION_NAME_EXT -1059 + +/* CL_AFFINITY_DOMAINs */ +#define CL_AFFINITY_DOMAIN_L1_CACHE_EXT 0x1 +#define CL_AFFINITY_DOMAIN_L2_CACHE_EXT 0x2 +#define CL_AFFINITY_DOMAIN_L3_CACHE_EXT 0x3 +#define CL_AFFINITY_DOMAIN_L4_CACHE_EXT 0x4 +#define CL_AFFINITY_DOMAIN_NUMA_EXT 0x10 +#define CL_AFFINITY_DOMAIN_NEXT_FISSIONABLE_EXT 0x100 + +/* cl_device_partition_property_ext list terminators */ +#define CL_PROPERTIES_LIST_END_EXT ((cl_device_partition_property_ext) 0) +#define CL_PARTITION_BY_COUNTS_LIST_END_EXT ((cl_device_partition_property_ext) 0) +#define CL_PARTITION_BY_NAMES_LIST_END_EXT ((cl_device_partition_property_ext) 0 - 1) + + +/*********************************** + * cl_ext_migrate_memobject extension definitions + ***********************************/ +#define cl_ext_migrate_memobject 1 + +typedef cl_bitfield cl_mem_migration_flags_ext; + +#define CL_MIGRATE_MEM_OBJECT_HOST_EXT 0x1 + +#define CL_COMMAND_MIGRATE_MEM_OBJECT_EXT 0x4040 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueMigrateMemObjectEXT(cl_command_queue command_queue, + cl_uint num_mem_objects, + const cl_mem * mem_objects, + cl_mem_migration_flags_ext flags, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event); + +typedef CL_API_ENTRY cl_int +(CL_API_CALL *clEnqueueMigrateMemObjectEXT_fn)(cl_command_queue command_queue, + cl_uint num_mem_objects, + const cl_mem * mem_objects, + cl_mem_migration_flags_ext flags, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event); + + 
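The cl_ext_device_fission declarations above follow the usual two-call OpenCL pattern: a property list selects the partition mode and is terminated by CL_PROPERTIES_LIST_END_EXT, and clCreateSubDevicesEXT is called once to size the result and once to fill it. The sketch below is illustrative only, not part of the vendored header; it assumes parent is a valid cl_device_id on a platform that reports the extension, split_into_pairs is a hypothetical helper, and in a real client the clCreateSubDevicesEXT_fn pointer would typically be obtained via clGetExtensionFunctionAddressForPlatform as in the previous sketch.

/* Illustrative only: partition a device into sub-devices of two compute units each. */
#include <CL/cl.h>
#include <CL/cl_ext.h>

static cl_int split_into_pairs(cl_device_id parent, cl_uint max_out,
                               cl_device_id* out, cl_uint* num_created) {
  const cl_device_partition_property_ext props[] = {
      CL_DEVICE_PARTITION_EQUALLY_EXT,   /* partition mode               */
      2,                                 /* compute units per sub-device */
      CL_PROPERTIES_LIST_END_EXT         /* property-list terminator     */
  };
  cl_uint available = 0;
  /* First call only reports how many sub-devices the partition would yield. */
  cl_int err = clCreateSubDevicesEXT(parent, props, 0, NULL, &available);
  if (err != CL_SUCCESS) return err;
  if (available == 0) return CL_DEVICE_PARTITION_FAILED_EXT;
  if (available > max_out) available = max_out;
  /* Second call creates the sub-devices; release them with clReleaseDeviceEXT. */
  err = clCreateSubDevicesEXT(parent, props, available, out, NULL);
  if (err == CL_SUCCESS && num_created != NULL) *num_created = available;
  return err;
}
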
+/********************************* +* cl_ext_cxx_for_opencl extension +*********************************/ +#define cl_ext_cxx_for_opencl 1 + +#define CL_DEVICE_CXX_FOR_OPENCL_NUMERIC_VERSION_EXT 0x4230 + +/********************************* +* cl_qcom_ext_host_ptr extension +*********************************/ +#define cl_qcom_ext_host_ptr 1 + +#define CL_MEM_EXT_HOST_PTR_QCOM (1 << 29) + +#define CL_DEVICE_EXT_MEM_PADDING_IN_BYTES_QCOM 0x40A0 +#define CL_DEVICE_PAGE_SIZE_QCOM 0x40A1 +#define CL_IMAGE_ROW_ALIGNMENT_QCOM 0x40A2 +#define CL_IMAGE_SLICE_ALIGNMENT_QCOM 0x40A3 +#define CL_MEM_HOST_UNCACHED_QCOM 0x40A4 +#define CL_MEM_HOST_WRITEBACK_QCOM 0x40A5 +#define CL_MEM_HOST_WRITETHROUGH_QCOM 0x40A6 +#define CL_MEM_HOST_WRITE_COMBINING_QCOM 0x40A7 + +typedef cl_uint cl_image_pitch_info_qcom; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetDeviceImageInfoQCOM(cl_device_id device, + size_t image_width, + size_t image_height, + const cl_image_format *image_format, + cl_image_pitch_info_qcom param_name, + size_t param_value_size, + void *param_value, + size_t *param_value_size_ret); + +typedef struct _cl_mem_ext_host_ptr +{ + /* Type of external memory allocation. */ + /* Legal values will be defined in layered extensions. */ + cl_uint allocation_type; + + /* Host cache policy for this external memory allocation. */ + cl_uint host_cache_policy; + +} cl_mem_ext_host_ptr; + + +/******************************************* +* cl_qcom_ext_host_ptr_iocoherent extension +********************************************/ + +/* Cache policy specifying io-coherence */ +#define CL_MEM_HOST_IOCOHERENT_QCOM 0x40A9 + + +/********************************* +* cl_qcom_ion_host_ptr extension +*********************************/ + +#define CL_MEM_ION_HOST_PTR_QCOM 0x40A8 + +typedef struct _cl_mem_ion_host_ptr +{ + /* Type of external memory allocation. */ + /* Must be CL_MEM_ION_HOST_PTR_QCOM for ION allocations. */ + cl_mem_ext_host_ptr ext_host_ptr; + + /* ION file descriptor */ + int ion_filedesc; + + /* Host pointer to the ION allocated memory */ + void* ion_hostptr; + +} cl_mem_ion_host_ptr; + + +/********************************* +* cl_qcom_android_native_buffer_host_ptr extension +*********************************/ + +#define CL_MEM_ANDROID_NATIVE_BUFFER_HOST_PTR_QCOM 0x40C6 + +typedef struct _cl_mem_android_native_buffer_host_ptr +{ + /* Type of external memory allocation. */ + /* Must be CL_MEM_ANDROID_NATIVE_BUFFER_HOST_PTR_QCOM for Android native buffers. 
*/ + cl_mem_ext_host_ptr ext_host_ptr; + + /* Virtual pointer to the android native buffer */ + void* anb_ptr; + +} cl_mem_android_native_buffer_host_ptr; + + +/****************************************** + * cl_img_yuv_image extension * + ******************************************/ + +/* Image formats used in clCreateImage */ +#define CL_NV21_IMG 0x40D0 +#define CL_YV12_IMG 0x40D1 + + +/****************************************** + * cl_img_cached_allocations extension * + ******************************************/ + +/* Flag values used by clCreateBuffer */ +#define CL_MEM_USE_UNCACHED_CPU_MEMORY_IMG (1 << 26) +#define CL_MEM_USE_CACHED_CPU_MEMORY_IMG (1 << 27) + + +/****************************************** + * cl_img_use_gralloc_ptr extension * + ******************************************/ +#define cl_img_use_gralloc_ptr 1 + +/* Flag values used by clCreateBuffer */ +#define CL_MEM_USE_GRALLOC_PTR_IMG (1 << 28) + +/* To be used by clGetEventInfo: */ +#define CL_COMMAND_ACQUIRE_GRALLOC_OBJECTS_IMG 0x40D2 +#define CL_COMMAND_RELEASE_GRALLOC_OBJECTS_IMG 0x40D3 + +/* Error codes from clEnqueueAcquireGrallocObjectsIMG and clEnqueueReleaseGrallocObjectsIMG */ +#define CL_GRALLOC_RESOURCE_NOT_ACQUIRED_IMG 0x40D4 +#define CL_INVALID_GRALLOC_OBJECT_IMG 0x40D5 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueAcquireGrallocObjectsIMG(cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueReleaseGrallocObjectsIMG(cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_EXT_SUFFIX__VERSION_1_2; + +/****************************************** + * cl_img_generate_mipmap extension * + ******************************************/ +#define cl_img_generate_mipmap 1 + +typedef cl_uint cl_mipmap_filter_mode_img; + +/* To be used by clEnqueueGenerateMipmapIMG */ +#define CL_MIPMAP_FILTER_ANY_IMG 0x0 +#define CL_MIPMAP_FILTER_BOX_IMG 0x1 + +/* To be used by clGetEventInfo */ +#define CL_COMMAND_GENERATE_MIPMAP_IMG 0x40D6 + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueGenerateMipmapIMG(cl_command_queue command_queue, + cl_mem src_image, + cl_mem dst_image, + cl_mipmap_filter_mode_img mipmap_filter_mode, + const size_t *array_region, + const size_t *mip_region, + cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_EXT_SUFFIX__VERSION_1_2; + +/****************************************** + * cl_img_mem_properties extension * + ******************************************/ +#define cl_img_mem_properties 1 + +/* To be used by clCreateBufferWithProperties */ +#define CL_MEM_ALLOC_FLAGS_IMG 0x40D7 + +/* To be used wiith the CL_MEM_ALLOC_FLAGS_IMG property */ +typedef cl_bitfield cl_mem_alloc_flags_img; + +/* To be used with cl_mem_alloc_flags_img */ +#define CL_MEM_ALLOC_RELAX_REQUIREMENTS_IMG (1 << 0) + +/********************************* +* cl_khr_subgroups extension +*********************************/ +#define cl_khr_subgroups 1 + +#if !defined(CL_VERSION_2_1) +/* For OpenCL 2.1 and newer, cl_kernel_sub_group_info is declared in CL.h. + In hindsight, there should have been a khr suffix on this type for + the extension, but keeping it un-suffixed to maintain backwards + compatibility. 
*/ +typedef cl_uint cl_kernel_sub_group_info; +#endif + +/* cl_kernel_sub_group_info */ +#define CL_KERNEL_MAX_SUB_GROUP_SIZE_FOR_NDRANGE_KHR 0x2033 +#define CL_KERNEL_SUB_GROUP_COUNT_FOR_NDRANGE_KHR 0x2034 + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetKernelSubGroupInfoKHR(cl_kernel in_kernel, + cl_device_id in_device, + cl_kernel_sub_group_info param_name, + size_t input_value_size, + const void * input_value, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_EXT_SUFFIX__VERSION_2_0_DEPRECATED; + +typedef CL_API_ENTRY cl_int +(CL_API_CALL * clGetKernelSubGroupInfoKHR_fn)(cl_kernel in_kernel, + cl_device_id in_device, + cl_kernel_sub_group_info param_name, + size_t input_value_size, + const void * input_value, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_EXT_SUFFIX__VERSION_2_0_DEPRECATED; + + +/********************************* +* cl_khr_mipmap_image extension +*********************************/ + +/* cl_sampler_properties */ +#define CL_SAMPLER_MIP_FILTER_MODE_KHR 0x1155 +#define CL_SAMPLER_LOD_MIN_KHR 0x1156 +#define CL_SAMPLER_LOD_MAX_KHR 0x1157 + + +/********************************* +* cl_khr_priority_hints extension +*********************************/ +/* This extension define is for backwards compatibility. + It shouldn't be required since this extension has no new functions. */ +#define cl_khr_priority_hints 1 + +typedef cl_uint cl_queue_priority_khr; + +/* cl_command_queue_properties */ +#define CL_QUEUE_PRIORITY_KHR 0x1096 + +/* cl_queue_priority_khr */ +#define CL_QUEUE_PRIORITY_HIGH_KHR (1<<0) +#define CL_QUEUE_PRIORITY_MED_KHR (1<<1) +#define CL_QUEUE_PRIORITY_LOW_KHR (1<<2) + + +/********************************* +* cl_khr_throttle_hints extension +*********************************/ +/* This extension define is for backwards compatibility. + It shouldn't be required since this extension has no new functions. */ +#define cl_khr_throttle_hints 1 + +typedef cl_uint cl_queue_throttle_khr; + +/* cl_command_queue_properties */ +#define CL_QUEUE_THROTTLE_KHR 0x1097 + +/* cl_queue_throttle_khr */ +#define CL_QUEUE_THROTTLE_HIGH_KHR (1<<0) +#define CL_QUEUE_THROTTLE_MED_KHR (1<<1) +#define CL_QUEUE_THROTTLE_LOW_KHR (1<<2) + + +/********************************* +* cl_khr_subgroup_named_barrier +*********************************/ +/* This extension define is for backwards compatibility. + It shouldn't be required since this extension has no new functions. 
*/ +#define cl_khr_subgroup_named_barrier 1 + +/* cl_device_info */ +#define CL_DEVICE_MAX_NAMED_BARRIER_COUNT_KHR 0x2035 + + +/********************************* +* cl_khr_extended_versioning +*********************************/ + +#define cl_khr_extended_versioning 1 + +#define CL_VERSION_MAJOR_BITS_KHR (10) +#define CL_VERSION_MINOR_BITS_KHR (10) +#define CL_VERSION_PATCH_BITS_KHR (12) + +#define CL_VERSION_MAJOR_MASK_KHR ((1 << CL_VERSION_MAJOR_BITS_KHR) - 1) +#define CL_VERSION_MINOR_MASK_KHR ((1 << CL_VERSION_MINOR_BITS_KHR) - 1) +#define CL_VERSION_PATCH_MASK_KHR ((1 << CL_VERSION_PATCH_BITS_KHR) - 1) + +#define CL_VERSION_MAJOR_KHR(version) ((version) >> (CL_VERSION_MINOR_BITS_KHR + CL_VERSION_PATCH_BITS_KHR)) +#define CL_VERSION_MINOR_KHR(version) (((version) >> CL_VERSION_PATCH_BITS_KHR) & CL_VERSION_MINOR_MASK_KHR) +#define CL_VERSION_PATCH_KHR(version) ((version) & CL_VERSION_PATCH_MASK_KHR) + +#define CL_MAKE_VERSION_KHR(major, minor, patch) \ + ((((major) & CL_VERSION_MAJOR_MASK_KHR) << (CL_VERSION_MINOR_BITS_KHR + CL_VERSION_PATCH_BITS_KHR)) | \ + (((minor) & CL_VERSION_MINOR_MASK_KHR) << CL_VERSION_PATCH_BITS_KHR) | \ + ((patch) & CL_VERSION_PATCH_MASK_KHR)) + +typedef cl_uint cl_version_khr; + +#define CL_NAME_VERSION_MAX_NAME_SIZE_KHR 64 + +typedef struct _cl_name_version_khr +{ + cl_version_khr version; + char name[CL_NAME_VERSION_MAX_NAME_SIZE_KHR]; +} cl_name_version_khr; + +/* cl_platform_info */ +#define CL_PLATFORM_NUMERIC_VERSION_KHR 0x0906 +#define CL_PLATFORM_EXTENSIONS_WITH_VERSION_KHR 0x0907 + +/* cl_device_info */ +#define CL_DEVICE_NUMERIC_VERSION_KHR 0x105E +#define CL_DEVICE_OPENCL_C_NUMERIC_VERSION_KHR 0x105F +#define CL_DEVICE_EXTENSIONS_WITH_VERSION_KHR 0x1060 +#define CL_DEVICE_ILS_WITH_VERSION_KHR 0x1061 +#define CL_DEVICE_BUILT_IN_KERNELS_WITH_VERSION_KHR 0x1062 + + +/********************************* +* cl_khr_device_uuid extension +*********************************/ +#define cl_khr_device_uuid 1 + +#define CL_UUID_SIZE_KHR 16 +#define CL_LUID_SIZE_KHR 8 + +#define CL_DEVICE_UUID_KHR 0x106A +#define CL_DRIVER_UUID_KHR 0x106B +#define CL_DEVICE_LUID_VALID_KHR 0x106C +#define CL_DEVICE_LUID_KHR 0x106D +#define CL_DEVICE_NODE_MASK_KHR 0x106E + + +/********************************** + * cl_arm_import_memory extension * + **********************************/ +#define cl_arm_import_memory 1 + +typedef intptr_t cl_import_properties_arm; + +/* Default and valid proporties name for cl_arm_import_memory */ +#define CL_IMPORT_TYPE_ARM 0x40B2 + +/* Host process memory type default value for CL_IMPORT_TYPE_ARM property */ +#define CL_IMPORT_TYPE_HOST_ARM 0x40B3 + +/* DMA BUF memory type value for CL_IMPORT_TYPE_ARM property */ +#define CL_IMPORT_TYPE_DMA_BUF_ARM 0x40B4 + +/* Protected memory property */ +#define CL_IMPORT_TYPE_PROTECTED_ARM 0x40B5 + +/* Android hardware buffer type value for CL_IMPORT_TYPE_ARM property */ +#define CL_IMPORT_TYPE_ANDROID_HARDWARE_BUFFER_ARM 0x41E2 + +/* Data consistency with host property */ +#define CL_IMPORT_DMA_BUF_DATA_CONSISTENCY_WITH_HOST_ARM 0x41E3 + +/* Index of plane in a multiplanar hardware buffer */ +#define CL_IMPORT_ANDROID_HARDWARE_BUFFER_PLANE_INDEX_ARM 0x41EF + +/* Index of layer in a multilayer hardware buffer */ +#define CL_IMPORT_ANDROID_HARDWARE_BUFFER_LAYER_INDEX_ARM 0x41F0 + +/* Import memory size value to indicate a size for the whole buffer */ +#define CL_IMPORT_MEMORY_WHOLE_ALLOCATION_ARM SIZE_MAX + +/* This extension adds a new function that allows for direct memory import into + * OpenCL via the 
clImportMemoryARM function. + * + * Memory imported through this interface will be mapped into the device's page + * tables directly, providing zero copy access. It will never fall back to copy + * operations and aliased buffers. + * + * Types of memory supported for import are specified as additional extension + * strings. + * + * This extension produces cl_mem allocations which are compatible with all other + * users of cl_mem in the standard API. + * + * This extension maps pages with the same properties as the normal buffer creation + * function clCreateBuffer. + */ +extern CL_API_ENTRY cl_mem CL_API_CALL +clImportMemoryARM( cl_context context, + cl_mem_flags flags, + const cl_import_properties_arm *properties, + void *memory, + size_t size, + cl_int *errcode_ret) CL_EXT_SUFFIX__VERSION_1_0; + + +/****************************************** + * cl_arm_shared_virtual_memory extension * + ******************************************/ +#define cl_arm_shared_virtual_memory 1 + +/* Used by clGetDeviceInfo */ +#define CL_DEVICE_SVM_CAPABILITIES_ARM 0x40B6 + +/* Used by clGetMemObjectInfo */ +#define CL_MEM_USES_SVM_POINTER_ARM 0x40B7 + +/* Used by clSetKernelExecInfoARM: */ +#define CL_KERNEL_EXEC_INFO_SVM_PTRS_ARM 0x40B8 +#define CL_KERNEL_EXEC_INFO_SVM_FINE_GRAIN_SYSTEM_ARM 0x40B9 + +/* To be used by clGetEventInfo: */ +#define CL_COMMAND_SVM_FREE_ARM 0x40BA +#define CL_COMMAND_SVM_MEMCPY_ARM 0x40BB +#define CL_COMMAND_SVM_MEMFILL_ARM 0x40BC +#define CL_COMMAND_SVM_MAP_ARM 0x40BD +#define CL_COMMAND_SVM_UNMAP_ARM 0x40BE + +/* Flag values returned by clGetDeviceInfo with CL_DEVICE_SVM_CAPABILITIES_ARM as the param_name. */ +#define CL_DEVICE_SVM_COARSE_GRAIN_BUFFER_ARM (1 << 0) +#define CL_DEVICE_SVM_FINE_GRAIN_BUFFER_ARM (1 << 1) +#define CL_DEVICE_SVM_FINE_GRAIN_SYSTEM_ARM (1 << 2) +#define CL_DEVICE_SVM_ATOMICS_ARM (1 << 3) + +/* Flag values used by clSVMAllocARM: */ +#define CL_MEM_SVM_FINE_GRAIN_BUFFER_ARM (1 << 10) +#define CL_MEM_SVM_ATOMICS_ARM (1 << 11) + +typedef cl_bitfield cl_svm_mem_flags_arm; +typedef cl_uint cl_kernel_exec_info_arm; +typedef cl_bitfield cl_device_svm_capabilities_arm; + +extern CL_API_ENTRY void * CL_API_CALL +clSVMAllocARM(cl_context context, + cl_svm_mem_flags_arm flags, + size_t size, + cl_uint alignment) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY void CL_API_CALL +clSVMFreeARM(cl_context context, + void * svm_pointer) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueSVMFreeARM(cl_command_queue command_queue, + cl_uint num_svm_pointers, + void * svm_pointers[], + void (CL_CALLBACK * pfn_free_func)(cl_command_queue queue, + cl_uint num_svm_pointers, + void * svm_pointers[], + void * user_data), + void * user_data, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueSVMMemcpyARM(cl_command_queue command_queue, + cl_bool blocking_copy, + void * dst_ptr, + const void * src_ptr, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueSVMMemFillARM(cl_command_queue command_queue, + void * svm_ptr, + const void * pattern, + size_t pattern_size, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueSVMMapARM(cl_command_queue command_queue, + cl_bool 
blocking_map, + cl_map_flags flags, + void * svm_ptr, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueSVMUnmapARM(cl_command_queue command_queue, + void * svm_ptr, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetKernelArgSVMPointerARM(cl_kernel kernel, + cl_uint arg_index, + const void * arg_value) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetKernelExecInfoARM(cl_kernel kernel, + cl_kernel_exec_info_arm param_name, + size_t param_value_size, + const void * param_value) CL_EXT_SUFFIX__VERSION_1_2; + +/******************************** + * cl_arm_get_core_id extension * + ********************************/ + +#ifdef CL_VERSION_1_2 + +#define cl_arm_get_core_id 1 + +/* Device info property for bitfield of cores present */ +#define CL_DEVICE_COMPUTE_UNITS_BITFIELD_ARM 0x40BF + +#endif /* CL_VERSION_1_2 */ + +/********************************* +* cl_arm_job_slot_selection +*********************************/ + +#define cl_arm_job_slot_selection 1 + +/* cl_device_info */ +#define CL_DEVICE_JOB_SLOTS_ARM 0x41E0 + +/* cl_command_queue_properties */ +#define CL_QUEUE_JOB_SLOT_ARM 0x41E1 + +/********************************* +* cl_arm_scheduling_controls +*********************************/ + +#define cl_arm_scheduling_controls 1 + +typedef cl_bitfield cl_device_scheduling_controls_capabilities_arm; + +/* cl_device_info */ +#define CL_DEVICE_SCHEDULING_CONTROLS_CAPABILITIES_ARM 0x41E4 + +#define CL_DEVICE_SCHEDULING_KERNEL_BATCHING_ARM (1 << 0) +#define CL_DEVICE_SCHEDULING_WORKGROUP_BATCH_SIZE_ARM (1 << 1) +#define CL_DEVICE_SCHEDULING_WORKGROUP_BATCH_SIZE_MODIFIER_ARM (1 << 2) +#define CL_DEVICE_SCHEDULING_DEFERRED_FLUSH_ARM (1 << 3) +#define CL_DEVICE_SCHEDULING_REGISTER_ALLOCATION_ARM (1 << 4) + +#define CL_DEVICE_SUPPORTED_REGISTER_ALLOCATIONS_ARM 0x41EB + +/* cl_kernel_info */ +#define CL_KERNEL_EXEC_INFO_WORKGROUP_BATCH_SIZE_ARM 0x41E5 +#define CL_KERNEL_EXEC_INFO_WORKGROUP_BATCH_SIZE_MODIFIER_ARM 0x41E6 + +/* cl_queue_properties */ +#define CL_QUEUE_KERNEL_BATCHING_ARM 0x41E7 +#define CL_QUEUE_DEFERRED_FLUSH_ARM 0x41EC + +/************************************** +* cl_arm_controlled_kernel_termination +***************************************/ + +#define cl_arm_controlled_kernel_termination 1 + +/* Error code to indicate kernel terminated with failure */ +#define CL_COMMAND_TERMINATED_ITSELF_WITH_FAILURE_ARM -1108 + +/* cl_device_info */ +#define CL_DEVICE_CONTROLLED_TERMINATION_CAPABILITIES_ARM 0x41EE + +/* Bit fields for controlled termination feature query */ +typedef cl_bitfield cl_device_controlled_termination_capabilities_arm; + +#define CL_DEVICE_CONTROLLED_TERMINATION_SUCCESS_ARM (1 << 0) +#define CL_DEVICE_CONTROLLED_TERMINATION_FAILURE_ARM (1 << 1) +#define CL_DEVICE_CONTROLLED_TERMINATION_QUERY_ARM (1 << 2) + +/* cl_event_info */ +#define CL_EVENT_COMMAND_TERMINATION_REASON_ARM 0x41ED + +/* Values returned for event termination reason query */ +typedef cl_uint cl_command_termination_reason_arm; + +#define CL_COMMAND_TERMINATION_COMPLETION_ARM 0 +#define CL_COMMAND_TERMINATION_CONTROLLED_SUCCESS_ARM 1 +#define CL_COMMAND_TERMINATION_CONTROLLED_FAILURE_ARM 2 +#define CL_COMMAND_TERMINATION_ERROR_ARM 3 + +/*************************************** +* cl_intel_thread_local_exec extension * 
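A small sketch of how the cl_arm_controlled_kernel_termination queries above might be used: first check the device capability bits with clGetDeviceInfo, then read back the termination reason of a completed command from its event with clGetEventInfo. `device` and `event` are assumed to be provided by the caller.

```c
/* Sketch: check controlled-termination support, then query why a command
 * terminated. Uses only core clGetDeviceInfo/clGetEventInfo plus the ARM
 * enums and typedefs declared above. */
#include <CL/cl.h>
#include <CL/cl_ext.h>

static cl_command_termination_reason_arm
query_termination_reason(cl_device_id device, cl_event event) {
  cl_device_controlled_termination_capabilities_arm caps = 0;
  clGetDeviceInfo(device, CL_DEVICE_CONTROLLED_TERMINATION_CAPABILITIES_ARM,
                  sizeof(caps), &caps, NULL);

  cl_command_termination_reason_arm reason = CL_COMMAND_TERMINATION_COMPLETION_ARM;
  if (caps & CL_DEVICE_CONTROLLED_TERMINATION_QUERY_ARM) {
    clGetEventInfo(event, CL_EVENT_COMMAND_TERMINATION_REASON_ARM,
                   sizeof(reason), &reason, NULL);
  }
  return reason;
}
```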
+****************************************/ + +#define cl_intel_thread_local_exec 1 + +#define CL_QUEUE_THREAD_LOCAL_EXEC_ENABLE_INTEL (((cl_bitfield)1) << 31) + +/*********************************************** +* cl_intel_device_partition_by_names extension * +************************************************/ + +#define cl_intel_device_partition_by_names 1 + +#define CL_DEVICE_PARTITION_BY_NAMES_INTEL 0x4052 +#define CL_PARTITION_BY_NAMES_LIST_END_INTEL -1 + +/************************************************ +* cl_intel_accelerator extension * +* cl_intel_motion_estimation extension * +* cl_intel_advanced_motion_estimation extension * +*************************************************/ + +#define cl_intel_accelerator 1 +#define cl_intel_motion_estimation 1 +#define cl_intel_advanced_motion_estimation 1 + +typedef struct _cl_accelerator_intel* cl_accelerator_intel; +typedef cl_uint cl_accelerator_type_intel; +typedef cl_uint cl_accelerator_info_intel; + +typedef struct _cl_motion_estimation_desc_intel { + cl_uint mb_block_type; + cl_uint subpixel_mode; + cl_uint sad_adjust_mode; + cl_uint search_path_type; +} cl_motion_estimation_desc_intel; + +/* error codes */ +#define CL_INVALID_ACCELERATOR_INTEL -1094 +#define CL_INVALID_ACCELERATOR_TYPE_INTEL -1095 +#define CL_INVALID_ACCELERATOR_DESCRIPTOR_INTEL -1096 +#define CL_ACCELERATOR_TYPE_NOT_SUPPORTED_INTEL -1097 + +/* cl_accelerator_type_intel */ +#define CL_ACCELERATOR_TYPE_MOTION_ESTIMATION_INTEL 0x0 + +/* cl_accelerator_info_intel */ +#define CL_ACCELERATOR_DESCRIPTOR_INTEL 0x4090 +#define CL_ACCELERATOR_REFERENCE_COUNT_INTEL 0x4091 +#define CL_ACCELERATOR_CONTEXT_INTEL 0x4092 +#define CL_ACCELERATOR_TYPE_INTEL 0x4093 + +/* cl_motion_detect_desc_intel flags */ +#define CL_ME_MB_TYPE_16x16_INTEL 0x0 +#define CL_ME_MB_TYPE_8x8_INTEL 0x1 +#define CL_ME_MB_TYPE_4x4_INTEL 0x2 + +#define CL_ME_SUBPIXEL_MODE_INTEGER_INTEL 0x0 +#define CL_ME_SUBPIXEL_MODE_HPEL_INTEL 0x1 +#define CL_ME_SUBPIXEL_MODE_QPEL_INTEL 0x2 + +#define CL_ME_SAD_ADJUST_MODE_NONE_INTEL 0x0 +#define CL_ME_SAD_ADJUST_MODE_HAAR_INTEL 0x1 + +#define CL_ME_SEARCH_PATH_RADIUS_2_2_INTEL 0x0 +#define CL_ME_SEARCH_PATH_RADIUS_4_4_INTEL 0x1 +#define CL_ME_SEARCH_PATH_RADIUS_16_12_INTEL 0x5 + +#define CL_ME_SKIP_BLOCK_TYPE_16x16_INTEL 0x0 +#define CL_ME_CHROMA_INTRA_PREDICT_ENABLED_INTEL 0x1 +#define CL_ME_LUMA_INTRA_PREDICT_ENABLED_INTEL 0x2 +#define CL_ME_SKIP_BLOCK_TYPE_8x8_INTEL 0x4 + +#define CL_ME_FORWARD_INPUT_MODE_INTEL 0x1 +#define CL_ME_BACKWARD_INPUT_MODE_INTEL 0x2 +#define CL_ME_BIDIRECTION_INPUT_MODE_INTEL 0x3 + +#define CL_ME_BIDIR_WEIGHT_QUARTER_INTEL 16 +#define CL_ME_BIDIR_WEIGHT_THIRD_INTEL 21 +#define CL_ME_BIDIR_WEIGHT_HALF_INTEL 32 +#define CL_ME_BIDIR_WEIGHT_TWO_THIRD_INTEL 43 +#define CL_ME_BIDIR_WEIGHT_THREE_QUARTER_INTEL 48 + +#define CL_ME_COST_PENALTY_NONE_INTEL 0x0 +#define CL_ME_COST_PENALTY_LOW_INTEL 0x1 +#define CL_ME_COST_PENALTY_NORMAL_INTEL 0x2 +#define CL_ME_COST_PENALTY_HIGH_INTEL 0x3 + +#define CL_ME_COST_PRECISION_QPEL_INTEL 0x0 +#define CL_ME_COST_PRECISION_HPEL_INTEL 0x1 +#define CL_ME_COST_PRECISION_PEL_INTEL 0x2 +#define CL_ME_COST_PRECISION_DPEL_INTEL 0x3 + +#define CL_ME_LUMA_PREDICTOR_MODE_VERTICAL_INTEL 0x0 +#define CL_ME_LUMA_PREDICTOR_MODE_HORIZONTAL_INTEL 0x1 +#define CL_ME_LUMA_PREDICTOR_MODE_DC_INTEL 0x2 +#define CL_ME_LUMA_PREDICTOR_MODE_DIAGONAL_DOWN_LEFT_INTEL 0x3 + +#define CL_ME_LUMA_PREDICTOR_MODE_DIAGONAL_DOWN_RIGHT_INTEL 0x4 +#define CL_ME_LUMA_PREDICTOR_MODE_PLANE_INTEL 0x4 +#define 
CL_ME_LUMA_PREDICTOR_MODE_VERTICAL_RIGHT_INTEL 0x5 +#define CL_ME_LUMA_PREDICTOR_MODE_HORIZONTAL_DOWN_INTEL 0x6 +#define CL_ME_LUMA_PREDICTOR_MODE_VERTICAL_LEFT_INTEL 0x7 +#define CL_ME_LUMA_PREDICTOR_MODE_HORIZONTAL_UP_INTEL 0x8 + +#define CL_ME_CHROMA_PREDICTOR_MODE_DC_INTEL 0x0 +#define CL_ME_CHROMA_PREDICTOR_MODE_HORIZONTAL_INTEL 0x1 +#define CL_ME_CHROMA_PREDICTOR_MODE_VERTICAL_INTEL 0x2 +#define CL_ME_CHROMA_PREDICTOR_MODE_PLANE_INTEL 0x3 + +/* cl_device_info */ +#define CL_DEVICE_ME_VERSION_INTEL 0x407E + +#define CL_ME_VERSION_LEGACY_INTEL 0x0 +#define CL_ME_VERSION_ADVANCED_VER_1_INTEL 0x1 +#define CL_ME_VERSION_ADVANCED_VER_2_INTEL 0x2 + +extern CL_API_ENTRY cl_accelerator_intel CL_API_CALL +clCreateAcceleratorINTEL( + cl_context context, + cl_accelerator_type_intel accelerator_type, + size_t descriptor_size, + const void* descriptor, + cl_int* errcode_ret) CL_EXT_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_accelerator_intel (CL_API_CALL *clCreateAcceleratorINTEL_fn)( + cl_context context, + cl_accelerator_type_intel accelerator_type, + size_t descriptor_size, + const void* descriptor, + cl_int* errcode_ret) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetAcceleratorInfoINTEL( + cl_accelerator_intel accelerator, + cl_accelerator_info_intel param_name, + size_t param_value_size, + void* param_value, + size_t* param_value_size_ret) CL_EXT_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clGetAcceleratorInfoINTEL_fn)( + cl_accelerator_intel accelerator, + cl_accelerator_info_intel param_name, + size_t param_value_size, + void* param_value, + size_t* param_value_size_ret) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clRetainAcceleratorINTEL( + cl_accelerator_intel accelerator) CL_EXT_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clRetainAcceleratorINTEL_fn)( + cl_accelerator_intel accelerator) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clReleaseAcceleratorINTEL( + cl_accelerator_intel accelerator) CL_EXT_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clReleaseAcceleratorINTEL_fn)( + cl_accelerator_intel accelerator) CL_EXT_SUFFIX__VERSION_1_2; + +/****************************************** +* cl_intel_simultaneous_sharing extension * +*******************************************/ + +#define cl_intel_simultaneous_sharing 1 + +#define CL_DEVICE_SIMULTANEOUS_INTEROPS_INTEL 0x4104 +#define CL_DEVICE_NUM_SIMULTANEOUS_INTEROPS_INTEL 0x4105 + +/*********************************** +* cl_intel_egl_image_yuv extension * +************************************/ + +#define cl_intel_egl_image_yuv 1 + +#define CL_EGL_YUV_PLANE_INTEL 0x4107 + +/******************************** +* cl_intel_packed_yuv extension * +*********************************/ + +#define cl_intel_packed_yuv 1 + +#define CL_YUYV_INTEL 0x4076 +#define CL_UYVY_INTEL 0x4077 +#define CL_YVYU_INTEL 0x4078 +#define CL_VYUY_INTEL 0x4079 + +/******************************************** +* cl_intel_required_subgroup_size extension * +*********************************************/ + +#define cl_intel_required_subgroup_size 1 + +#define CL_DEVICE_SUB_GROUP_SIZES_INTEL 0x4108 +#define CL_KERNEL_SPILL_MEM_SIZE_INTEL 0x4109 +#define CL_KERNEL_COMPILE_SUB_GROUP_SIZE_INTEL 0x410A + +/**************************************** +* cl_intel_driver_diagnostics extension * +*****************************************/ + +#define cl_intel_driver_diagnostics 1 + +typedef cl_uint cl_diagnostics_verbose_level; + +#define 
CL_CONTEXT_SHOW_DIAGNOSTICS_INTEL 0x4106 + +#define CL_CONTEXT_DIAGNOSTICS_LEVEL_ALL_INTEL ( 0xff ) +#define CL_CONTEXT_DIAGNOSTICS_LEVEL_GOOD_INTEL ( 1 ) +#define CL_CONTEXT_DIAGNOSTICS_LEVEL_BAD_INTEL ( 1 << 1 ) +#define CL_CONTEXT_DIAGNOSTICS_LEVEL_NEUTRAL_INTEL ( 1 << 2 ) + +/******************************** +* cl_intel_planar_yuv extension * +*********************************/ + +#define CL_NV12_INTEL 0x410E + +#define CL_MEM_NO_ACCESS_INTEL ( 1 << 24 ) +#define CL_MEM_ACCESS_FLAGS_UNRESTRICTED_INTEL ( 1 << 25 ) + +#define CL_DEVICE_PLANAR_YUV_MAX_WIDTH_INTEL 0x417E +#define CL_DEVICE_PLANAR_YUV_MAX_HEIGHT_INTEL 0x417F + +/******************************************************* +* cl_intel_device_side_avc_motion_estimation extension * +********************************************************/ + +#define CL_DEVICE_AVC_ME_VERSION_INTEL 0x410B +#define CL_DEVICE_AVC_ME_SUPPORTS_TEXTURE_SAMPLER_USE_INTEL 0x410C +#define CL_DEVICE_AVC_ME_SUPPORTS_PREEMPTION_INTEL 0x410D + +#define CL_AVC_ME_VERSION_0_INTEL 0x0 /* No support. */ +#define CL_AVC_ME_VERSION_1_INTEL 0x1 /* First supported version. */ + +#define CL_AVC_ME_MAJOR_16x16_INTEL 0x0 +#define CL_AVC_ME_MAJOR_16x8_INTEL 0x1 +#define CL_AVC_ME_MAJOR_8x16_INTEL 0x2 +#define CL_AVC_ME_MAJOR_8x8_INTEL 0x3 + +#define CL_AVC_ME_MINOR_8x8_INTEL 0x0 +#define CL_AVC_ME_MINOR_8x4_INTEL 0x1 +#define CL_AVC_ME_MINOR_4x8_INTEL 0x2 +#define CL_AVC_ME_MINOR_4x4_INTEL 0x3 + +#define CL_AVC_ME_MAJOR_FORWARD_INTEL 0x0 +#define CL_AVC_ME_MAJOR_BACKWARD_INTEL 0x1 +#define CL_AVC_ME_MAJOR_BIDIRECTIONAL_INTEL 0x2 + +#define CL_AVC_ME_PARTITION_MASK_ALL_INTEL 0x0 +#define CL_AVC_ME_PARTITION_MASK_16x16_INTEL 0x7E +#define CL_AVC_ME_PARTITION_MASK_16x8_INTEL 0x7D +#define CL_AVC_ME_PARTITION_MASK_8x16_INTEL 0x7B +#define CL_AVC_ME_PARTITION_MASK_8x8_INTEL 0x77 +#define CL_AVC_ME_PARTITION_MASK_8x4_INTEL 0x6F +#define CL_AVC_ME_PARTITION_MASK_4x8_INTEL 0x5F +#define CL_AVC_ME_PARTITION_MASK_4x4_INTEL 0x3F + +#define CL_AVC_ME_SEARCH_WINDOW_EXHAUSTIVE_INTEL 0x0 +#define CL_AVC_ME_SEARCH_WINDOW_SMALL_INTEL 0x1 +#define CL_AVC_ME_SEARCH_WINDOW_TINY_INTEL 0x2 +#define CL_AVC_ME_SEARCH_WINDOW_EXTRA_TINY_INTEL 0x3 +#define CL_AVC_ME_SEARCH_WINDOW_DIAMOND_INTEL 0x4 +#define CL_AVC_ME_SEARCH_WINDOW_LARGE_DIAMOND_INTEL 0x5 +#define CL_AVC_ME_SEARCH_WINDOW_RESERVED0_INTEL 0x6 +#define CL_AVC_ME_SEARCH_WINDOW_RESERVED1_INTEL 0x7 +#define CL_AVC_ME_SEARCH_WINDOW_CUSTOM_INTEL 0x8 +#define CL_AVC_ME_SEARCH_WINDOW_16x12_RADIUS_INTEL 0x9 +#define CL_AVC_ME_SEARCH_WINDOW_4x4_RADIUS_INTEL 0x2 +#define CL_AVC_ME_SEARCH_WINDOW_2x2_RADIUS_INTEL 0xa + +#define CL_AVC_ME_SAD_ADJUST_MODE_NONE_INTEL 0x0 +#define CL_AVC_ME_SAD_ADJUST_MODE_HAAR_INTEL 0x2 + +#define CL_AVC_ME_SUBPIXEL_MODE_INTEGER_INTEL 0x0 +#define CL_AVC_ME_SUBPIXEL_MODE_HPEL_INTEL 0x1 +#define CL_AVC_ME_SUBPIXEL_MODE_QPEL_INTEL 0x3 + +#define CL_AVC_ME_COST_PRECISION_QPEL_INTEL 0x0 +#define CL_AVC_ME_COST_PRECISION_HPEL_INTEL 0x1 +#define CL_AVC_ME_COST_PRECISION_PEL_INTEL 0x2 +#define CL_AVC_ME_COST_PRECISION_DPEL_INTEL 0x3 + +#define CL_AVC_ME_BIDIR_WEIGHT_QUARTER_INTEL 0x10 +#define CL_AVC_ME_BIDIR_WEIGHT_THIRD_INTEL 0x15 +#define CL_AVC_ME_BIDIR_WEIGHT_HALF_INTEL 0x20 +#define CL_AVC_ME_BIDIR_WEIGHT_TWO_THIRD_INTEL 0x2B +#define CL_AVC_ME_BIDIR_WEIGHT_THREE_QUARTER_INTEL 0x30 + +#define CL_AVC_ME_BORDER_REACHED_LEFT_INTEL 0x0 +#define CL_AVC_ME_BORDER_REACHED_RIGHT_INTEL 0x2 +#define CL_AVC_ME_BORDER_REACHED_TOP_INTEL 0x4 +#define CL_AVC_ME_BORDER_REACHED_BOTTOM_INTEL 0x8 + +#define 
CL_AVC_ME_SKIP_BLOCK_PARTITION_16x16_INTEL 0x0 +#define CL_AVC_ME_SKIP_BLOCK_PARTITION_8x8_INTEL 0x4000 + +#define CL_AVC_ME_SKIP_BLOCK_16x16_FORWARD_ENABLE_INTEL ( 0x1 << 24 ) +#define CL_AVC_ME_SKIP_BLOCK_16x16_BACKWARD_ENABLE_INTEL ( 0x2 << 24 ) +#define CL_AVC_ME_SKIP_BLOCK_16x16_DUAL_ENABLE_INTEL ( 0x3 << 24 ) +#define CL_AVC_ME_SKIP_BLOCK_8x8_FORWARD_ENABLE_INTEL ( 0x55 << 24 ) +#define CL_AVC_ME_SKIP_BLOCK_8x8_BACKWARD_ENABLE_INTEL ( 0xAA << 24 ) +#define CL_AVC_ME_SKIP_BLOCK_8x8_DUAL_ENABLE_INTEL ( 0xFF << 24 ) +#define CL_AVC_ME_SKIP_BLOCK_8x8_0_FORWARD_ENABLE_INTEL ( 0x1 << 24 ) +#define CL_AVC_ME_SKIP_BLOCK_8x8_0_BACKWARD_ENABLE_INTEL ( 0x2 << 24 ) +#define CL_AVC_ME_SKIP_BLOCK_8x8_1_FORWARD_ENABLE_INTEL ( 0x1 << 26 ) +#define CL_AVC_ME_SKIP_BLOCK_8x8_1_BACKWARD_ENABLE_INTEL ( 0x2 << 26 ) +#define CL_AVC_ME_SKIP_BLOCK_8x8_2_FORWARD_ENABLE_INTEL ( 0x1 << 28 ) +#define CL_AVC_ME_SKIP_BLOCK_8x8_2_BACKWARD_ENABLE_INTEL ( 0x2 << 28 ) +#define CL_AVC_ME_SKIP_BLOCK_8x8_3_FORWARD_ENABLE_INTEL ( 0x1 << 30 ) +#define CL_AVC_ME_SKIP_BLOCK_8x8_3_BACKWARD_ENABLE_INTEL ( 0x2 << 30 ) + +#define CL_AVC_ME_BLOCK_BASED_SKIP_4x4_INTEL 0x00 +#define CL_AVC_ME_BLOCK_BASED_SKIP_8x8_INTEL 0x80 + +#define CL_AVC_ME_INTRA_16x16_INTEL 0x0 +#define CL_AVC_ME_INTRA_8x8_INTEL 0x1 +#define CL_AVC_ME_INTRA_4x4_INTEL 0x2 + +#define CL_AVC_ME_INTRA_LUMA_PARTITION_MASK_16x16_INTEL 0x6 +#define CL_AVC_ME_INTRA_LUMA_PARTITION_MASK_8x8_INTEL 0x5 +#define CL_AVC_ME_INTRA_LUMA_PARTITION_MASK_4x4_INTEL 0x3 + +#define CL_AVC_ME_INTRA_NEIGHBOR_LEFT_MASK_ENABLE_INTEL 0x60 +#define CL_AVC_ME_INTRA_NEIGHBOR_UPPER_MASK_ENABLE_INTEL 0x10 +#define CL_AVC_ME_INTRA_NEIGHBOR_UPPER_RIGHT_MASK_ENABLE_INTEL 0x8 +#define CL_AVC_ME_INTRA_NEIGHBOR_UPPER_LEFT_MASK_ENABLE_INTEL 0x4 + +#define CL_AVC_ME_LUMA_PREDICTOR_MODE_VERTICAL_INTEL 0x0 +#define CL_AVC_ME_LUMA_PREDICTOR_MODE_HORIZONTAL_INTEL 0x1 +#define CL_AVC_ME_LUMA_PREDICTOR_MODE_DC_INTEL 0x2 +#define CL_AVC_ME_LUMA_PREDICTOR_MODE_DIAGONAL_DOWN_LEFT_INTEL 0x3 +#define CL_AVC_ME_LUMA_PREDICTOR_MODE_DIAGONAL_DOWN_RIGHT_INTEL 0x4 +#define CL_AVC_ME_LUMA_PREDICTOR_MODE_PLANE_INTEL 0x4 +#define CL_AVC_ME_LUMA_PREDICTOR_MODE_VERTICAL_RIGHT_INTEL 0x5 +#define CL_AVC_ME_LUMA_PREDICTOR_MODE_HORIZONTAL_DOWN_INTEL 0x6 +#define CL_AVC_ME_LUMA_PREDICTOR_MODE_VERTICAL_LEFT_INTEL 0x7 +#define CL_AVC_ME_LUMA_PREDICTOR_MODE_HORIZONTAL_UP_INTEL 0x8 +#define CL_AVC_ME_CHROMA_PREDICTOR_MODE_DC_INTEL 0x0 +#define CL_AVC_ME_CHROMA_PREDICTOR_MODE_HORIZONTAL_INTEL 0x1 +#define CL_AVC_ME_CHROMA_PREDICTOR_MODE_VERTICAL_INTEL 0x2 +#define CL_AVC_ME_CHROMA_PREDICTOR_MODE_PLANE_INTEL 0x3 + +#define CL_AVC_ME_FRAME_FORWARD_INTEL 0x1 +#define CL_AVC_ME_FRAME_BACKWARD_INTEL 0x2 +#define CL_AVC_ME_FRAME_DUAL_INTEL 0x3 + +#define CL_AVC_ME_SLICE_TYPE_PRED_INTEL 0x0 +#define CL_AVC_ME_SLICE_TYPE_BPRED_INTEL 0x1 +#define CL_AVC_ME_SLICE_TYPE_INTRA_INTEL 0x2 + +#define CL_AVC_ME_INTERLACED_SCAN_TOP_FIELD_INTEL 0x0 +#define CL_AVC_ME_INTERLACED_SCAN_BOTTOM_FIELD_INTEL 0x1 + +/******************************************* +* cl_intel_unified_shared_memory extension * +********************************************/ + +/* These APIs are in sync with Revision Q of the cl_intel_unified_shared_memory spec! 
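A minimal sketch of how the unified-shared-memory entry points declared below might be used: allocate host USM memory, hand the raw pointer to a kernel argument, and free it once the queue has drained. `ctx`, `queue` and `kernel` are assumed to exist, and error handling is reduced to the bare minimum.

```c
/* Sketch: host USM allocation -> kernel argument by pointer -> blocking free. */
#include <CL/cl.h>
#include <CL/cl_ext.h>
#include <string.h>

static cl_int run_usm_example(cl_context ctx, cl_command_queue queue,
                              cl_kernel kernel, size_t nbytes) {
  cl_int err = CL_SUCCESS;
  void* host_buf = clHostMemAllocINTEL(ctx, NULL /* default properties */,
                                       nbytes, 0 /* default alignment */, &err);
  if (err != CL_SUCCESS) return err;

  memset(host_buf, 0, nbytes);  /* the host can touch the allocation directly */
  err = clSetKernelArgMemPointerINTEL(kernel, 0, host_buf);
  if (err == CL_SUCCESS) {
    size_t gws = nbytes;
    err = clEnqueueNDRangeKernel(queue, kernel, 1, NULL, &gws, NULL, 0, NULL, NULL);
  }

  clFinish(queue);
  clMemBlockingFreeINTEL(ctx, host_buf);  /* blocks until the allocation is idle */
  return err;
}
```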
*/ + +#define cl_intel_unified_shared_memory 1 + +/* cl_device_info */ +#define CL_DEVICE_HOST_MEM_CAPABILITIES_INTEL 0x4190 +#define CL_DEVICE_DEVICE_MEM_CAPABILITIES_INTEL 0x4191 +#define CL_DEVICE_SINGLE_DEVICE_SHARED_MEM_CAPABILITIES_INTEL 0x4192 +#define CL_DEVICE_CROSS_DEVICE_SHARED_MEM_CAPABILITIES_INTEL 0x4193 +#define CL_DEVICE_SHARED_SYSTEM_MEM_CAPABILITIES_INTEL 0x4194 + +typedef cl_bitfield cl_device_unified_shared_memory_capabilities_intel; + +/* cl_device_unified_shared_memory_capabilities_intel - bitfield */ +#define CL_UNIFIED_SHARED_MEMORY_ACCESS_INTEL (1 << 0) +#define CL_UNIFIED_SHARED_MEMORY_ATOMIC_ACCESS_INTEL (1 << 1) +#define CL_UNIFIED_SHARED_MEMORY_CONCURRENT_ACCESS_INTEL (1 << 2) +#define CL_UNIFIED_SHARED_MEMORY_CONCURRENT_ATOMIC_ACCESS_INTEL (1 << 3) + +typedef cl_properties cl_mem_properties_intel; + +/* cl_mem_properties_intel */ +#define CL_MEM_ALLOC_FLAGS_INTEL 0x4195 + +typedef cl_bitfield cl_mem_alloc_flags_intel; + +/* cl_mem_alloc_flags_intel - bitfield */ +#define CL_MEM_ALLOC_WRITE_COMBINED_INTEL (1 << 0) + +typedef cl_uint cl_mem_info_intel; + +/* cl_mem_alloc_info_intel */ +#define CL_MEM_ALLOC_TYPE_INTEL 0x419A +#define CL_MEM_ALLOC_BASE_PTR_INTEL 0x419B +#define CL_MEM_ALLOC_SIZE_INTEL 0x419C +#define CL_MEM_ALLOC_DEVICE_INTEL 0x419D +/* Enum values 0x419E-0x419F are reserved for future queries. */ + +typedef cl_uint cl_unified_shared_memory_type_intel; + +/* cl_unified_shared_memory_type_intel */ +#define CL_MEM_TYPE_UNKNOWN_INTEL 0x4196 +#define CL_MEM_TYPE_HOST_INTEL 0x4197 +#define CL_MEM_TYPE_DEVICE_INTEL 0x4198 +#define CL_MEM_TYPE_SHARED_INTEL 0x4199 + +typedef cl_uint cl_mem_advice_intel; + +/* cl_mem_advice_intel */ +/* Enum values 0x4208-0x420F are reserved for future memory advices. */ + +/* cl_kernel_exec_info */ +#define CL_KERNEL_EXEC_INFO_INDIRECT_HOST_ACCESS_INTEL 0x4200 +#define CL_KERNEL_EXEC_INFO_INDIRECT_DEVICE_ACCESS_INTEL 0x4201 +#define CL_KERNEL_EXEC_INFO_INDIRECT_SHARED_ACCESS_INTEL 0x4202 +#define CL_KERNEL_EXEC_INFO_USM_PTRS_INTEL 0x4203 + +/* cl_command_type */ +#define CL_COMMAND_MEMFILL_INTEL 0x4204 +#define CL_COMMAND_MEMCPY_INTEL 0x4205 +#define CL_COMMAND_MIGRATEMEM_INTEL 0x4206 +#define CL_COMMAND_MEMADVISE_INTEL 0x4207 + +extern CL_API_ENTRY void* CL_API_CALL +clHostMemAllocINTEL( + cl_context context, + const cl_mem_properties_intel* properties, + size_t size, + cl_uint alignment, + cl_int* errcode_ret); + +typedef CL_API_ENTRY void* (CL_API_CALL * +clHostMemAllocINTEL_fn)( + cl_context context, + const cl_mem_properties_intel* properties, + size_t size, + cl_uint alignment, + cl_int* errcode_ret); + +extern CL_API_ENTRY void* CL_API_CALL +clDeviceMemAllocINTEL( + cl_context context, + cl_device_id device, + const cl_mem_properties_intel* properties, + size_t size, + cl_uint alignment, + cl_int* errcode_ret); + +typedef CL_API_ENTRY void* (CL_API_CALL * +clDeviceMemAllocINTEL_fn)( + cl_context context, + cl_device_id device, + const cl_mem_properties_intel* properties, + size_t size, + cl_uint alignment, + cl_int* errcode_ret); + +extern CL_API_ENTRY void* CL_API_CALL +clSharedMemAllocINTEL( + cl_context context, + cl_device_id device, + const cl_mem_properties_intel* properties, + size_t size, + cl_uint alignment, + cl_int* errcode_ret); + +typedef CL_API_ENTRY void* (CL_API_CALL * +clSharedMemAllocINTEL_fn)( + cl_context context, + cl_device_id device, + const cl_mem_properties_intel* properties, + size_t size, + cl_uint alignment, + cl_int* errcode_ret); + +extern CL_API_ENTRY cl_int CL_API_CALL 
+clMemFreeINTEL( + cl_context context, + void* ptr); + +typedef CL_API_ENTRY cl_int (CL_API_CALL * +clMemFreeINTEL_fn)( + cl_context context, + void* ptr); + +extern CL_API_ENTRY cl_int CL_API_CALL +clMemBlockingFreeINTEL( + cl_context context, + void* ptr); + +typedef CL_API_ENTRY cl_int (CL_API_CALL * +clMemBlockingFreeINTEL_fn)( + cl_context context, + void* ptr); + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetMemAllocInfoINTEL( + cl_context context, + const void* ptr, + cl_mem_info_intel param_name, + size_t param_value_size, + void* param_value, + size_t* param_value_size_ret); + +typedef CL_API_ENTRY cl_int (CL_API_CALL * +clGetMemAllocInfoINTEL_fn)( + cl_context context, + const void* ptr, + cl_mem_info_intel param_name, + size_t param_value_size, + void* param_value, + size_t* param_value_size_ret); + +extern CL_API_ENTRY cl_int CL_API_CALL +clSetKernelArgMemPointerINTEL( + cl_kernel kernel, + cl_uint arg_index, + const void* arg_value); + +typedef CL_API_ENTRY cl_int (CL_API_CALL * +clSetKernelArgMemPointerINTEL_fn)( + cl_kernel kernel, + cl_uint arg_index, + const void* arg_value); + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueMemsetINTEL( /* Deprecated */ + cl_command_queue command_queue, + void* dst_ptr, + cl_int value, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event); + +typedef CL_API_ENTRY cl_int (CL_API_CALL * +clEnqueueMemsetINTEL_fn)( /* Deprecated */ + cl_command_queue command_queue, + void* dst_ptr, + cl_int value, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event); + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueMemFillINTEL( + cl_command_queue command_queue, + void* dst_ptr, + const void* pattern, + size_t pattern_size, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event); + +typedef CL_API_ENTRY cl_int (CL_API_CALL * +clEnqueueMemFillINTEL_fn)( + cl_command_queue command_queue, + void* dst_ptr, + const void* pattern, + size_t pattern_size, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event); + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueMemcpyINTEL( + cl_command_queue command_queue, + cl_bool blocking, + void* dst_ptr, + const void* src_ptr, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event); + +typedef CL_API_ENTRY cl_int (CL_API_CALL * +clEnqueueMemcpyINTEL_fn)( + cl_command_queue command_queue, + cl_bool blocking, + void* dst_ptr, + const void* src_ptr, + size_t size, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event); + +#ifdef CL_VERSION_1_2 + +/* Because these APIs use cl_mem_migration_flags, they require + OpenCL 1.2: */ + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueMigrateMemINTEL( + cl_command_queue command_queue, + const void* ptr, + size_t size, + cl_mem_migration_flags flags, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event); + +typedef CL_API_ENTRY cl_int (CL_API_CALL * +clEnqueueMigrateMemINTEL_fn)( + cl_command_queue command_queue, + const void* ptr, + size_t size, + cl_mem_migration_flags flags, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event); + +#endif + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueMemAdviseINTEL( + cl_command_queue command_queue, + const void* ptr, + size_t size, + cl_mem_advice_intel advice, + cl_uint num_events_in_wait_list, + 
const cl_event* event_wait_list, + cl_event* event); + +typedef CL_API_ENTRY cl_int (CL_API_CALL * +clEnqueueMemAdviseINTEL_fn)( + cl_command_queue command_queue, + const void* ptr, + size_t size, + cl_mem_advice_intel advice, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event); + +/*************************************************** +* cl_intel_create_buffer_with_properties extension * +****************************************************/ + +#define cl_intel_create_buffer_with_properties 1 + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateBufferWithPropertiesINTEL( + cl_context context, + const cl_mem_properties_intel* properties, + cl_mem_flags flags, + size_t size, + void * host_ptr, + cl_int * errcode_ret) CL_EXT_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_mem (CL_API_CALL * +clCreateBufferWithPropertiesINTEL_fn)( + cl_context context, + const cl_mem_properties_intel* properties, + cl_mem_flags flags, + size_t size, + void * host_ptr, + cl_int * errcode_ret) CL_EXT_SUFFIX__VERSION_1_0; + +/****************************************** +* cl_intel_mem_channel_property extension * +*******************************************/ + +#define CL_MEM_CHANNEL_INTEL 0x4213 + +/********************************* +* cl_intel_mem_force_host_memory * +**********************************/ + +#define cl_intel_mem_force_host_memory 1 + +/* cl_mem_flags */ +#define CL_MEM_FORCE_HOST_MEMORY_INTEL (1 << 20) + +#ifdef __cplusplus +} +#endif + + +#endif /* __CL_EXT_H */ diff --git a/vendor/angle/include/CL/cl_ext_intel.h b/vendor/angle/include/CL/cl_ext_intel.h new file mode 100644 index 00000000..a7ae87a3 --- /dev/null +++ b/vendor/angle/include/CL/cl_ext_intel.h @@ -0,0 +1,19 @@ +/******************************************************************************* + * Copyright (c) 2008-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + ******************************************************************************/ + +#include +#pragma message("The Intel extensions have been moved into cl_ext.h. Please include cl_ext.h directly.") diff --git a/vendor/angle/include/CL/cl_gl.h b/vendor/angle/include/CL/cl_gl.h new file mode 100644 index 00000000..836005d4 --- /dev/null +++ b/vendor/angle/include/CL/cl_gl.h @@ -0,0 +1,169 @@ +/******************************************************************************* + * Copyright (c) 2008-2021 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ + +#ifndef __OPENCL_CL_GL_H +#define __OPENCL_CL_GL_H + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +typedef cl_uint cl_gl_object_type; +typedef cl_uint cl_gl_texture_info; +typedef cl_uint cl_gl_platform_info; +typedef struct __GLsync *cl_GLsync; + +/* cl_gl_object_type = 0x2000 - 0x200F enum values are currently taken */ +#define CL_GL_OBJECT_BUFFER 0x2000 +#define CL_GL_OBJECT_TEXTURE2D 0x2001 +#define CL_GL_OBJECT_TEXTURE3D 0x2002 +#define CL_GL_OBJECT_RENDERBUFFER 0x2003 +#ifdef CL_VERSION_1_2 +#define CL_GL_OBJECT_TEXTURE2D_ARRAY 0x200E +#define CL_GL_OBJECT_TEXTURE1D 0x200F +#define CL_GL_OBJECT_TEXTURE1D_ARRAY 0x2010 +#define CL_GL_OBJECT_TEXTURE_BUFFER 0x2011 +#endif + +/* cl_gl_texture_info */ +#define CL_GL_TEXTURE_TARGET 0x2004 +#define CL_GL_MIPMAP_LEVEL 0x2005 +#ifdef CL_VERSION_1_2 +#define CL_GL_NUM_SAMPLES 0x2012 +#endif + + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateFromGLBuffer(cl_context context, + cl_mem_flags flags, + cl_GLuint bufobj, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateFromGLTexture(cl_context context, + cl_mem_flags flags, + cl_GLenum target, + cl_GLint miplevel, + cl_GLuint texture, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +#endif + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateFromGLRenderbuffer(cl_context context, + cl_mem_flags flags, + cl_GLuint renderbuffer, + cl_int * errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetGLObjectInfo(cl_mem memobj, + cl_gl_object_type * gl_object_type, + cl_GLuint * gl_object_name) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetGLTextureInfo(cl_mem memobj, + cl_gl_texture_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueAcquireGLObjects(cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueReleaseGLObjects(cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem * mem_objects, + cl_uint num_events_in_wait_list, + const cl_event * event_wait_list, + cl_event * event) CL_API_SUFFIX__VERSION_1_0; + + +/* Deprecated OpenCL 1.1 APIs */ +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_mem CL_API_CALL +clCreateFromGLTexture2D(cl_context context, + cl_mem_flags flags, + cl_GLenum target, + cl_GLint miplevel, + cl_GLuint texture, + cl_int * errcode_ret) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +extern CL_API_ENTRY CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_mem CL_API_CALL +clCreateFromGLTexture3D(cl_context context, + cl_mem_flags flags, + cl_GLenum target, + cl_GLint miplevel, + cl_GLuint texture, + cl_int * errcode_ret) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +/* cl_khr_gl_sharing extension */ + +#define cl_khr_gl_sharing 1 + +typedef cl_uint cl_gl_context_info; + +/* Additional Error Codes */ +#define CL_INVALID_GL_SHAREGROUP_REFERENCE_KHR -1000 + +/* cl_gl_context_info */ +#define CL_CURRENT_DEVICE_FOR_GL_CONTEXT_KHR 0x2006 +#define CL_DEVICES_FOR_GL_CONTEXT_KHR 0x2007 + +/* Additional cl_context_properties */ +#define CL_GL_CONTEXT_KHR 0x2008 +#define CL_EGL_DISPLAY_KHR 0x2009 +#define CL_GLX_DISPLAY_KHR 0x200A 
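A brief sketch of the CL/GL sharing flow these declarations describe: wrap an existing GL buffer object as a cl_mem, and bracket any OpenCL use of it with acquire/release calls. It assumes a CL context created against the current GL context (for example via the CL_GL_CONTEXT_KHR property above), a valid queue, and that the application has already finished its GL work on the buffer.

```c
/* Sketch: share a GL buffer object with OpenCL via cl_khr_gl_sharing. */
#include <CL/cl.h>
#include <CL/cl_gl.h>

static cl_mem wrap_gl_buffer(cl_context ctx, cl_command_queue queue,
                             cl_GLuint gl_buffer) {
  cl_int err = CL_SUCCESS;
  cl_mem clbuf = clCreateFromGLBuffer(ctx, CL_MEM_READ_WRITE, gl_buffer, &err);
  if (err != CL_SUCCESS) return NULL;

  /* GL objects must be acquired before OpenCL touches them and released
   * again before GL reuses them. */
  clEnqueueAcquireGLObjects(queue, 1, &clbuf, 0, NULL, NULL);
  /* ... enqueue kernels that read/write clbuf here ... */
  clEnqueueReleaseGLObjects(queue, 1, &clbuf, 0, NULL, NULL);
  return clbuf;
}
```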
+#define CL_WGL_HDC_KHR 0x200B +#define CL_CGL_SHAREGROUP_KHR 0x200C + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetGLContextInfoKHR(const cl_context_properties * properties, + cl_gl_context_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clGetGLContextInfoKHR_fn)( + const cl_context_properties * properties, + cl_gl_context_info param_name, + size_t param_value_size, + void * param_value, + size_t * param_value_size_ret); + +/* + * cl_khr_gl_event extension + */ +#define CL_COMMAND_GL_FENCE_SYNC_OBJECT_KHR 0x200D + +extern CL_API_ENTRY cl_event CL_API_CALL +clCreateEventFromGLsyncKHR(cl_context context, + cl_GLsync sync, + cl_int * errcode_ret) CL_EXT_SUFFIX__VERSION_1_1; + +#ifdef __cplusplus +} +#endif + +#endif /* __OPENCL_CL_GL_H */ diff --git a/vendor/angle/include/CL/cl_gl_ext.h b/vendor/angle/include/CL/cl_gl_ext.h new file mode 100644 index 00000000..8ec81816 --- /dev/null +++ b/vendor/angle/include/CL/cl_gl_ext.h @@ -0,0 +1,18 @@ +/******************************************************************************* + * Copyright (c) 2008-2021 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +#include +#pragma message("All OpenGL-related extensions have been moved into cl_gl.h. Please include cl_gl.h directly.") diff --git a/vendor/angle/include/CL/cl_half.h b/vendor/angle/include/CL/cl_half.h new file mode 100644 index 00000000..ecc42233 --- /dev/null +++ b/vendor/angle/include/CL/cl_half.h @@ -0,0 +1,440 @@ +/******************************************************************************* + * Copyright (c) 2019-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +/** + * This is a header-only utility library that provides OpenCL host code with + * routines for converting to/from cl_half values. + * + * Example usage: + * + * #include + * ... + * cl_half h = cl_half_from_float(0.5f, CL_HALF_RTE); + * cl_float f = cl_half_to_float(h); + */ + +#ifndef OPENCL_CL_HALF_H +#define OPENCL_CL_HALF_H + +#include + +#include + +#ifdef __cplusplus +extern "C" { +#endif + + +/** + * Rounding mode used when converting to cl_half. 
+ */ +typedef enum +{ + CL_HALF_RTE, // round to nearest even + CL_HALF_RTZ, // round towards zero + CL_HALF_RTP, // round towards positive infinity + CL_HALF_RTN, // round towards negative infinity +} cl_half_rounding_mode; + + +/* Private utility macros. */ +#define CL_HALF_EXP_MASK 0x7C00 +#define CL_HALF_MAX_FINITE_MAG 0x7BFF + + +/* + * Utility to deal with values that overflow when converting to half precision. + */ +static inline cl_half cl_half_handle_overflow(cl_half_rounding_mode rounding_mode, + uint16_t sign) +{ + if (rounding_mode == CL_HALF_RTZ) + { + // Round overflow towards zero -> largest finite number (preserving sign) + return (sign << 15) | CL_HALF_MAX_FINITE_MAG; + } + else if (rounding_mode == CL_HALF_RTP && sign) + { + // Round negative overflow towards positive infinity -> most negative finite number + return (1 << 15) | CL_HALF_MAX_FINITE_MAG; + } + else if (rounding_mode == CL_HALF_RTN && !sign) + { + // Round positive overflow towards negative infinity -> largest finite number + return CL_HALF_MAX_FINITE_MAG; + } + + // Overflow to infinity + return (sign << 15) | CL_HALF_EXP_MASK; +} + +/* + * Utility to deal with values that underflow when converting to half precision. + */ +static inline cl_half cl_half_handle_underflow(cl_half_rounding_mode rounding_mode, + uint16_t sign) +{ + if (rounding_mode == CL_HALF_RTP && !sign) + { + // Round underflow towards positive infinity -> smallest positive value + return (sign << 15) | 1; + } + else if (rounding_mode == CL_HALF_RTN && sign) + { + // Round underflow towards negative infinity -> largest negative value + return (sign << 15) | 1; + } + + // Flush to zero + return (sign << 15); +} + + +/** + * Convert a cl_float to a cl_half. + */ +static inline cl_half cl_half_from_float(cl_float f, cl_half_rounding_mode rounding_mode) +{ + // Type-punning to get direct access to underlying bits + union + { + cl_float f; + uint32_t i; + } f32; + f32.f = f; + + // Extract sign bit + uint16_t sign = f32.i >> 31; + + // Extract FP32 exponent and mantissa + uint32_t f_exp = (f32.i >> (CL_FLT_MANT_DIG - 1)) & 0xFF; + uint32_t f_mant = f32.i & ((1 << (CL_FLT_MANT_DIG - 1)) - 1); + + // Remove FP32 exponent bias + int32_t exp = f_exp - CL_FLT_MAX_EXP + 1; + + // Add FP16 exponent bias + uint16_t h_exp = (uint16_t)(exp + CL_HALF_MAX_EXP - 1); + + // Position of the bit that will become the FP16 mantissa LSB + uint32_t lsb_pos = CL_FLT_MANT_DIG - CL_HALF_MANT_DIG; + + // Check for NaN / infinity + if (f_exp == 0xFF) + { + if (f_mant) + { + // NaN -> propagate mantissa and silence it + uint16_t h_mant = (uint16_t)(f_mant >> lsb_pos); + h_mant |= 0x200; + return (sign << 15) | CL_HALF_EXP_MASK | h_mant; + } + else + { + // Infinity -> zero mantissa + return (sign << 15) | CL_HALF_EXP_MASK; + } + } + + // Check for zero + if (!f_exp && !f_mant) + { + return (sign << 15); + } + + // Check for overflow + if (exp >= CL_HALF_MAX_EXP) + { + return cl_half_handle_overflow(rounding_mode, sign); + } + + // Check for underflow + if (exp < (CL_HALF_MIN_EXP - CL_HALF_MANT_DIG - 1)) + { + return cl_half_handle_underflow(rounding_mode, sign); + } + + // Check for value that will become denormal + if (exp < -14) + { + // Denormal -> include the implicit 1 from the FP32 mantissa + h_exp = 0; + f_mant |= 1 << (CL_FLT_MANT_DIG - 1); + + // Mantissa shift amount depends on exponent + lsb_pos = -exp + (CL_FLT_MANT_DIG - 25); + } + + // Generate FP16 mantissa by shifting FP32 mantissa + uint16_t h_mant = (uint16_t)(f_mant >> lsb_pos); + + // Check whether we 
need to round + uint32_t halfway = 1 << (lsb_pos - 1); + uint32_t mask = (halfway << 1) - 1; + switch (rounding_mode) + { + case CL_HALF_RTE: + if ((f_mant & mask) > halfway) + { + // More than halfway -> round up + h_mant += 1; + } + else if ((f_mant & mask) == halfway) + { + // Exactly halfway -> round to nearest even + if (h_mant & 0x1) + h_mant += 1; + } + break; + case CL_HALF_RTZ: + // Mantissa has already been truncated -> do nothing + break; + case CL_HALF_RTP: + if ((f_mant & mask) && !sign) + { + // Round positive numbers up + h_mant += 1; + } + break; + case CL_HALF_RTN: + if ((f_mant & mask) && sign) + { + // Round negative numbers down + h_mant += 1; + } + break; + } + + // Check for mantissa overflow + if (h_mant & 0x400) + { + h_exp += 1; + h_mant = 0; + } + + return (sign << 15) | (h_exp << 10) | h_mant; +} + + +/** + * Convert a cl_double to a cl_half. + */ +static inline cl_half cl_half_from_double(cl_double d, cl_half_rounding_mode rounding_mode) +{ + // Type-punning to get direct access to underlying bits + union + { + cl_double d; + uint64_t i; + } f64; + f64.d = d; + + // Extract sign bit + uint16_t sign = f64.i >> 63; + + // Extract FP64 exponent and mantissa + uint64_t d_exp = (f64.i >> (CL_DBL_MANT_DIG - 1)) & 0x7FF; + uint64_t d_mant = f64.i & (((uint64_t)1 << (CL_DBL_MANT_DIG - 1)) - 1); + + // Remove FP64 exponent bias + int64_t exp = d_exp - CL_DBL_MAX_EXP + 1; + + // Add FP16 exponent bias + uint16_t h_exp = (uint16_t)(exp + CL_HALF_MAX_EXP - 1); + + // Position of the bit that will become the FP16 mantissa LSB + uint32_t lsb_pos = CL_DBL_MANT_DIG - CL_HALF_MANT_DIG; + + // Check for NaN / infinity + if (d_exp == 0x7FF) + { + if (d_mant) + { + // NaN -> propagate mantissa and silence it + uint16_t h_mant = (uint16_t)(d_mant >> lsb_pos); + h_mant |= 0x200; + return (sign << 15) | CL_HALF_EXP_MASK | h_mant; + } + else + { + // Infinity -> zero mantissa + return (sign << 15) | CL_HALF_EXP_MASK; + } + } + + // Check for zero + if (!d_exp && !d_mant) + { + return (sign << 15); + } + + // Check for overflow + if (exp >= CL_HALF_MAX_EXP) + { + return cl_half_handle_overflow(rounding_mode, sign); + } + + // Check for underflow + if (exp < (CL_HALF_MIN_EXP - CL_HALF_MANT_DIG - 1)) + { + return cl_half_handle_underflow(rounding_mode, sign); + } + + // Check for value that will become denormal + if (exp < -14) + { + // Include the implicit 1 from the FP64 mantissa + h_exp = 0; + d_mant |= (uint64_t)1 << (CL_DBL_MANT_DIG - 1); + + // Mantissa shift amount depends on exponent + lsb_pos = (uint32_t)(-exp + (CL_DBL_MANT_DIG - 25)); + } + + // Generate FP16 mantissa by shifting FP64 mantissa + uint16_t h_mant = (uint16_t)(d_mant >> lsb_pos); + + // Check whether we need to round + uint64_t halfway = (uint64_t)1 << (lsb_pos - 1); + uint64_t mask = (halfway << 1) - 1; + switch (rounding_mode) + { + case CL_HALF_RTE: + if ((d_mant & mask) > halfway) + { + // More than halfway -> round up + h_mant += 1; + } + else if ((d_mant & mask) == halfway) + { + // Exactly halfway -> round to nearest even + if (h_mant & 0x1) + h_mant += 1; + } + break; + case CL_HALF_RTZ: + // Mantissa has already been truncated -> do nothing + break; + case CL_HALF_RTP: + if ((d_mant & mask) && !sign) + { + // Round positive numbers up + h_mant += 1; + } + break; + case CL_HALF_RTN: + if ((d_mant & mask) && sign) + { + // Round negative numbers down + h_mant += 1; + } + break; + } + + // Check for mantissa overflow + if (h_mant & 0x400) + { + h_exp += 1; + h_mant = 0; + } + + return (sign << 15) | (h_exp 
<< 10) | h_mant; +} + + +/** + * Convert a cl_half to a cl_float. + */ +static inline cl_float cl_half_to_float(cl_half h) +{ + // Type-punning to get direct access to underlying bits + union + { + cl_float f; + uint32_t i; + } f32; + + // Extract sign bit + uint16_t sign = h >> 15; + + // Extract FP16 exponent and mantissa + uint16_t h_exp = (h >> (CL_HALF_MANT_DIG - 1)) & 0x1F; + uint16_t h_mant = h & 0x3FF; + + // Remove FP16 exponent bias + int32_t exp = h_exp - CL_HALF_MAX_EXP + 1; + + // Add FP32 exponent bias + uint32_t f_exp = exp + CL_FLT_MAX_EXP - 1; + + // Check for NaN / infinity + if (h_exp == 0x1F) + { + if (h_mant) + { + // NaN -> propagate mantissa and silence it + uint32_t f_mant = h_mant << (CL_FLT_MANT_DIG - CL_HALF_MANT_DIG); + f_mant |= 0x400000; + f32.i = (sign << 31) | 0x7F800000 | f_mant; + return f32.f; + } + else + { + // Infinity -> zero mantissa + f32.i = (sign << 31) | 0x7F800000; + return f32.f; + } + } + + // Check for zero / denormal + if (h_exp == 0) + { + if (h_mant == 0) + { + // Zero -> zero exponent + f_exp = 0; + } + else + { + // Denormal -> normalize it + // - Shift mantissa to make most-significant 1 implicit + // - Adjust exponent accordingly + uint32_t shift = 0; + while ((h_mant & 0x400) == 0) + { + h_mant <<= 1; + shift++; + } + h_mant &= 0x3FF; + f_exp -= shift - 1; + } + } + + f32.i = (sign << 31) | (f_exp << 23) | (h_mant << 13); + return f32.f; +} + + +#undef CL_HALF_EXP_MASK +#undef CL_HALF_MAX_FINITE_MAG + + +#ifdef __cplusplus +} +#endif + + +#endif /* OPENCL_CL_HALF_H */ diff --git a/vendor/angle/include/CL/cl_icd.h b/vendor/angle/include/CL/cl_icd.h new file mode 100644 index 00000000..ad770bb6 --- /dev/null +++ b/vendor/angle/include/CL/cl_icd.h @@ -0,0 +1,1294 @@ +/******************************************************************************* + * Copyright (c) 2019-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +#ifndef OPENCL_CL_ICD_H +#define OPENCL_CL_ICD_H + +#include +#include +#include +#include + +#if defined(_WIN32) +#include +#include +#include +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * This file contains pointer type definitions for each of the CL API calls as + * well as a type definition for the dispatch table used by the Khronos ICD + * loader (see cl_khr_icd extension specification for background). 
+ */ + +/* API function pointer definitions */ + +// Platform APIs +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetPlatformIDs)( + cl_uint num_entries, cl_platform_id *platforms, + cl_uint *num_platforms) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetPlatformInfo)( + cl_platform_id platform, cl_platform_info param_name, + size_t param_value_size, void *param_value, + size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +// Device APIs +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetDeviceIDs)( + cl_platform_id platform, cl_device_type device_type, cl_uint num_entries, + cl_device_id *devices, cl_uint *num_devices) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetDeviceInfo)( + cl_device_id device, cl_device_info param_name, size_t param_value_size, + void *param_value, size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clCreateSubDevices)( + cl_device_id in_device, + const cl_device_partition_property *partition_properties, + cl_uint num_entries, cl_device_id *out_devices, cl_uint *num_devices); + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clRetainDevice)( + cl_device_id device) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clReleaseDevice)( + cl_device_id device) CL_API_SUFFIX__VERSION_1_2; + +#else + +typedef void *cl_api_clCreateSubDevices; +typedef void *cl_api_clRetainDevice; +typedef void *cl_api_clReleaseDevice; + +#endif + +// Context APIs +typedef CL_API_ENTRY cl_context(CL_API_CALL *cl_api_clCreateContext)( + const cl_context_properties *properties, cl_uint num_devices, + const cl_device_id *devices, + void(CL_CALLBACK *pfn_notify)(const char *, const void *, size_t, void *), + void *user_data, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_context(CL_API_CALL *cl_api_clCreateContextFromType)( + const cl_context_properties *properties, cl_device_type device_type, + void(CL_CALLBACK *pfn_notify)(const char *, const void *, size_t, void *), + void *user_data, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clRetainContext)( + cl_context context) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clReleaseContext)( + cl_context context) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetContextInfo)( + cl_context context, cl_context_info param_name, size_t param_value_size, + void *param_value, size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +// Command Queue APIs +typedef CL_API_ENTRY cl_command_queue(CL_API_CALL *cl_api_clCreateCommandQueue)( + cl_context context, cl_device_id device, + cl_command_queue_properties properties, + cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_2_0 + +typedef CL_API_ENTRY +cl_command_queue(CL_API_CALL *cl_api_clCreateCommandQueueWithProperties)( + cl_context /* context */, cl_device_id /* device */, + const cl_queue_properties * /* properties */, + cl_int * /* errcode_ret */) CL_API_SUFFIX__VERSION_2_0; + +#else + +typedef void *cl_api_clCreateCommandQueueWithProperties; + +#endif + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clRetainCommandQueue)( + cl_command_queue command_queue) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clReleaseCommandQueue)( + cl_command_queue command_queue) CL_API_SUFFIX__VERSION_1_0; + +typedef 
CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetCommandQueueInfo)( + cl_command_queue command_queue, cl_command_queue_info param_name, + size_t param_value_size, void *param_value, + size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +// Memory Object APIs +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateBuffer)( + cl_context context, cl_mem_flags flags, size_t size, void *host_ptr, + cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateImage)( + cl_context context, cl_mem_flags flags, const cl_image_format *image_format, + const cl_image_desc *image_desc, void *host_ptr, + cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +#else + +typedef void *cl_api_clCreateImage; + +#endif + +#ifdef CL_VERSION_3_0 + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateBufferWithProperties)( + cl_context context, const cl_mem_properties *properties, cl_mem_flags flags, + size_t size, void *host_ptr, + cl_int *errcode_ret) CL_API_SUFFIX__VERSION_3_0; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateImageWithProperties)( + cl_context context, const cl_mem_properties *properties, cl_mem_flags flags, + const cl_image_format *image_format, const cl_image_desc *image_desc, + void *host_ptr, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_3_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL* cl_api_clSetContextDestructorCallback)( + cl_context context, + void(CL_CALLBACK* pfn_notify)(cl_context context, void* user_data), + void* user_data) CL_API_SUFFIX__VERSION_3_0; + +#else + +typedef void *cl_api_clCreateBufferWithProperties; +typedef void *cl_api_clCreateImageWithProperties; +typedef void *cl_api_clSetContextDestructorCallback; + +#endif + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clRetainMemObject)( + cl_mem memobj) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clReleaseMemObject)( + cl_mem memobj) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetSupportedImageFormats)( + cl_context context, cl_mem_flags flags, cl_mem_object_type image_type, + cl_uint num_entries, cl_image_format *image_formats, + cl_uint *num_image_formats) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetMemObjectInfo)( + cl_mem memobj, cl_mem_info param_name, size_t param_value_size, + void *param_value, size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetImageInfo)( + cl_mem image, cl_image_info param_name, size_t param_value_size, + void *param_value, size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_2_0 + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreatePipe)( + cl_context /* context */, cl_mem_flags /* flags */, + cl_uint /* pipe_packet_size */, cl_uint /* pipe_max_packets */, + const cl_pipe_properties * /* properties */, + cl_int * /* errcode_ret */) CL_API_SUFFIX__VERSION_2_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetPipeInfo)( + cl_mem /* pipe */, cl_pipe_info /* param_name */, + size_t /* param_value_size */, void * /* param_value */, + size_t * /* param_value_size_ret */) CL_API_SUFFIX__VERSION_2_0; + +typedef CL_API_ENTRY void *(CL_API_CALL *cl_api_clSVMAlloc)( + cl_context /* context */, cl_svm_mem_flags /* flags */, size_t /* size */, + unsigned int /* alignment */)CL_API_SUFFIX__VERSION_2_0; + +typedef CL_API_ENTRY void(CL_API_CALL *cl_api_clSVMFree)( + cl_context /* context */, + void * /* svm_pointer 
*/) CL_API_SUFFIX__VERSION_2_0; + +#else + +typedef void *cl_api_clCreatePipe; +typedef void *cl_api_clGetPipeInfo; +typedef void *cl_api_clSVMAlloc; +typedef void *cl_api_clSVMFree; + +#endif + +// Sampler APIs +typedef CL_API_ENTRY cl_sampler(CL_API_CALL *cl_api_clCreateSampler)( + cl_context context, cl_bool normalized_coords, + cl_addressing_mode addressing_mode, cl_filter_mode filter_mode, + cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clRetainSampler)( + cl_sampler sampler) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clReleaseSampler)( + cl_sampler sampler) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetSamplerInfo)( + cl_sampler sampler, cl_sampler_info param_name, size_t param_value_size, + void *param_value, size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_2_0 + +typedef CL_API_ENTRY +cl_sampler(CL_API_CALL *cl_api_clCreateSamplerWithProperties)( + cl_context /* context */, + const cl_sampler_properties * /* sampler_properties */, + cl_int * /* errcode_ret */) CL_API_SUFFIX__VERSION_2_0; + +#else + +typedef void *cl_api_clCreateSamplerWithProperties; + +#endif + +// Program Object APIs +typedef CL_API_ENTRY cl_program(CL_API_CALL *cl_api_clCreateProgramWithSource)( + cl_context context, cl_uint count, const char **strings, + const size_t *lengths, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_program(CL_API_CALL *cl_api_clCreateProgramWithBinary)( + cl_context context, cl_uint num_devices, const cl_device_id *device_list, + const size_t *lengths, const unsigned char **binaries, + cl_int *binary_status, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +typedef CL_API_ENTRY +cl_program(CL_API_CALL *cl_api_clCreateProgramWithBuiltInKernels)( + cl_context context, cl_uint num_devices, const cl_device_id *device_list, + const char *kernel_names, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +#else + +typedef void *cl_api_clCreateProgramWithBuiltInKernels; + +#endif + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clRetainProgram)( + cl_program program) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clReleaseProgram)( + cl_program program) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clBuildProgram)( + cl_program program, cl_uint num_devices, const cl_device_id *device_list, + const char *options, + void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data), + void *user_data) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clCompileProgram)( + cl_program program, cl_uint num_devices, const cl_device_id *device_list, + const char *options, cl_uint num_input_headers, + const cl_program *input_headers, const char **header_include_names, + void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data), + void *user_data) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_program(CL_API_CALL *cl_api_clLinkProgram)( + cl_context context, cl_uint num_devices, const cl_device_id *device_list, + const char *options, cl_uint num_input_programs, + const cl_program *input_programs, + void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data), + void *user_data, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +#else + +typedef void *cl_api_clCompileProgram; +typedef void *cl_api_clLinkProgram; + +#endif + +#ifdef CL_VERSION_2_2 + 
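The function-pointer typedefs under this CL_VERSION_2_2 guard mirror the OpenCL 2.2 program APIs declared just below (clSetProgramSpecializationConstant, clSetProgramReleaseCallback). As a rough application-side sketch of how the corresponding entry points are used (illustrative only, not part of the vendored header; the helper name and error handling are invented):

/* Illustrative sketch, not part of the Khronos header. */
#define CL_TARGET_OPENCL_VERSION 220
#include <CL/cl.h>

/* Builds a SPIR-V module after overriding specialization constant spec_id.
   context, device, il and il_size are assumed to be supplied by the caller. */
static cl_program build_with_spec_constant(cl_context context, cl_device_id device,
                                           const void *il, size_t il_size,
                                           cl_uint spec_id, cl_uint value) {
  cl_int err = CL_SUCCESS;
  cl_program program = clCreateProgramWithIL(context, il, il_size, &err);
  if (err != CL_SUCCESS) return NULL;
  /* Specialization constants must be set before the program is built. */
  err = clSetProgramSpecializationConstant(program, spec_id, sizeof(value), &value);
  if (err == CL_SUCCESS) err = clBuildProgram(program, 1, &device, "", NULL, NULL);
  if (err != CL_SUCCESS) {
    clReleaseProgram(program);
    return NULL;
  }
  return program;
}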
+typedef CL_API_ENTRY +cl_int(CL_API_CALL *cl_api_clSetProgramSpecializationConstant)( + cl_program program, cl_uint spec_id, size_t spec_size, + const void *spec_value) CL_API_SUFFIX__VERSION_2_2; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clSetProgramReleaseCallback)( + cl_program program, + void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data), + void *user_data) CL_API_SUFFIX__VERSION_2_2; + +#else + +typedef void *cl_api_clSetProgramSpecializationConstant; +typedef void *cl_api_clSetProgramReleaseCallback; + +#endif + +#ifdef CL_VERSION_1_2 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clUnloadPlatformCompiler)( + cl_platform_id platform) CL_API_SUFFIX__VERSION_1_2; + +#else + +typedef void *cl_api_clUnloadPlatformCompiler; + +#endif + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetProgramInfo)( + cl_program program, cl_program_info param_name, size_t param_value_size, + void *param_value, size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetProgramBuildInfo)( + cl_program program, cl_device_id device, cl_program_build_info param_name, + size_t param_value_size, void *param_value, + size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +// Kernel Object APIs +typedef CL_API_ENTRY cl_kernel(CL_API_CALL *cl_api_clCreateKernel)( + cl_program program, const char *kernel_name, + cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clCreateKernelsInProgram)( + cl_program program, cl_uint num_kernels, cl_kernel *kernels, + cl_uint *num_kernels_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clRetainKernel)( + cl_kernel kernel) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clReleaseKernel)( + cl_kernel kernel) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clSetKernelArg)( + cl_kernel kernel, cl_uint arg_index, size_t arg_size, + const void *arg_value) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetKernelInfo)( + cl_kernel kernel, cl_kernel_info param_name, size_t param_value_size, + void *param_value, size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetKernelArgInfo)( + cl_kernel kernel, cl_uint arg_indx, cl_kernel_arg_info param_name, + size_t param_value_size, void *param_value, + size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_2; + +#else + +typedef void *cl_api_clGetKernelArgInfo; + +#endif + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetKernelWorkGroupInfo)( + cl_kernel kernel, cl_device_id device, cl_kernel_work_group_info param_name, + size_t param_value_size, void *param_value, + size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_2_0 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clSetKernelArgSVMPointer)( + cl_kernel /* kernel */, cl_uint /* arg_index */, + const void * /* arg_value */) CL_API_SUFFIX__VERSION_2_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clSetKernelExecInfo)( + cl_kernel /* kernel */, cl_kernel_exec_info /* param_name */, + size_t /* param_value_size */, + const void * /* param_value */) CL_API_SUFFIX__VERSION_2_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetKernelSubGroupInfoKHR)( + cl_kernel /* in_kernel */, cl_device_id /*in_device*/, + cl_kernel_sub_group_info /* param_name */, size_t /*input_value_size*/, + const void * 
/*input_value*/, size_t /*param_value_size*/, + void * /*param_value*/, + size_t * /*param_value_size_ret*/) CL_EXT_SUFFIX__VERSION_2_0; + +#else + +typedef void *cl_api_clSetKernelArgSVMPointer; +typedef void *cl_api_clSetKernelExecInfo; +typedef void *cl_api_clGetKernelSubGroupInfoKHR; + +#endif + +// Event Object APIs +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clWaitForEvents)( + cl_uint num_events, const cl_event *event_list) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetEventInfo)( + cl_event event, cl_event_info param_name, size_t param_value_size, + void *param_value, size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clRetainEvent)(cl_event event) + CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clReleaseEvent)(cl_event event) + CL_API_SUFFIX__VERSION_1_0; + +// Profiling APIs +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetEventProfilingInfo)( + cl_event event, cl_profiling_info param_name, size_t param_value_size, + void *param_value, size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +// Flush and Finish APIs +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clFlush)( + cl_command_queue command_queue) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clFinish)( + cl_command_queue command_queue) CL_API_SUFFIX__VERSION_1_0; + +// Enqueued Commands APIs +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueReadBuffer)( + cl_command_queue command_queue, cl_mem buffer, cl_bool blocking_read, + size_t offset, size_t cb, void *ptr, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_1 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueReadBufferRect)( + cl_command_queue command_queue, cl_mem buffer, cl_bool blocking_read, + const size_t *buffer_origin, const size_t *host_origin, + const size_t *region, size_t buffer_row_pitch, size_t buffer_slice_pitch, + size_t host_row_pitch, size_t host_slice_pitch, void *ptr, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_1; + +#else + +typedef void *cl_api_clEnqueueReadBufferRect; + +#endif + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueWriteBuffer)( + cl_command_queue command_queue, cl_mem buffer, cl_bool blocking_write, + size_t offset, size_t cb, const void *ptr, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_1 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueWriteBufferRect)( + cl_command_queue command_queue, cl_mem buffer, cl_bool blocking_read, + const size_t *buffer_origin, const size_t *host_origin, + const size_t *region, size_t buffer_row_pitch, size_t buffer_slice_pitch, + size_t host_row_pitch, size_t host_slice_pitch, const void *ptr, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_1; + +#else + +typedef void *cl_api_clEnqueueWriteBufferRect; + +#endif + +#ifdef CL_VERSION_1_2 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueFillBuffer)( + cl_command_queue command_queue, cl_mem buffer, const void *pattern, + size_t pattern_size, size_t offset, size_t cb, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_2; + +#else + +typedef void 
*cl_api_clEnqueueFillBuffer; + +#endif + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueCopyBuffer)( + cl_command_queue command_queue, cl_mem src_buffer, cl_mem dst_buffer, + size_t src_offset, size_t dst_offset, size_t cb, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_1 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueCopyBufferRect)( + cl_command_queue command_queue, cl_mem src_buffer, cl_mem dst_buffer, + const size_t *src_origin, const size_t *dst_origin, const size_t *region, + size_t src_row_pitch, size_t src_slice_pitch, size_t dst_row_pitch, + size_t dst_slice_pitch, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_1; + +#else + +typedef void *cl_api_clEnqueueCopyBufferRect; + +#endif + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueReadImage)( + cl_command_queue command_queue, cl_mem image, cl_bool blocking_read, + const size_t *origin, const size_t *region, size_t row_pitch, + size_t slice_pitch, void *ptr, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueWriteImage)( + cl_command_queue command_queue, cl_mem image, cl_bool blocking_write, + const size_t *origin, const size_t *region, size_t input_row_pitch, + size_t input_slice_pitch, const void *ptr, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueFillImage)( + cl_command_queue command_queue, cl_mem image, const void *fill_color, + const size_t origin[3], const size_t region[3], + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_2; + +#else + +typedef void *cl_api_clEnqueueFillImage; + +#endif + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueCopyImage)( + cl_command_queue command_queue, cl_mem src_image, cl_mem dst_image, + const size_t *src_origin, const size_t *dst_origin, const size_t *region, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueCopyImageToBuffer)( + cl_command_queue command_queue, cl_mem src_image, cl_mem dst_buffer, + const size_t *src_origin, const size_t *region, size_t dst_offset, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueCopyBufferToImage)( + cl_command_queue command_queue, cl_mem src_buffer, cl_mem dst_image, + size_t src_offset, const size_t *dst_origin, const size_t *region, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY void *(CL_API_CALL *cl_api_clEnqueueMapBuffer)( + cl_command_queue command_queue, cl_mem buffer, cl_bool blocking_map, + cl_map_flags map_flags, size_t offset, size_t cb, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event, cl_int *errcode_ret)CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY void *(CL_API_CALL *cl_api_clEnqueueMapImage)( + cl_command_queue command_queue, cl_mem image, cl_bool blocking_map, + cl_map_flags map_flags, const size_t *origin, const size_t *region, + 
size_t *image_row_pitch, size_t *image_slice_pitch, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event, cl_int *errcode_ret)CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueUnmapMemObject)( + cl_command_queue command_queue, cl_mem memobj, void *mapped_ptr, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueMigrateMemObjects)( + cl_command_queue command_queue, cl_uint num_mem_objects, + const cl_mem *mem_objects, cl_mem_migration_flags flags, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_2; + +#else + +typedef void *cl_api_clEnqueueMigrateMemObjects; + +#endif + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueNDRangeKernel)( + cl_command_queue command_queue, cl_kernel kernel, cl_uint work_dim, + const size_t *global_work_offset, const size_t *global_work_size, + const size_t *local_work_size, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueTask)( + cl_command_queue command_queue, cl_kernel kernel, + cl_uint num_events_in_wait_list, const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueNativeKernel)( + cl_command_queue command_queue, void(CL_CALLBACK *user_func)(void *), + void *args, size_t cb_args, cl_uint num_mem_objects, const cl_mem *mem_list, + const void **args_mem_loc, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +#ifdef CL_VERSION_1_2 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueMarkerWithWaitList)( + cl_command_queue command_queue, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueBarrierWithWaitList)( + cl_command_queue command_queue, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY void *( + CL_API_CALL *cl_api_clGetExtensionFunctionAddressForPlatform)( + cl_platform_id platform, + const char *function_name)CL_API_SUFFIX__VERSION_1_2; + +#else + +typedef void *cl_api_clEnqueueMarkerWithWaitList; +typedef void *cl_api_clEnqueueBarrierWithWaitList; +typedef void *cl_api_clGetExtensionFunctionAddressForPlatform; + +#endif + +// Shared Virtual Memory APIs + +#ifdef CL_VERSION_2_0 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueSVMFree)( + cl_command_queue /* command_queue */, cl_uint /* num_svm_pointers */, + void ** /* svm_pointers */, + void(CL_CALLBACK *pfn_free_func)(cl_command_queue /* queue */, + cl_uint /* num_svm_pointers */, + void ** /* svm_pointers[] */, + void * /* user_data */), + void * /* user_data */, cl_uint /* num_events_in_wait_list */, + const cl_event * /* event_wait_list */, + cl_event * /* event */) CL_API_SUFFIX__VERSION_2_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueSVMMemcpy)( + cl_command_queue /* command_queue */, cl_bool /* blocking_copy */, + void * /* dst_ptr */, const void * /* src_ptr */, size_t /* size */, + cl_uint /* num_events_in_wait_list */, + const cl_event * /* event_wait_list */, + cl_event * /* event */) 
CL_API_SUFFIX__VERSION_2_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueSVMMemFill)( + cl_command_queue /* command_queue */, void * /* svm_ptr */, + const void * /* pattern */, size_t /* pattern_size */, size_t /* size */, + cl_uint /* num_events_in_wait_list */, + const cl_event * /* event_wait_list */, + cl_event * /* event */) CL_API_SUFFIX__VERSION_2_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueSVMMap)( + cl_command_queue /* command_queue */, cl_bool /* blocking_map */, + cl_map_flags /* map_flags */, void * /* svm_ptr */, size_t /* size */, + cl_uint /* num_events_in_wait_list */, + const cl_event * /* event_wait_list */, + cl_event * /* event */) CL_API_SUFFIX__VERSION_2_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueSVMUnmap)( + cl_command_queue /* command_queue */, void * /* svm_ptr */, + cl_uint /* num_events_in_wait_list */, + const cl_event * /* event_wait_list */, + cl_event * /* event */) CL_API_SUFFIX__VERSION_2_0; + +#else + +typedef void *cl_api_clEnqueueSVMFree; +typedef void *cl_api_clEnqueueSVMMemcpy; +typedef void *cl_api_clEnqueueSVMMemFill; +typedef void *cl_api_clEnqueueSVMMap; +typedef void *cl_api_clEnqueueSVMUnmap; + +#endif + +// Deprecated APIs +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clSetCommandQueueProperty)( + cl_command_queue command_queue, cl_command_queue_properties properties, + cl_bool enable, cl_command_queue_properties *old_properties) + CL_EXT_SUFFIX__VERSION_1_0_DEPRECATED; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateImage2D)( + cl_context context, cl_mem_flags flags, const cl_image_format *image_format, + size_t image_width, size_t image_height, size_t image_row_pitch, + void *host_ptr, cl_int *errcode_ret) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateImage3D)( + cl_context context, cl_mem_flags flags, const cl_image_format *image_format, + size_t image_width, size_t image_height, size_t image_depth, + size_t image_row_pitch, size_t image_slice_pitch, void *host_ptr, + cl_int *errcode_ret) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clUnloadCompiler)(void) + CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueMarker)( + cl_command_queue command_queue, + cl_event *event) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueWaitForEvents)( + cl_command_queue command_queue, cl_uint num_events, + const cl_event *event_list) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueBarrier)( + cl_command_queue command_queue) CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +typedef CL_API_ENTRY void *(CL_API_CALL *cl_api_clGetExtensionFunctionAddress)( + const char *function_name)CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED; + +// GL and other APIs +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromGLBuffer)( + cl_context context, cl_mem_flags flags, cl_GLuint bufobj, + int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromGLTexture)( + cl_context context, cl_mem_flags flags, cl_GLenum target, cl_GLint miplevel, + cl_GLuint texture, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromGLTexture2D)( + cl_context context, cl_mem_flags flags, cl_GLenum target, cl_GLint miplevel, + cl_GLuint texture, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + 
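The GL interop entry points typed above (clCreateFromGLBuffer and friends) are normally used by acquiring the shared object on a command queue, running work, then releasing it back to GL. A minimal sketch (illustrative only, not part of the vendored header; it assumes `context` was created with cl_khr_gl_sharing properties, `vbo` is a live GL buffer object, and the kernel takes the buffer as its first argument):

/* Illustrative sketch, not part of the Khronos header. */
#include <CL/cl.h>
#include <CL/cl_gl.h>

static cl_int run_on_gl_buffer(cl_context context, cl_command_queue queue,
                               cl_kernel kernel, cl_GLuint vbo, size_t work_items) {
  cl_int err = CL_SUCCESS;
  cl_mem mem = clCreateFromGLBuffer(context, CL_MEM_READ_WRITE, vbo, &err);
  if (err != CL_SUCCESS) return err;
  /* GL must be done with the buffer before CL acquires it. */
  err = clEnqueueAcquireGLObjects(queue, 1, &mem, 0, NULL, NULL);
  if (err == CL_SUCCESS) err = clSetKernelArg(kernel, 0, sizeof(mem), &mem);
  if (err == CL_SUCCESS)
    err = clEnqueueNDRangeKernel(queue, kernel, 1, NULL, &work_items, NULL, 0, NULL, NULL);
  if (err == CL_SUCCESS) err = clEnqueueReleaseGLObjects(queue, 1, &mem, 0, NULL, NULL);
  if (err == CL_SUCCESS) err = clFinish(queue);
  clReleaseMemObject(mem);
  return err;
}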
+typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromGLTexture3D)( + cl_context context, cl_mem_flags flags, cl_GLenum target, cl_GLint miplevel, + cl_GLuint texture, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromGLRenderbuffer)( + cl_context context, cl_mem_flags flags, cl_GLuint renderbuffer, + cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetGLObjectInfo)( + cl_mem memobj, cl_gl_object_type *gl_object_type, + cl_GLuint *gl_object_name) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetGLTextureInfo)( + cl_mem memobj, cl_gl_texture_info param_name, size_t param_value_size, + void *param_value, size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueAcquireGLObjects)( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueReleaseGLObjects)( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +/* cl_khr_gl_sharing */ +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetGLContextInfoKHR)( + const cl_context_properties *properties, cl_gl_context_info param_name, + size_t param_value_size, void *param_value, size_t *param_value_size_ret); + +/* cl_khr_gl_event */ +typedef CL_API_ENTRY cl_event(CL_API_CALL *cl_api_clCreateEventFromGLsyncKHR)( + cl_context context, cl_GLsync sync, cl_int *errcode_ret); + +#if defined(_WIN32) + +/* cl_khr_d3d10_sharing */ + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetDeviceIDsFromD3D10KHR)( + cl_platform_id platform, cl_d3d10_device_source_khr d3d_device_source, + void *d3d_object, cl_d3d10_device_set_khr d3d_device_set, + cl_uint num_entries, cl_device_id *devices, + cl_uint *num_devices) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromD3D10BufferKHR)( + cl_context context, cl_mem_flags flags, ID3D10Buffer *resource, + cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromD3D10Texture2DKHR)( + cl_context context, cl_mem_flags flags, ID3D10Texture2D *resource, + UINT subresource, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromD3D10Texture3DKHR)( + cl_context context, cl_mem_flags flags, ID3D10Texture3D *resource, + UINT subresource, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY +cl_int(CL_API_CALL *cl_api_clEnqueueAcquireD3D10ObjectsKHR)( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY +cl_int(CL_API_CALL *cl_api_clEnqueueReleaseD3D10ObjectsKHR)( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_0; + +extern CL_API_ENTRY cl_int CL_API_CALL clGetDeviceIDsFromD3D10KHR( + cl_platform_id platform, cl_d3d10_device_source_khr d3d_device_source, + void *d3d_object, 
cl_d3d10_device_set_khr d3d_device_set, + cl_uint num_entries, cl_device_id *devices, cl_uint *num_devices); + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateFromD3D10BufferKHR(cl_context context, cl_mem_flags flags, + ID3D10Buffer *resource, cl_int *errcode_ret); + +extern CL_API_ENTRY cl_mem CL_API_CALL clCreateFromD3D10Texture2DKHR( + cl_context context, cl_mem_flags flags, ID3D10Texture2D *resource, + UINT subresource, cl_int *errcode_ret); + +extern CL_API_ENTRY cl_mem CL_API_CALL clCreateFromD3D10Texture3DKHR( + cl_context context, cl_mem_flags flags, ID3D10Texture3D *resource, + UINT subresource, cl_int *errcode_ret); + +extern CL_API_ENTRY cl_int CL_API_CALL clEnqueueAcquireD3D10ObjectsKHR( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, cl_event *event); + +extern CL_API_ENTRY cl_int CL_API_CALL clEnqueueReleaseD3D10ObjectsKHR( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, cl_event *event); + +/* cl_khr_d3d11_sharing */ +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetDeviceIDsFromD3D11KHR)( + cl_platform_id platform, cl_d3d11_device_source_khr d3d_device_source, + void *d3d_object, cl_d3d11_device_set_khr d3d_device_set, + cl_uint num_entries, cl_device_id *devices, + cl_uint *num_devices) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromD3D11BufferKHR)( + cl_context context, cl_mem_flags flags, ID3D11Buffer *resource, + cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromD3D11Texture2DKHR)( + cl_context context, cl_mem_flags flags, ID3D11Texture2D *resource, + UINT subresource, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromD3D11Texture3DKHR)( + cl_context context, cl_mem_flags flags, ID3D11Texture3D *resource, + UINT subresource, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY +cl_int(CL_API_CALL *cl_api_clEnqueueAcquireD3D11ObjectsKHR)( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY +cl_int(CL_API_CALL *cl_api_clEnqueueReleaseD3D11ObjectsKHR)( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_2; + +/* cl_khr_dx9_media_sharing */ +typedef CL_API_ENTRY +cl_int(CL_API_CALL *cl_api_clGetDeviceIDsFromDX9MediaAdapterKHR)( + cl_platform_id platform, cl_uint num_media_adapters, + cl_dx9_media_adapter_type_khr *media_adapters_type, void *media_adapters, + cl_dx9_media_adapter_set_khr media_adapter_set, cl_uint num_entries, + cl_device_id *devices, cl_uint *num_devices) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromDX9MediaSurfaceKHR)( + cl_context context, cl_mem_flags flags, + cl_dx9_media_adapter_type_khr adapter_type, void *surface_info, + cl_uint plane, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY +cl_int(CL_API_CALL *cl_api_clEnqueueAcquireDX9MediaSurfacesKHR)( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event 
*event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY +cl_int(CL_API_CALL *cl_api_clEnqueueReleaseDX9MediaSurfacesKHR)( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_1_2; + +/* cl_khr_d3d11_sharing */ +extern CL_API_ENTRY cl_int CL_API_CALL clGetDeviceIDsFromD3D11KHR( + cl_platform_id platform, cl_d3d11_device_source_khr d3d_device_source, + void *d3d_object, cl_d3d11_device_set_khr d3d_device_set, + cl_uint num_entries, cl_device_id *devices, cl_uint *num_devices); + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateFromD3D11BufferKHR(cl_context context, cl_mem_flags flags, + ID3D11Buffer *resource, cl_int *errcode_ret); + +extern CL_API_ENTRY cl_mem CL_API_CALL clCreateFromD3D11Texture2DKHR( + cl_context context, cl_mem_flags flags, ID3D11Texture2D *resource, + UINT subresource, cl_int *errcode_ret); + +extern CL_API_ENTRY cl_mem CL_API_CALL clCreateFromD3D11Texture3DKHR( + cl_context context, cl_mem_flags flags, ID3D11Texture3D *resource, + UINT subresource, cl_int *errcode_ret); + +extern CL_API_ENTRY cl_int CL_API_CALL clEnqueueAcquireD3D11ObjectsKHR( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, cl_event *event); + +extern CL_API_ENTRY cl_int CL_API_CALL clEnqueueReleaseD3D11ObjectsKHR( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, cl_event *event); + +/* cl_khr_dx9_media_sharing */ +extern CL_API_ENTRY cl_int CL_API_CALL clGetDeviceIDsFromDX9MediaAdapterKHR( + cl_platform_id platform, cl_uint num_media_adapters, + cl_dx9_media_adapter_type_khr *media_adapter_type, void *media_adapters, + cl_dx9_media_adapter_set_khr media_adapter_set, cl_uint num_entries, + cl_device_id *devices, cl_uint *num_devices); + +extern CL_API_ENTRY cl_mem CL_API_CALL clCreateFromDX9MediaSurfaceKHR( + cl_context context, cl_mem_flags flags, + cl_dx9_media_adapter_type_khr adapter_type, void *surface_info, + cl_uint plane, cl_int *errcode_ret); + +extern CL_API_ENTRY cl_int CL_API_CALL clEnqueueAcquireDX9MediaSurfacesKHR( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, cl_event *event); + +extern CL_API_ENTRY cl_int CL_API_CALL clEnqueueReleaseDX9MediaSurfacesKHR( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, cl_event *event); + +#else + +/* cl_khr_d3d10_sharing */ +typedef void *cl_api_clGetDeviceIDsFromD3D10KHR; +typedef void *cl_api_clCreateFromD3D10BufferKHR; +typedef void *cl_api_clCreateFromD3D10Texture2DKHR; +typedef void *cl_api_clCreateFromD3D10Texture3DKHR; +typedef void *cl_api_clEnqueueAcquireD3D10ObjectsKHR; +typedef void *cl_api_clEnqueueReleaseD3D10ObjectsKHR; + +/* cl_khr_d3d11_sharing */ +typedef void *cl_api_clGetDeviceIDsFromD3D11KHR; +typedef void *cl_api_clCreateFromD3D11BufferKHR; +typedef void *cl_api_clCreateFromD3D11Texture2DKHR; +typedef void *cl_api_clCreateFromD3D11Texture3DKHR; +typedef void *cl_api_clEnqueueAcquireD3D11ObjectsKHR; +typedef void *cl_api_clEnqueueReleaseD3D11ObjectsKHR; + +/* cl_khr_dx9_media_sharing */ +typedef void *cl_api_clCreateFromDX9MediaSurfaceKHR; +typedef void 
*cl_api_clEnqueueAcquireDX9MediaSurfacesKHR; +typedef void *cl_api_clEnqueueReleaseDX9MediaSurfacesKHR; +typedef void *cl_api_clGetDeviceIDsFromDX9MediaAdapterKHR; + +#endif + +/* OpenCL 1.1 */ + +#ifdef CL_VERSION_1_1 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clSetEventCallback)( + cl_event /* event */, cl_int /* command_exec_callback_type */, + void(CL_CALLBACK * /* pfn_notify */)(cl_event, cl_int, void *), + void * /* user_data */) CL_API_SUFFIX__VERSION_1_1; + +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateSubBuffer)( + cl_mem /* buffer */, cl_mem_flags /* flags */, + cl_buffer_create_type /* buffer_create_type */, + const void * /* buffer_create_info */, + cl_int * /* errcode_ret */) CL_API_SUFFIX__VERSION_1_1; + +typedef CL_API_ENTRY +cl_int(CL_API_CALL *cl_api_clSetMemObjectDestructorCallback)( + cl_mem /* memobj */, + void(CL_CALLBACK * /*pfn_notify*/)(cl_mem /* memobj */, + void * /*user_data*/), + void * /*user_data */) CL_API_SUFFIX__VERSION_1_1; + +typedef CL_API_ENTRY cl_event(CL_API_CALL *cl_api_clCreateUserEvent)( + cl_context /* context */, + cl_int * /* errcode_ret */) CL_API_SUFFIX__VERSION_1_1; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clSetUserEventStatus)( + cl_event /* event */, + cl_int /* execution_status */) CL_API_SUFFIX__VERSION_1_1; + +#else + +typedef void *cl_api_clSetEventCallback; +typedef void *cl_api_clCreateSubBuffer; +typedef void *cl_api_clSetMemObjectDestructorCallback; +typedef void *cl_api_clCreateUserEvent; +typedef void *cl_api_clSetUserEventStatus; + +#endif + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clCreateSubDevicesEXT)( + cl_device_id in_device, + const cl_device_partition_property_ext *partition_properties, + cl_uint num_entries, cl_device_id *out_devices, cl_uint *num_devices); + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clRetainDeviceEXT)( + cl_device_id device) CL_API_SUFFIX__VERSION_1_0; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clReleaseDeviceEXT)( + cl_device_id device) CL_API_SUFFIX__VERSION_1_0; + +/* cl_khr_egl_image */ +typedef CL_API_ENTRY cl_mem(CL_API_CALL *cl_api_clCreateFromEGLImageKHR)( + cl_context context, CLeglDisplayKHR display, CLeglImageKHR image, + cl_mem_flags flags, const cl_egl_image_properties_khr *properties, + cl_int *errcode_ret); + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueAcquireEGLObjectsKHR)( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, cl_event *event); + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueReleaseEGLObjectsKHR)( + cl_command_queue command_queue, cl_uint num_objects, + const cl_mem *mem_objects, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, cl_event *event); + +/* cl_khr_egl_event */ +typedef CL_API_ENTRY cl_event(CL_API_CALL *cl_api_clCreateEventFromEGLSyncKHR)( + cl_context context, CLeglSyncKHR sync, CLeglDisplayKHR display, + cl_int *errcode_ret); + +#ifdef CL_VERSION_2_1 + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clSetDefaultDeviceCommandQueue)( + cl_context context, cl_device_id device, + cl_command_queue command_queue) CL_API_SUFFIX__VERSION_2_1; + +typedef CL_API_ENTRY cl_program(CL_API_CALL *cl_api_clCreateProgramWithIL)( + cl_context context, const void *il, size_t length, + cl_int *errcode_ret) CL_API_SUFFIX__VERSION_2_1; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetKernelSubGroupInfo)( + cl_kernel kernel, cl_device_id device, cl_kernel_sub_group_info param_name, 
+ size_t input_value_size, const void *input_value, size_t param_value_size, + void *param_value, size_t *param_value_size_ret) CL_API_SUFFIX__VERSION_2_1; + +typedef CL_API_ENTRY cl_kernel(CL_API_CALL *cl_api_clCloneKernel)( + cl_kernel source_kernel, cl_int *errcode_ret) CL_API_SUFFIX__VERSION_2_1; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clEnqueueSVMMigrateMem)( + cl_command_queue command_queue, cl_uint num_svm_pointers, + const void **svm_pointers, const size_t *sizes, + cl_mem_migration_flags flags, cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) CL_API_SUFFIX__VERSION_2_1; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetDeviceAndHostTimer)( + cl_device_id device, cl_ulong *device_timestamp, + cl_ulong *host_timestamp) CL_API_SUFFIX__VERSION_2_1; + +typedef CL_API_ENTRY cl_int(CL_API_CALL *cl_api_clGetHostTimer)( + cl_device_id device, cl_ulong *host_timestamp) CL_API_SUFFIX__VERSION_2_1; + +#else + +typedef void *cl_api_clSetDefaultDeviceCommandQueue; +typedef void *cl_api_clCreateProgramWithIL; +typedef void *cl_api_clGetKernelSubGroupInfo; +typedef void *cl_api_clCloneKernel; +typedef void *cl_api_clEnqueueSVMMigrateMem; +typedef void *cl_api_clGetDeviceAndHostTimer; +typedef void *cl_api_clGetHostTimer; + +#endif + +/* Vendor dispatch table struture */ + +typedef struct _cl_icd_dispatch { + /* OpenCL 1.0 */ + cl_api_clGetPlatformIDs clGetPlatformIDs; + cl_api_clGetPlatformInfo clGetPlatformInfo; + cl_api_clGetDeviceIDs clGetDeviceIDs; + cl_api_clGetDeviceInfo clGetDeviceInfo; + cl_api_clCreateContext clCreateContext; + cl_api_clCreateContextFromType clCreateContextFromType; + cl_api_clRetainContext clRetainContext; + cl_api_clReleaseContext clReleaseContext; + cl_api_clGetContextInfo clGetContextInfo; + cl_api_clCreateCommandQueue clCreateCommandQueue; + cl_api_clRetainCommandQueue clRetainCommandQueue; + cl_api_clReleaseCommandQueue clReleaseCommandQueue; + cl_api_clGetCommandQueueInfo clGetCommandQueueInfo; + cl_api_clSetCommandQueueProperty clSetCommandQueueProperty; + cl_api_clCreateBuffer clCreateBuffer; + cl_api_clCreateImage2D clCreateImage2D; + cl_api_clCreateImage3D clCreateImage3D; + cl_api_clRetainMemObject clRetainMemObject; + cl_api_clReleaseMemObject clReleaseMemObject; + cl_api_clGetSupportedImageFormats clGetSupportedImageFormats; + cl_api_clGetMemObjectInfo clGetMemObjectInfo; + cl_api_clGetImageInfo clGetImageInfo; + cl_api_clCreateSampler clCreateSampler; + cl_api_clRetainSampler clRetainSampler; + cl_api_clReleaseSampler clReleaseSampler; + cl_api_clGetSamplerInfo clGetSamplerInfo; + cl_api_clCreateProgramWithSource clCreateProgramWithSource; + cl_api_clCreateProgramWithBinary clCreateProgramWithBinary; + cl_api_clRetainProgram clRetainProgram; + cl_api_clReleaseProgram clReleaseProgram; + cl_api_clBuildProgram clBuildProgram; + cl_api_clUnloadCompiler clUnloadCompiler; + cl_api_clGetProgramInfo clGetProgramInfo; + cl_api_clGetProgramBuildInfo clGetProgramBuildInfo; + cl_api_clCreateKernel clCreateKernel; + cl_api_clCreateKernelsInProgram clCreateKernelsInProgram; + cl_api_clRetainKernel clRetainKernel; + cl_api_clReleaseKernel clReleaseKernel; + cl_api_clSetKernelArg clSetKernelArg; + cl_api_clGetKernelInfo clGetKernelInfo; + cl_api_clGetKernelWorkGroupInfo clGetKernelWorkGroupInfo; + cl_api_clWaitForEvents clWaitForEvents; + cl_api_clGetEventInfo clGetEventInfo; + cl_api_clRetainEvent clRetainEvent; + cl_api_clReleaseEvent clReleaseEvent; + cl_api_clGetEventProfilingInfo clGetEventProfilingInfo; + 
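  /* Note (illustrative, not part of the vendored header): an ICD loader
   * dispatches through this table by reading a pointer to it from the first
   * field of every dispatchable object a vendor driver hands out, roughly:
   *
   *   struct _cl_command_queue { cl_icd_dispatch *dispatch; };
   *
   *   cl_int clFinish(cl_command_queue queue) {
   *     return queue->dispatch->clFinish(queue);
   *   }
   *
   * which is also why the member order in this struct is part of the ICD ABI
   * and must not be changed. */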
cl_api_clFlush clFlush; + cl_api_clFinish clFinish; + cl_api_clEnqueueReadBuffer clEnqueueReadBuffer; + cl_api_clEnqueueWriteBuffer clEnqueueWriteBuffer; + cl_api_clEnqueueCopyBuffer clEnqueueCopyBuffer; + cl_api_clEnqueueReadImage clEnqueueReadImage; + cl_api_clEnqueueWriteImage clEnqueueWriteImage; + cl_api_clEnqueueCopyImage clEnqueueCopyImage; + cl_api_clEnqueueCopyImageToBuffer clEnqueueCopyImageToBuffer; + cl_api_clEnqueueCopyBufferToImage clEnqueueCopyBufferToImage; + cl_api_clEnqueueMapBuffer clEnqueueMapBuffer; + cl_api_clEnqueueMapImage clEnqueueMapImage; + cl_api_clEnqueueUnmapMemObject clEnqueueUnmapMemObject; + cl_api_clEnqueueNDRangeKernel clEnqueueNDRangeKernel; + cl_api_clEnqueueTask clEnqueueTask; + cl_api_clEnqueueNativeKernel clEnqueueNativeKernel; + cl_api_clEnqueueMarker clEnqueueMarker; + cl_api_clEnqueueWaitForEvents clEnqueueWaitForEvents; + cl_api_clEnqueueBarrier clEnqueueBarrier; + cl_api_clGetExtensionFunctionAddress clGetExtensionFunctionAddress; + cl_api_clCreateFromGLBuffer clCreateFromGLBuffer; + cl_api_clCreateFromGLTexture2D clCreateFromGLTexture2D; + cl_api_clCreateFromGLTexture3D clCreateFromGLTexture3D; + cl_api_clCreateFromGLRenderbuffer clCreateFromGLRenderbuffer; + cl_api_clGetGLObjectInfo clGetGLObjectInfo; + cl_api_clGetGLTextureInfo clGetGLTextureInfo; + cl_api_clEnqueueAcquireGLObjects clEnqueueAcquireGLObjects; + cl_api_clEnqueueReleaseGLObjects clEnqueueReleaseGLObjects; + cl_api_clGetGLContextInfoKHR clGetGLContextInfoKHR; + + /* cl_khr_d3d10_sharing */ + cl_api_clGetDeviceIDsFromD3D10KHR clGetDeviceIDsFromD3D10KHR; + cl_api_clCreateFromD3D10BufferKHR clCreateFromD3D10BufferKHR; + cl_api_clCreateFromD3D10Texture2DKHR clCreateFromD3D10Texture2DKHR; + cl_api_clCreateFromD3D10Texture3DKHR clCreateFromD3D10Texture3DKHR; + cl_api_clEnqueueAcquireD3D10ObjectsKHR clEnqueueAcquireD3D10ObjectsKHR; + cl_api_clEnqueueReleaseD3D10ObjectsKHR clEnqueueReleaseD3D10ObjectsKHR; + + /* OpenCL 1.1 */ + cl_api_clSetEventCallback clSetEventCallback; + cl_api_clCreateSubBuffer clCreateSubBuffer; + cl_api_clSetMemObjectDestructorCallback clSetMemObjectDestructorCallback; + cl_api_clCreateUserEvent clCreateUserEvent; + cl_api_clSetUserEventStatus clSetUserEventStatus; + cl_api_clEnqueueReadBufferRect clEnqueueReadBufferRect; + cl_api_clEnqueueWriteBufferRect clEnqueueWriteBufferRect; + cl_api_clEnqueueCopyBufferRect clEnqueueCopyBufferRect; + + /* cl_ext_device_fission */ + cl_api_clCreateSubDevicesEXT clCreateSubDevicesEXT; + cl_api_clRetainDeviceEXT clRetainDeviceEXT; + cl_api_clReleaseDeviceEXT clReleaseDeviceEXT; + + /* cl_khr_gl_event */ + cl_api_clCreateEventFromGLsyncKHR clCreateEventFromGLsyncKHR; + + /* OpenCL 1.2 */ + cl_api_clCreateSubDevices clCreateSubDevices; + cl_api_clRetainDevice clRetainDevice; + cl_api_clReleaseDevice clReleaseDevice; + cl_api_clCreateImage clCreateImage; + cl_api_clCreateProgramWithBuiltInKernels clCreateProgramWithBuiltInKernels; + cl_api_clCompileProgram clCompileProgram; + cl_api_clLinkProgram clLinkProgram; + cl_api_clUnloadPlatformCompiler clUnloadPlatformCompiler; + cl_api_clGetKernelArgInfo clGetKernelArgInfo; + cl_api_clEnqueueFillBuffer clEnqueueFillBuffer; + cl_api_clEnqueueFillImage clEnqueueFillImage; + cl_api_clEnqueueMigrateMemObjects clEnqueueMigrateMemObjects; + cl_api_clEnqueueMarkerWithWaitList clEnqueueMarkerWithWaitList; + cl_api_clEnqueueBarrierWithWaitList clEnqueueBarrierWithWaitList; + cl_api_clGetExtensionFunctionAddressForPlatform + clGetExtensionFunctionAddressForPlatform; + cl_api_clCreateFromGLTexture 
clCreateFromGLTexture; + + /* cl_khr_d3d11_sharing */ + cl_api_clGetDeviceIDsFromD3D11KHR clGetDeviceIDsFromD3D11KHR; + cl_api_clCreateFromD3D11BufferKHR clCreateFromD3D11BufferKHR; + cl_api_clCreateFromD3D11Texture2DKHR clCreateFromD3D11Texture2DKHR; + cl_api_clCreateFromD3D11Texture3DKHR clCreateFromD3D11Texture3DKHR; + cl_api_clCreateFromDX9MediaSurfaceKHR clCreateFromDX9MediaSurfaceKHR; + cl_api_clEnqueueAcquireD3D11ObjectsKHR clEnqueueAcquireD3D11ObjectsKHR; + cl_api_clEnqueueReleaseD3D11ObjectsKHR clEnqueueReleaseD3D11ObjectsKHR; + + /* cl_khr_dx9_media_sharing */ + cl_api_clGetDeviceIDsFromDX9MediaAdapterKHR + clGetDeviceIDsFromDX9MediaAdapterKHR; + cl_api_clEnqueueAcquireDX9MediaSurfacesKHR + clEnqueueAcquireDX9MediaSurfacesKHR; + cl_api_clEnqueueReleaseDX9MediaSurfacesKHR + clEnqueueReleaseDX9MediaSurfacesKHR; + + /* cl_khr_egl_image */ + cl_api_clCreateFromEGLImageKHR clCreateFromEGLImageKHR; + cl_api_clEnqueueAcquireEGLObjectsKHR clEnqueueAcquireEGLObjectsKHR; + cl_api_clEnqueueReleaseEGLObjectsKHR clEnqueueReleaseEGLObjectsKHR; + + /* cl_khr_egl_event */ + cl_api_clCreateEventFromEGLSyncKHR clCreateEventFromEGLSyncKHR; + + /* OpenCL 2.0 */ + cl_api_clCreateCommandQueueWithProperties clCreateCommandQueueWithProperties; + cl_api_clCreatePipe clCreatePipe; + cl_api_clGetPipeInfo clGetPipeInfo; + cl_api_clSVMAlloc clSVMAlloc; + cl_api_clSVMFree clSVMFree; + cl_api_clEnqueueSVMFree clEnqueueSVMFree; + cl_api_clEnqueueSVMMemcpy clEnqueueSVMMemcpy; + cl_api_clEnqueueSVMMemFill clEnqueueSVMMemFill; + cl_api_clEnqueueSVMMap clEnqueueSVMMap; + cl_api_clEnqueueSVMUnmap clEnqueueSVMUnmap; + cl_api_clCreateSamplerWithProperties clCreateSamplerWithProperties; + cl_api_clSetKernelArgSVMPointer clSetKernelArgSVMPointer; + cl_api_clSetKernelExecInfo clSetKernelExecInfo; + + /* cl_khr_sub_groups */ + cl_api_clGetKernelSubGroupInfoKHR clGetKernelSubGroupInfoKHR; + + /* OpenCL 2.1 */ + cl_api_clCloneKernel clCloneKernel; + cl_api_clCreateProgramWithIL clCreateProgramWithIL; + cl_api_clEnqueueSVMMigrateMem clEnqueueSVMMigrateMem; + cl_api_clGetDeviceAndHostTimer clGetDeviceAndHostTimer; + cl_api_clGetHostTimer clGetHostTimer; + cl_api_clGetKernelSubGroupInfo clGetKernelSubGroupInfo; + cl_api_clSetDefaultDeviceCommandQueue clSetDefaultDeviceCommandQueue; + + /* OpenCL 2.2 */ + cl_api_clSetProgramReleaseCallback clSetProgramReleaseCallback; + cl_api_clSetProgramSpecializationConstant clSetProgramSpecializationConstant; + + /* OpenCL 3.0 */ + cl_api_clCreateBufferWithProperties clCreateBufferWithProperties; + cl_api_clCreateImageWithProperties clCreateImageWithProperties; + cl_api_clSetContextDestructorCallback clSetContextDestructorCallback; + +} cl_icd_dispatch; + +#ifdef __cplusplus +} +#endif + +#endif /* #ifndef OPENCL_CL_ICD_H */ diff --git a/vendor/angle/include/CL/cl_layer.h b/vendor/angle/include/CL/cl_layer.h new file mode 100644 index 00000000..b75e6bc7 --- /dev/null +++ b/vendor/angle/include/CL/cl_layer.h @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + * OpenCL is a trademark of Apple Inc. used under license by Khronos. + */ + +#ifndef OPENCL_CL_LAYER_H +#define OPENCL_CL_LAYER_H + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +typedef cl_uint cl_layer_info; +typedef cl_uint cl_layer_api_version; +#define CL_LAYER_API_VERSION 0x4240 +#define CL_LAYER_API_VERSION_100 100 + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetLayerInfo(cl_layer_info param_name, + size_t param_value_size, + void *param_value, + size_t *param_value_size_ret); + +CL_API_ENTRY typedef cl_int +(CL_API_CALL *pfn_clGetLayerInfo)(cl_layer_info param_name, + size_t param_value_size, + void *param_value, + size_t *param_value_size_ret); + +extern CL_API_ENTRY cl_int CL_API_CALL +clInitLayer(cl_uint num_entries, + const cl_icd_dispatch *target_dispatch, + cl_uint *num_entries_ret, + const cl_icd_dispatch **layer_dispatch_ret); + +CL_API_ENTRY typedef cl_int +(CL_API_CALL *pfn_clInitLayer)(cl_uint num_entries, + const cl_icd_dispatch *target_dispatch, + cl_uint *num_entries_ret, + const cl_icd_dispatch **layer_dispatch_ret); + +#ifdef __cplusplus +} +#endif + +#endif /* OPENCL_CL_LAYER_H */ diff --git a/vendor/angle/include/CL/cl_platform.h b/vendor/angle/include/CL/cl_platform.h new file mode 100644 index 00000000..90344a01 --- /dev/null +++ b/vendor/angle/include/CL/cl_platform.h @@ -0,0 +1,1394 @@ +/******************************************************************************* + * Copyright (c) 2008-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +#ifndef __CL_PLATFORM_H +#define __CL_PLATFORM_H + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +#if defined(_WIN32) + #define CL_API_ENTRY + #define CL_API_CALL __stdcall + #define CL_CALLBACK __stdcall +#else + #define CL_API_ENTRY + #define CL_API_CALL + #define CL_CALLBACK +#endif + +/* + * Deprecation flags refer to the last version of the header in which the + * feature was not deprecated. + * + * E.g. VERSION_1_1_DEPRECATED means the feature is present in 1.1 without + * deprecation but is deprecated in versions later than 1.1. 
+ */ + +#ifndef CL_API_SUFFIX_USER +#define CL_API_SUFFIX_USER +#endif + +#ifndef CL_API_PREFIX_USER +#define CL_API_PREFIX_USER +#endif + +#define CL_API_SUFFIX_COMMON CL_API_SUFFIX_USER +#define CL_API_PREFIX_COMMON CL_API_PREFIX_USER + +#define CL_API_SUFFIX__VERSION_1_0 CL_API_SUFFIX_COMMON +#define CL_EXT_SUFFIX__VERSION_1_0 CL_API_SUFFIX_COMMON +#define CL_API_SUFFIX__VERSION_1_1 CL_API_SUFFIX_COMMON +#define CL_EXT_SUFFIX__VERSION_1_1 CL_API_SUFFIX_COMMON +#define CL_API_SUFFIX__VERSION_1_2 CL_API_SUFFIX_COMMON +#define CL_EXT_SUFFIX__VERSION_1_2 CL_API_SUFFIX_COMMON +#define CL_API_SUFFIX__VERSION_2_0 CL_API_SUFFIX_COMMON +#define CL_EXT_SUFFIX__VERSION_2_0 CL_API_SUFFIX_COMMON +#define CL_API_SUFFIX__VERSION_2_1 CL_API_SUFFIX_COMMON +#define CL_EXT_SUFFIX__VERSION_2_1 CL_API_SUFFIX_COMMON +#define CL_API_SUFFIX__VERSION_2_2 CL_API_SUFFIX_COMMON +#define CL_EXT_SUFFIX__VERSION_2_2 CL_API_SUFFIX_COMMON +#define CL_API_SUFFIX__VERSION_3_0 CL_API_SUFFIX_COMMON +#define CL_EXT_SUFFIX__VERSION_3_0 CL_API_SUFFIX_COMMON +#define CL_API_SUFFIX__EXPERIMENTAL CL_API_SUFFIX_COMMON +#define CL_EXT_SUFFIX__EXPERIMENTAL CL_API_SUFFIX_COMMON + + +#ifdef __GNUC__ + #define CL_EXT_SUFFIX_DEPRECATED __attribute__((deprecated)) + #define CL_EXT_PREFIX_DEPRECATED +#elif defined(_WIN32) + #define CL_EXT_SUFFIX_DEPRECATED + #define CL_EXT_PREFIX_DEPRECATED __declspec(deprecated) +#else + #define CL_EXT_SUFFIX_DEPRECATED + #define CL_EXT_PREFIX_DEPRECATED +#endif + +#ifdef CL_USE_DEPRECATED_OPENCL_1_0_APIS + #define CL_EXT_SUFFIX__VERSION_1_0_DEPRECATED CL_API_SUFFIX_COMMON + #define CL_EXT_PREFIX__VERSION_1_0_DEPRECATED CL_API_PREFIX_COMMON +#else + #define CL_EXT_SUFFIX__VERSION_1_0_DEPRECATED CL_API_SUFFIX_COMMON CL_EXT_SUFFIX_DEPRECATED + #define CL_EXT_PREFIX__VERSION_1_0_DEPRECATED CL_API_PREFIX_COMMON CL_EXT_PREFIX_DEPRECATED +#endif + +#ifdef CL_USE_DEPRECATED_OPENCL_1_1_APIS + #define CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED CL_API_SUFFIX_COMMON + #define CL_EXT_PREFIX__VERSION_1_1_DEPRECATED CL_API_PREFIX_COMMON +#else + #define CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED CL_API_SUFFIX_COMMON CL_EXT_SUFFIX_DEPRECATED + #define CL_EXT_PREFIX__VERSION_1_1_DEPRECATED CL_API_PREFIX_COMMON CL_EXT_PREFIX_DEPRECATED +#endif + +#ifdef CL_USE_DEPRECATED_OPENCL_1_2_APIS + #define CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED CL_API_SUFFIX_COMMON + #define CL_EXT_PREFIX__VERSION_1_2_DEPRECATED CL_API_PREFIX_COMMON +#else + #define CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED CL_API_SUFFIX_COMMON CL_EXT_SUFFIX_DEPRECATED + #define CL_EXT_PREFIX__VERSION_1_2_DEPRECATED CL_API_PREFIX_COMMON CL_EXT_PREFIX_DEPRECATED + #endif + +#ifdef CL_USE_DEPRECATED_OPENCL_2_0_APIS + #define CL_EXT_SUFFIX__VERSION_2_0_DEPRECATED CL_API_SUFFIX_COMMON + #define CL_EXT_PREFIX__VERSION_2_0_DEPRECATED CL_API_PREFIX_COMMON +#else + #define CL_EXT_SUFFIX__VERSION_2_0_DEPRECATED CL_API_SUFFIX_COMMON CL_EXT_SUFFIX_DEPRECATED + #define CL_EXT_PREFIX__VERSION_2_0_DEPRECATED CL_API_PREFIX_COMMON CL_EXT_PREFIX_DEPRECATED +#endif + +#ifdef CL_USE_DEPRECATED_OPENCL_2_1_APIS + #define CL_EXT_SUFFIX__VERSION_2_1_DEPRECATED CL_API_SUFFIX_COMMON + #define CL_EXT_PREFIX__VERSION_2_1_DEPRECATED CL_API_PREFIX_COMMON +#else + #define CL_EXT_SUFFIX__VERSION_2_1_DEPRECATED CL_API_SUFFIX_COMMON CL_EXT_SUFFIX_DEPRECATED + #define CL_EXT_PREFIX__VERSION_2_1_DEPRECATED CL_API_PREFIX_COMMON CL_EXT_PREFIX_DEPRECATED +#endif + +#ifdef CL_USE_DEPRECATED_OPENCL_2_2_APIS + #define CL_EXT_SUFFIX__VERSION_2_2_DEPRECATED CL_API_SUFFIX_COMMON + #define 
CL_EXT_PREFIX__VERSION_2_2_DEPRECATED CL_API_PREFIX_COMMON +#else + #define CL_EXT_SUFFIX__VERSION_2_2_DEPRECATED CL_API_SUFFIX_COMMON CL_EXT_SUFFIX_DEPRECATED + #define CL_EXT_PREFIX__VERSION_2_2_DEPRECATED CL_API_PREFIX_COMMON CL_EXT_PREFIX_DEPRECATED +#endif + +#if (defined (_WIN32) && defined(_MSC_VER)) + +/* scalar types */ +typedef signed __int8 cl_char; +typedef unsigned __int8 cl_uchar; +typedef signed __int16 cl_short; +typedef unsigned __int16 cl_ushort; +typedef signed __int32 cl_int; +typedef unsigned __int32 cl_uint; +typedef signed __int64 cl_long; +typedef unsigned __int64 cl_ulong; + +typedef unsigned __int16 cl_half; +typedef float cl_float; +typedef double cl_double; + +/* Macro names and corresponding values defined by OpenCL */ +#define CL_CHAR_BIT 8 +#define CL_SCHAR_MAX 127 +#define CL_SCHAR_MIN (-127-1) +#define CL_CHAR_MAX CL_SCHAR_MAX +#define CL_CHAR_MIN CL_SCHAR_MIN +#define CL_UCHAR_MAX 255 +#define CL_SHRT_MAX 32767 +#define CL_SHRT_MIN (-32767-1) +#define CL_USHRT_MAX 65535 +#define CL_INT_MAX 2147483647 +#define CL_INT_MIN (-2147483647-1) +#define CL_UINT_MAX 0xffffffffU +#define CL_LONG_MAX ((cl_long) 0x7FFFFFFFFFFFFFFFLL) +#define CL_LONG_MIN ((cl_long) -0x7FFFFFFFFFFFFFFFLL - 1LL) +#define CL_ULONG_MAX ((cl_ulong) 0xFFFFFFFFFFFFFFFFULL) + +#define CL_FLT_DIG 6 +#define CL_FLT_MANT_DIG 24 +#define CL_FLT_MAX_10_EXP +38 +#define CL_FLT_MAX_EXP +128 +#define CL_FLT_MIN_10_EXP -37 +#define CL_FLT_MIN_EXP -125 +#define CL_FLT_RADIX 2 +#define CL_FLT_MAX 340282346638528859811704183484516925440.0f +#define CL_FLT_MIN 1.175494350822287507969e-38f +#define CL_FLT_EPSILON 1.1920928955078125e-7f + +#define CL_HALF_DIG 3 +#define CL_HALF_MANT_DIG 11 +#define CL_HALF_MAX_10_EXP +4 +#define CL_HALF_MAX_EXP +16 +#define CL_HALF_MIN_10_EXP -4 +#define CL_HALF_MIN_EXP -13 +#define CL_HALF_RADIX 2 +#define CL_HALF_MAX 65504.0f +#define CL_HALF_MIN 6.103515625e-05f +#define CL_HALF_EPSILON 9.765625e-04f + +#define CL_DBL_DIG 15 +#define CL_DBL_MANT_DIG 53 +#define CL_DBL_MAX_10_EXP +308 +#define CL_DBL_MAX_EXP +1024 +#define CL_DBL_MIN_10_EXP -307 +#define CL_DBL_MIN_EXP -1021 +#define CL_DBL_RADIX 2 +#define CL_DBL_MAX 1.7976931348623158e+308 +#define CL_DBL_MIN 2.225073858507201383090e-308 +#define CL_DBL_EPSILON 2.220446049250313080847e-16 + +#define CL_M_E 2.7182818284590452354 +#define CL_M_LOG2E 1.4426950408889634074 +#define CL_M_LOG10E 0.43429448190325182765 +#define CL_M_LN2 0.69314718055994530942 +#define CL_M_LN10 2.30258509299404568402 +#define CL_M_PI 3.14159265358979323846 +#define CL_M_PI_2 1.57079632679489661923 +#define CL_M_PI_4 0.78539816339744830962 +#define CL_M_1_PI 0.31830988618379067154 +#define CL_M_2_PI 0.63661977236758134308 +#define CL_M_2_SQRTPI 1.12837916709551257390 +#define CL_M_SQRT2 1.41421356237309504880 +#define CL_M_SQRT1_2 0.70710678118654752440 + +#define CL_M_E_F 2.718281828f +#define CL_M_LOG2E_F 1.442695041f +#define CL_M_LOG10E_F 0.434294482f +#define CL_M_LN2_F 0.693147181f +#define CL_M_LN10_F 2.302585093f +#define CL_M_PI_F 3.141592654f +#define CL_M_PI_2_F 1.570796327f +#define CL_M_PI_4_F 0.785398163f +#define CL_M_1_PI_F 0.318309886f +#define CL_M_2_PI_F 0.636619772f +#define CL_M_2_SQRTPI_F 1.128379167f +#define CL_M_SQRT2_F 1.414213562f +#define CL_M_SQRT1_2_F 0.707106781f + +#define CL_NAN (CL_INFINITY - CL_INFINITY) +#define CL_HUGE_VALF ((cl_float) 1e50) +#define CL_HUGE_VAL ((cl_double) 1e500) +#define CL_MAXFLOAT CL_FLT_MAX +#define CL_INFINITY CL_HUGE_VALF + +#else + +#include + +/* scalar types */ +typedef int8_t 
cl_char; +typedef uint8_t cl_uchar; +typedef int16_t cl_short; +typedef uint16_t cl_ushort; +typedef int32_t cl_int; +typedef uint32_t cl_uint; +typedef int64_t cl_long; +typedef uint64_t cl_ulong; + +typedef uint16_t cl_half; +typedef float cl_float; +typedef double cl_double; + +/* Macro names and corresponding values defined by OpenCL */ +#define CL_CHAR_BIT 8 +#define CL_SCHAR_MAX 127 +#define CL_SCHAR_MIN (-127-1) +#define CL_CHAR_MAX CL_SCHAR_MAX +#define CL_CHAR_MIN CL_SCHAR_MIN +#define CL_UCHAR_MAX 255 +#define CL_SHRT_MAX 32767 +#define CL_SHRT_MIN (-32767-1) +#define CL_USHRT_MAX 65535 +#define CL_INT_MAX 2147483647 +#define CL_INT_MIN (-2147483647-1) +#define CL_UINT_MAX 0xffffffffU +#define CL_LONG_MAX ((cl_long) 0x7FFFFFFFFFFFFFFFLL) +#define CL_LONG_MIN ((cl_long) -0x7FFFFFFFFFFFFFFFLL - 1LL) +#define CL_ULONG_MAX ((cl_ulong) 0xFFFFFFFFFFFFFFFFULL) + +#define CL_FLT_DIG 6 +#define CL_FLT_MANT_DIG 24 +#define CL_FLT_MAX_10_EXP +38 +#define CL_FLT_MAX_EXP +128 +#define CL_FLT_MIN_10_EXP -37 +#define CL_FLT_MIN_EXP -125 +#define CL_FLT_RADIX 2 +#define CL_FLT_MAX 340282346638528859811704183484516925440.0f +#define CL_FLT_MIN 1.175494350822287507969e-38f +#define CL_FLT_EPSILON 1.1920928955078125e-7f + +#define CL_HALF_DIG 3 +#define CL_HALF_MANT_DIG 11 +#define CL_HALF_MAX_10_EXP +4 +#define CL_HALF_MAX_EXP +16 +#define CL_HALF_MIN_10_EXP -4 +#define CL_HALF_MIN_EXP -13 +#define CL_HALF_RADIX 2 +#define CL_HALF_MAX 65504.0f +#define CL_HALF_MIN 6.103515625e-05f +#define CL_HALF_EPSILON 9.765625e-04f + +#define CL_DBL_DIG 15 +#define CL_DBL_MANT_DIG 53 +#define CL_DBL_MAX_10_EXP +308 +#define CL_DBL_MAX_EXP +1024 +#define CL_DBL_MIN_10_EXP -307 +#define CL_DBL_MIN_EXP -1021 +#define CL_DBL_RADIX 2 +#define CL_DBL_MAX 179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368.0 +#define CL_DBL_MIN 2.225073858507201383090e-308 +#define CL_DBL_EPSILON 2.220446049250313080847e-16 + +#define CL_M_E 2.7182818284590452354 +#define CL_M_LOG2E 1.4426950408889634074 +#define CL_M_LOG10E 0.43429448190325182765 +#define CL_M_LN2 0.69314718055994530942 +#define CL_M_LN10 2.30258509299404568402 +#define CL_M_PI 3.14159265358979323846 +#define CL_M_PI_2 1.57079632679489661923 +#define CL_M_PI_4 0.78539816339744830962 +#define CL_M_1_PI 0.31830988618379067154 +#define CL_M_2_PI 0.63661977236758134308 +#define CL_M_2_SQRTPI 1.12837916709551257390 +#define CL_M_SQRT2 1.41421356237309504880 +#define CL_M_SQRT1_2 0.70710678118654752440 + +#define CL_M_E_F 2.718281828f +#define CL_M_LOG2E_F 1.442695041f +#define CL_M_LOG10E_F 0.434294482f +#define CL_M_LN2_F 0.693147181f +#define CL_M_LN10_F 2.302585093f +#define CL_M_PI_F 3.141592654f +#define CL_M_PI_2_F 1.570796327f +#define CL_M_PI_4_F 0.785398163f +#define CL_M_1_PI_F 0.318309886f +#define CL_M_2_PI_F 0.636619772f +#define CL_M_2_SQRTPI_F 1.128379167f +#define CL_M_SQRT2_F 1.414213562f +#define CL_M_SQRT1_2_F 0.707106781f + +#if defined( __GNUC__ ) + #define CL_HUGE_VALF __builtin_huge_valf() + #define CL_HUGE_VAL __builtin_huge_val() + #define CL_NAN __builtin_nanf( "" ) +#else + #define CL_HUGE_VALF ((cl_float) 1e50) + #define CL_HUGE_VAL ((cl_double) 1e500) + float nanf( const char * ); + #define CL_NAN nanf( "" ) +#endif +#define CL_MAXFLOAT CL_FLT_MAX +#define CL_INFINITY CL_HUGE_VALF + 
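A consumer-side sketch (assuming a C11 compiler; illustrative only, not part of the vendored header) that verifies the fixed widths the OpenCL specification requires these scalar typedefs to have:

#include <CL/cl_platform.h>

_Static_assert(sizeof(cl_char)  == 1 && sizeof(cl_uchar)  == 1, "cl_char/cl_uchar are 8-bit");
_Static_assert(sizeof(cl_short) == 2 && sizeof(cl_ushort) == 2, "cl_short/cl_ushort are 16-bit");
_Static_assert(sizeof(cl_half)  == 2,                           "cl_half is a 16-bit storage type");
_Static_assert(sizeof(cl_int)   == 4 && sizeof(cl_uint)   == 4, "cl_int/cl_uint are 32-bit");
_Static_assert(sizeof(cl_long)  == 8 && sizeof(cl_ulong)  == 8, "cl_long/cl_ulong are 64-bit");
_Static_assert(sizeof(cl_float) == 4 && sizeof(cl_double) == 8, "cl_float/cl_double are IEEE single/double");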
+#endif + +#include + +/* Mirror types to GL types. Mirror types allow us to avoid deciding which 87s to load based on whether we are using GL or GLES here. */ +typedef unsigned int cl_GLuint; +typedef int cl_GLint; +typedef unsigned int cl_GLenum; + +/* + * Vector types + * + * Note: OpenCL requires that all types be naturally aligned. + * This means that vector types must be naturally aligned. + * For example, a vector of four floats must be aligned to + * a 16 byte boundary (calculated as 4 * the natural 4-byte + * alignment of the float). The alignment qualifiers here + * will only function properly if your compiler supports them + * and if you don't actively work to defeat them. For example, + * in order for a cl_float4 to be 16 byte aligned in a struct, + * the start of the struct must itself be 16-byte aligned. + * + * Maintaining proper alignment is the user's responsibility. + */ + +/* Define basic vector types */ +#if defined( __VEC__ ) + #if !defined(__clang__) + #include /* may be omitted depending on compiler. AltiVec spec provides no way to detect whether the header is required. */ + #endif + typedef __vector unsigned char __cl_uchar16; + typedef __vector signed char __cl_char16; + typedef __vector unsigned short __cl_ushort8; + typedef __vector signed short __cl_short8; + typedef __vector unsigned int __cl_uint4; + typedef __vector signed int __cl_int4; + typedef __vector float __cl_float4; + #define __CL_UCHAR16__ 1 + #define __CL_CHAR16__ 1 + #define __CL_USHORT8__ 1 + #define __CL_SHORT8__ 1 + #define __CL_UINT4__ 1 + #define __CL_INT4__ 1 + #define __CL_FLOAT4__ 1 +#endif + +#if defined( __SSE__ ) + #if defined( __MINGW64__ ) + #include + #else + #include + #endif + #if defined( __GNUC__ ) + typedef float __cl_float4 __attribute__((vector_size(16))); + #else + typedef __m128 __cl_float4; + #endif + #define __CL_FLOAT4__ 1 +#endif + +#if defined( __SSE2__ ) + #if defined( __MINGW64__ ) + #include + #else + #include + #endif + #if defined( __GNUC__ ) + typedef cl_uchar __cl_uchar16 __attribute__((vector_size(16))); + typedef cl_char __cl_char16 __attribute__((vector_size(16))); + typedef cl_ushort __cl_ushort8 __attribute__((vector_size(16))); + typedef cl_short __cl_short8 __attribute__((vector_size(16))); + typedef cl_uint __cl_uint4 __attribute__((vector_size(16))); + typedef cl_int __cl_int4 __attribute__((vector_size(16))); + typedef cl_ulong __cl_ulong2 __attribute__((vector_size(16))); + typedef cl_long __cl_long2 __attribute__((vector_size(16))); + typedef cl_double __cl_double2 __attribute__((vector_size(16))); + #else + typedef __m128i __cl_uchar16; + typedef __m128i __cl_char16; + typedef __m128i __cl_ushort8; + typedef __m128i __cl_short8; + typedef __m128i __cl_uint4; + typedef __m128i __cl_int4; + typedef __m128i __cl_ulong2; + typedef __m128i __cl_long2; + typedef __m128d __cl_double2; + #endif + #define __CL_UCHAR16__ 1 + #define __CL_CHAR16__ 1 + #define __CL_USHORT8__ 1 + #define __CL_SHORT8__ 1 + #define __CL_INT4__ 1 + #define __CL_UINT4__ 1 + #define __CL_ULONG2__ 1 + #define __CL_LONG2__ 1 + #define __CL_DOUBLE2__ 1 +#endif + +#if defined( __MMX__ ) + #include + #if defined( __GNUC__ ) + typedef cl_uchar __cl_uchar8 __attribute__((vector_size(8))); + typedef cl_char __cl_char8 __attribute__((vector_size(8))); + typedef cl_ushort __cl_ushort4 __attribute__((vector_size(8))); + typedef cl_short __cl_short4 __attribute__((vector_size(8))); + typedef cl_uint __cl_uint2 __attribute__((vector_size(8))); + typedef cl_int __cl_int2 
__attribute__((vector_size(8))); + typedef cl_ulong __cl_ulong1 __attribute__((vector_size(8))); + typedef cl_long __cl_long1 __attribute__((vector_size(8))); + typedef cl_float __cl_float2 __attribute__((vector_size(8))); + #else + typedef __m64 __cl_uchar8; + typedef __m64 __cl_char8; + typedef __m64 __cl_ushort4; + typedef __m64 __cl_short4; + typedef __m64 __cl_uint2; + typedef __m64 __cl_int2; + typedef __m64 __cl_ulong1; + typedef __m64 __cl_long1; + typedef __m64 __cl_float2; + #endif + #define __CL_UCHAR8__ 1 + #define __CL_CHAR8__ 1 + #define __CL_USHORT4__ 1 + #define __CL_SHORT4__ 1 + #define __CL_INT2__ 1 + #define __CL_UINT2__ 1 + #define __CL_ULONG1__ 1 + #define __CL_LONG1__ 1 + #define __CL_FLOAT2__ 1 +#endif + +#if defined( __AVX__ ) + #if defined( __MINGW64__ ) + #include + #else + #include + #endif + #if defined( __GNUC__ ) + typedef cl_float __cl_float8 __attribute__((vector_size(32))); + typedef cl_double __cl_double4 __attribute__((vector_size(32))); + #else + typedef __m256 __cl_float8; + typedef __m256d __cl_double4; + #endif + #define __CL_FLOAT8__ 1 + #define __CL_DOUBLE4__ 1 +#endif + +/* Define capabilities for anonymous struct members. */ +#if !defined(__cplusplus) && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L +#define __CL_HAS_ANON_STRUCT__ 1 +#define __CL_ANON_STRUCT__ +#elif defined( __GNUC__) && ! defined( __STRICT_ANSI__ ) +#define __CL_HAS_ANON_STRUCT__ 1 +#define __CL_ANON_STRUCT__ __extension__ +#elif defined( _WIN32) && defined(_MSC_VER) && ! defined(__STDC__) + #if _MSC_VER >= 1500 + /* Microsoft Developer Studio 2008 supports anonymous structs, but + * complains by default. */ + #define __CL_HAS_ANON_STRUCT__ 1 + #define __CL_ANON_STRUCT__ + /* Disable warning C4201: nonstandard extension used : nameless + * struct/union */ + #pragma warning( push ) + #pragma warning( disable : 4201 ) + #endif +#else +#define __CL_HAS_ANON_STRUCT__ 0 +#define __CL_ANON_STRUCT__ +#endif + +/* Define alignment keys */ +#if defined( __GNUC__ ) || defined(__INTEGRITY) + #define CL_ALIGNED(_x) __attribute__ ((aligned(_x))) +#elif defined( _WIN32) && (_MSC_VER) + /* Alignment keys neutered on windows because MSVC can't swallow function arguments with alignment requirements */ + /* http://msdn.microsoft.com/en-us/library/373ak2y1%28VS.71%29.aspx */ + /* #include */ + /* #define CL_ALIGNED(_x) _CRT_ALIGN(_x) */ + #define CL_ALIGNED(_x) +#else + #warning Need to implement some method to align data here + #define CL_ALIGNED(_x) +#endif + +/* Indicate whether .xyzw, .s0123 and .hi.lo are supported */ +#if __CL_HAS_ANON_STRUCT__ + /* .xyzw and .s0123...{f|F} are supported */ + #define CL_HAS_NAMED_VECTOR_FIELDS 1 + /* .hi and .lo are supported */ + #define CL_HAS_HI_LO_VECTOR_FIELDS 1 +#endif + +/* Define cl_vector types */ + +/* ---- cl_charn ---- */ +typedef union +{ + cl_char CL_ALIGNED(2) s[2]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_char x, y; }; + __CL_ANON_STRUCT__ struct{ cl_char s0, s1; }; + __CL_ANON_STRUCT__ struct{ cl_char lo, hi; }; +#endif +#if defined( __CL_CHAR2__) + __cl_char2 v2; +#endif +}cl_char2; + +typedef union +{ + cl_char CL_ALIGNED(4) s[4]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_char x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_char s0, s1, s2, s3; }; + __CL_ANON_STRUCT__ struct{ cl_char2 lo, hi; }; +#endif +#if defined( __CL_CHAR2__) + __cl_char2 v2[2]; +#endif +#if defined( __CL_CHAR4__) + __cl_char4 v4; +#endif +}cl_char4; + +/* cl_char3 is identical in size, alignment and behavior to cl_char4. 
See section 6.1.5. */ +typedef cl_char4 cl_char3; + +typedef union +{ + cl_char CL_ALIGNED(8) s[8]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_char x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_char s0, s1, s2, s3, s4, s5, s6, s7; }; + __CL_ANON_STRUCT__ struct{ cl_char4 lo, hi; }; +#endif +#if defined( __CL_CHAR2__) + __cl_char2 v2[4]; +#endif +#if defined( __CL_CHAR4__) + __cl_char4 v4[2]; +#endif +#if defined( __CL_CHAR8__ ) + __cl_char8 v8; +#endif +}cl_char8; + +typedef union +{ + cl_char CL_ALIGNED(16) s[16]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_char x, y, z, w, __spacer4, __spacer5, __spacer6, __spacer7, __spacer8, __spacer9, sa, sb, sc, sd, se, sf; }; + __CL_ANON_STRUCT__ struct{ cl_char s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, sA, sB, sC, sD, sE, sF; }; + __CL_ANON_STRUCT__ struct{ cl_char8 lo, hi; }; +#endif +#if defined( __CL_CHAR2__) + __cl_char2 v2[8]; +#endif +#if defined( __CL_CHAR4__) + __cl_char4 v4[4]; +#endif +#if defined( __CL_CHAR8__ ) + __cl_char8 v8[2]; +#endif +#if defined( __CL_CHAR16__ ) + __cl_char16 v16; +#endif +}cl_char16; + + +/* ---- cl_ucharn ---- */ +typedef union +{ + cl_uchar CL_ALIGNED(2) s[2]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_uchar x, y; }; + __CL_ANON_STRUCT__ struct{ cl_uchar s0, s1; }; + __CL_ANON_STRUCT__ struct{ cl_uchar lo, hi; }; +#endif +#if defined( __cl_uchar2__) + __cl_uchar2 v2; +#endif +}cl_uchar2; + +typedef union +{ + cl_uchar CL_ALIGNED(4) s[4]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_uchar x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_uchar s0, s1, s2, s3; }; + __CL_ANON_STRUCT__ struct{ cl_uchar2 lo, hi; }; +#endif +#if defined( __CL_UCHAR2__) + __cl_uchar2 v2[2]; +#endif +#if defined( __CL_UCHAR4__) + __cl_uchar4 v4; +#endif +}cl_uchar4; + +/* cl_uchar3 is identical in size, alignment and behavior to cl_uchar4. See section 6.1.5. 
*/ +typedef cl_uchar4 cl_uchar3; + +typedef union +{ + cl_uchar CL_ALIGNED(8) s[8]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_uchar x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_uchar s0, s1, s2, s3, s4, s5, s6, s7; }; + __CL_ANON_STRUCT__ struct{ cl_uchar4 lo, hi; }; +#endif +#if defined( __CL_UCHAR2__) + __cl_uchar2 v2[4]; +#endif +#if defined( __CL_UCHAR4__) + __cl_uchar4 v4[2]; +#endif +#if defined( __CL_UCHAR8__ ) + __cl_uchar8 v8; +#endif +}cl_uchar8; + +typedef union +{ + cl_uchar CL_ALIGNED(16) s[16]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_uchar x, y, z, w, __spacer4, __spacer5, __spacer6, __spacer7, __spacer8, __spacer9, sa, sb, sc, sd, se, sf; }; + __CL_ANON_STRUCT__ struct{ cl_uchar s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, sA, sB, sC, sD, sE, sF; }; + __CL_ANON_STRUCT__ struct{ cl_uchar8 lo, hi; }; +#endif +#if defined( __CL_UCHAR2__) + __cl_uchar2 v2[8]; +#endif +#if defined( __CL_UCHAR4__) + __cl_uchar4 v4[4]; +#endif +#if defined( __CL_UCHAR8__ ) + __cl_uchar8 v8[2]; +#endif +#if defined( __CL_UCHAR16__ ) + __cl_uchar16 v16; +#endif +}cl_uchar16; + + +/* ---- cl_shortn ---- */ +typedef union +{ + cl_short CL_ALIGNED(4) s[2]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_short x, y; }; + __CL_ANON_STRUCT__ struct{ cl_short s0, s1; }; + __CL_ANON_STRUCT__ struct{ cl_short lo, hi; }; +#endif +#if defined( __CL_SHORT2__) + __cl_short2 v2; +#endif +}cl_short2; + +typedef union +{ + cl_short CL_ALIGNED(8) s[4]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_short x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_short s0, s1, s2, s3; }; + __CL_ANON_STRUCT__ struct{ cl_short2 lo, hi; }; +#endif +#if defined( __CL_SHORT2__) + __cl_short2 v2[2]; +#endif +#if defined( __CL_SHORT4__) + __cl_short4 v4; +#endif +}cl_short4; + +/* cl_short3 is identical in size, alignment and behavior to cl_short4. See section 6.1.5. 
*/ +typedef cl_short4 cl_short3; + +typedef union +{ + cl_short CL_ALIGNED(16) s[8]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_short x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_short s0, s1, s2, s3, s4, s5, s6, s7; }; + __CL_ANON_STRUCT__ struct{ cl_short4 lo, hi; }; +#endif +#if defined( __CL_SHORT2__) + __cl_short2 v2[4]; +#endif +#if defined( __CL_SHORT4__) + __cl_short4 v4[2]; +#endif +#if defined( __CL_SHORT8__ ) + __cl_short8 v8; +#endif +}cl_short8; + +typedef union +{ + cl_short CL_ALIGNED(32) s[16]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_short x, y, z, w, __spacer4, __spacer5, __spacer6, __spacer7, __spacer8, __spacer9, sa, sb, sc, sd, se, sf; }; + __CL_ANON_STRUCT__ struct{ cl_short s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, sA, sB, sC, sD, sE, sF; }; + __CL_ANON_STRUCT__ struct{ cl_short8 lo, hi; }; +#endif +#if defined( __CL_SHORT2__) + __cl_short2 v2[8]; +#endif +#if defined( __CL_SHORT4__) + __cl_short4 v4[4]; +#endif +#if defined( __CL_SHORT8__ ) + __cl_short8 v8[2]; +#endif +#if defined( __CL_SHORT16__ ) + __cl_short16 v16; +#endif +}cl_short16; + + +/* ---- cl_ushortn ---- */ +typedef union +{ + cl_ushort CL_ALIGNED(4) s[2]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_ushort x, y; }; + __CL_ANON_STRUCT__ struct{ cl_ushort s0, s1; }; + __CL_ANON_STRUCT__ struct{ cl_ushort lo, hi; }; +#endif +#if defined( __CL_USHORT2__) + __cl_ushort2 v2; +#endif +}cl_ushort2; + +typedef union +{ + cl_ushort CL_ALIGNED(8) s[4]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_ushort x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_ushort s0, s1, s2, s3; }; + __CL_ANON_STRUCT__ struct{ cl_ushort2 lo, hi; }; +#endif +#if defined( __CL_USHORT2__) + __cl_ushort2 v2[2]; +#endif +#if defined( __CL_USHORT4__) + __cl_ushort4 v4; +#endif +}cl_ushort4; + +/* cl_ushort3 is identical in size, alignment and behavior to cl_ushort4. See section 6.1.5. 
*/ +typedef cl_ushort4 cl_ushort3; + +typedef union +{ + cl_ushort CL_ALIGNED(16) s[8]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_ushort x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_ushort s0, s1, s2, s3, s4, s5, s6, s7; }; + __CL_ANON_STRUCT__ struct{ cl_ushort4 lo, hi; }; +#endif +#if defined( __CL_USHORT2__) + __cl_ushort2 v2[4]; +#endif +#if defined( __CL_USHORT4__) + __cl_ushort4 v4[2]; +#endif +#if defined( __CL_USHORT8__ ) + __cl_ushort8 v8; +#endif +}cl_ushort8; + +typedef union +{ + cl_ushort CL_ALIGNED(32) s[16]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_ushort x, y, z, w, __spacer4, __spacer5, __spacer6, __spacer7, __spacer8, __spacer9, sa, sb, sc, sd, se, sf; }; + __CL_ANON_STRUCT__ struct{ cl_ushort s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, sA, sB, sC, sD, sE, sF; }; + __CL_ANON_STRUCT__ struct{ cl_ushort8 lo, hi; }; +#endif +#if defined( __CL_USHORT2__) + __cl_ushort2 v2[8]; +#endif +#if defined( __CL_USHORT4__) + __cl_ushort4 v4[4]; +#endif +#if defined( __CL_USHORT8__ ) + __cl_ushort8 v8[2]; +#endif +#if defined( __CL_USHORT16__ ) + __cl_ushort16 v16; +#endif +}cl_ushort16; + + +/* ---- cl_halfn ---- */ +typedef union +{ + cl_half CL_ALIGNED(4) s[2]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_half x, y; }; + __CL_ANON_STRUCT__ struct{ cl_half s0, s1; }; + __CL_ANON_STRUCT__ struct{ cl_half lo, hi; }; +#endif +#if defined( __CL_HALF2__) + __cl_half2 v2; +#endif +}cl_half2; + +typedef union +{ + cl_half CL_ALIGNED(8) s[4]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_half x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_half s0, s1, s2, s3; }; + __CL_ANON_STRUCT__ struct{ cl_half2 lo, hi; }; +#endif +#if defined( __CL_HALF2__) + __cl_half2 v2[2]; +#endif +#if defined( __CL_HALF4__) + __cl_half4 v4; +#endif +}cl_half4; + +/* cl_half3 is identical in size, alignment and behavior to cl_half4. See section 6.1.5. 
*/ +typedef cl_half4 cl_half3; + +typedef union +{ + cl_half CL_ALIGNED(16) s[8]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_half x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_half s0, s1, s2, s3, s4, s5, s6, s7; }; + __CL_ANON_STRUCT__ struct{ cl_half4 lo, hi; }; +#endif +#if defined( __CL_HALF2__) + __cl_half2 v2[4]; +#endif +#if defined( __CL_HALF4__) + __cl_half4 v4[2]; +#endif +#if defined( __CL_HALF8__ ) + __cl_half8 v8; +#endif +}cl_half8; + +typedef union +{ + cl_half CL_ALIGNED(32) s[16]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_half x, y, z, w, __spacer4, __spacer5, __spacer6, __spacer7, __spacer8, __spacer9, sa, sb, sc, sd, se, sf; }; + __CL_ANON_STRUCT__ struct{ cl_half s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, sA, sB, sC, sD, sE, sF; }; + __CL_ANON_STRUCT__ struct{ cl_half8 lo, hi; }; +#endif +#if defined( __CL_HALF2__) + __cl_half2 v2[8]; +#endif +#if defined( __CL_HALF4__) + __cl_half4 v4[4]; +#endif +#if defined( __CL_HALF8__ ) + __cl_half8 v8[2]; +#endif +#if defined( __CL_HALF16__ ) + __cl_half16 v16; +#endif +}cl_half16; + +/* ---- cl_intn ---- */ +typedef union +{ + cl_int CL_ALIGNED(8) s[2]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_int x, y; }; + __CL_ANON_STRUCT__ struct{ cl_int s0, s1; }; + __CL_ANON_STRUCT__ struct{ cl_int lo, hi; }; +#endif +#if defined( __CL_INT2__) + __cl_int2 v2; +#endif +}cl_int2; + +typedef union +{ + cl_int CL_ALIGNED(16) s[4]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_int x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_int s0, s1, s2, s3; }; + __CL_ANON_STRUCT__ struct{ cl_int2 lo, hi; }; +#endif +#if defined( __CL_INT2__) + __cl_int2 v2[2]; +#endif +#if defined( __CL_INT4__) + __cl_int4 v4; +#endif +}cl_int4; + +/* cl_int3 is identical in size, alignment and behavior to cl_int4. See section 6.1.5. 
*/ +typedef cl_int4 cl_int3; + +typedef union +{ + cl_int CL_ALIGNED(32) s[8]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_int x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_int s0, s1, s2, s3, s4, s5, s6, s7; }; + __CL_ANON_STRUCT__ struct{ cl_int4 lo, hi; }; +#endif +#if defined( __CL_INT2__) + __cl_int2 v2[4]; +#endif +#if defined( __CL_INT4__) + __cl_int4 v4[2]; +#endif +#if defined( __CL_INT8__ ) + __cl_int8 v8; +#endif +}cl_int8; + +typedef union +{ + cl_int CL_ALIGNED(64) s[16]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_int x, y, z, w, __spacer4, __spacer5, __spacer6, __spacer7, __spacer8, __spacer9, sa, sb, sc, sd, se, sf; }; + __CL_ANON_STRUCT__ struct{ cl_int s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, sA, sB, sC, sD, sE, sF; }; + __CL_ANON_STRUCT__ struct{ cl_int8 lo, hi; }; +#endif +#if defined( __CL_INT2__) + __cl_int2 v2[8]; +#endif +#if defined( __CL_INT4__) + __cl_int4 v4[4]; +#endif +#if defined( __CL_INT8__ ) + __cl_int8 v8[2]; +#endif +#if defined( __CL_INT16__ ) + __cl_int16 v16; +#endif +}cl_int16; + + +/* ---- cl_uintn ---- */ +typedef union +{ + cl_uint CL_ALIGNED(8) s[2]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_uint x, y; }; + __CL_ANON_STRUCT__ struct{ cl_uint s0, s1; }; + __CL_ANON_STRUCT__ struct{ cl_uint lo, hi; }; +#endif +#if defined( __CL_UINT2__) + __cl_uint2 v2; +#endif +}cl_uint2; + +typedef union +{ + cl_uint CL_ALIGNED(16) s[4]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_uint x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_uint s0, s1, s2, s3; }; + __CL_ANON_STRUCT__ struct{ cl_uint2 lo, hi; }; +#endif +#if defined( __CL_UINT2__) + __cl_uint2 v2[2]; +#endif +#if defined( __CL_UINT4__) + __cl_uint4 v4; +#endif +}cl_uint4; + +/* cl_uint3 is identical in size, alignment and behavior to cl_uint4. See section 6.1.5. 
*/ +typedef cl_uint4 cl_uint3; + +typedef union +{ + cl_uint CL_ALIGNED(32) s[8]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_uint x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_uint s0, s1, s2, s3, s4, s5, s6, s7; }; + __CL_ANON_STRUCT__ struct{ cl_uint4 lo, hi; }; +#endif +#if defined( __CL_UINT2__) + __cl_uint2 v2[4]; +#endif +#if defined( __CL_UINT4__) + __cl_uint4 v4[2]; +#endif +#if defined( __CL_UINT8__ ) + __cl_uint8 v8; +#endif +}cl_uint8; + +typedef union +{ + cl_uint CL_ALIGNED(64) s[16]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_uint x, y, z, w, __spacer4, __spacer5, __spacer6, __spacer7, __spacer8, __spacer9, sa, sb, sc, sd, se, sf; }; + __CL_ANON_STRUCT__ struct{ cl_uint s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, sA, sB, sC, sD, sE, sF; }; + __CL_ANON_STRUCT__ struct{ cl_uint8 lo, hi; }; +#endif +#if defined( __CL_UINT2__) + __cl_uint2 v2[8]; +#endif +#if defined( __CL_UINT4__) + __cl_uint4 v4[4]; +#endif +#if defined( __CL_UINT8__ ) + __cl_uint8 v8[2]; +#endif +#if defined( __CL_UINT16__ ) + __cl_uint16 v16; +#endif +}cl_uint16; + +/* ---- cl_longn ---- */ +typedef union +{ + cl_long CL_ALIGNED(16) s[2]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_long x, y; }; + __CL_ANON_STRUCT__ struct{ cl_long s0, s1; }; + __CL_ANON_STRUCT__ struct{ cl_long lo, hi; }; +#endif +#if defined( __CL_LONG2__) + __cl_long2 v2; +#endif +}cl_long2; + +typedef union +{ + cl_long CL_ALIGNED(32) s[4]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_long x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_long s0, s1, s2, s3; }; + __CL_ANON_STRUCT__ struct{ cl_long2 lo, hi; }; +#endif +#if defined( __CL_LONG2__) + __cl_long2 v2[2]; +#endif +#if defined( __CL_LONG4__) + __cl_long4 v4; +#endif +}cl_long4; + +/* cl_long3 is identical in size, alignment and behavior to cl_long4. See section 6.1.5. 
*/ +typedef cl_long4 cl_long3; + +typedef union +{ + cl_long CL_ALIGNED(64) s[8]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_long x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_long s0, s1, s2, s3, s4, s5, s6, s7; }; + __CL_ANON_STRUCT__ struct{ cl_long4 lo, hi; }; +#endif +#if defined( __CL_LONG2__) + __cl_long2 v2[4]; +#endif +#if defined( __CL_LONG4__) + __cl_long4 v4[2]; +#endif +#if defined( __CL_LONG8__ ) + __cl_long8 v8; +#endif +}cl_long8; + +typedef union +{ + cl_long CL_ALIGNED(128) s[16]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_long x, y, z, w, __spacer4, __spacer5, __spacer6, __spacer7, __spacer8, __spacer9, sa, sb, sc, sd, se, sf; }; + __CL_ANON_STRUCT__ struct{ cl_long s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, sA, sB, sC, sD, sE, sF; }; + __CL_ANON_STRUCT__ struct{ cl_long8 lo, hi; }; +#endif +#if defined( __CL_LONG2__) + __cl_long2 v2[8]; +#endif +#if defined( __CL_LONG4__) + __cl_long4 v4[4]; +#endif +#if defined( __CL_LONG8__ ) + __cl_long8 v8[2]; +#endif +#if defined( __CL_LONG16__ ) + __cl_long16 v16; +#endif +}cl_long16; + + +/* ---- cl_ulongn ---- */ +typedef union +{ + cl_ulong CL_ALIGNED(16) s[2]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_ulong x, y; }; + __CL_ANON_STRUCT__ struct{ cl_ulong s0, s1; }; + __CL_ANON_STRUCT__ struct{ cl_ulong lo, hi; }; +#endif +#if defined( __CL_ULONG2__) + __cl_ulong2 v2; +#endif +}cl_ulong2; + +typedef union +{ + cl_ulong CL_ALIGNED(32) s[4]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_ulong x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_ulong s0, s1, s2, s3; }; + __CL_ANON_STRUCT__ struct{ cl_ulong2 lo, hi; }; +#endif +#if defined( __CL_ULONG2__) + __cl_ulong2 v2[2]; +#endif +#if defined( __CL_ULONG4__) + __cl_ulong4 v4; +#endif +}cl_ulong4; + +/* cl_ulong3 is identical in size, alignment and behavior to cl_ulong4. See section 6.1.5. 
*/ +typedef cl_ulong4 cl_ulong3; + +typedef union +{ + cl_ulong CL_ALIGNED(64) s[8]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_ulong x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_ulong s0, s1, s2, s3, s4, s5, s6, s7; }; + __CL_ANON_STRUCT__ struct{ cl_ulong4 lo, hi; }; +#endif +#if defined( __CL_ULONG2__) + __cl_ulong2 v2[4]; +#endif +#if defined( __CL_ULONG4__) + __cl_ulong4 v4[2]; +#endif +#if defined( __CL_ULONG8__ ) + __cl_ulong8 v8; +#endif +}cl_ulong8; + +typedef union +{ + cl_ulong CL_ALIGNED(128) s[16]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_ulong x, y, z, w, __spacer4, __spacer5, __spacer6, __spacer7, __spacer8, __spacer9, sa, sb, sc, sd, se, sf; }; + __CL_ANON_STRUCT__ struct{ cl_ulong s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, sA, sB, sC, sD, sE, sF; }; + __CL_ANON_STRUCT__ struct{ cl_ulong8 lo, hi; }; +#endif +#if defined( __CL_ULONG2__) + __cl_ulong2 v2[8]; +#endif +#if defined( __CL_ULONG4__) + __cl_ulong4 v4[4]; +#endif +#if defined( __CL_ULONG8__ ) + __cl_ulong8 v8[2]; +#endif +#if defined( __CL_ULONG16__ ) + __cl_ulong16 v16; +#endif +}cl_ulong16; + + +/* --- cl_floatn ---- */ + +typedef union +{ + cl_float CL_ALIGNED(8) s[2]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_float x, y; }; + __CL_ANON_STRUCT__ struct{ cl_float s0, s1; }; + __CL_ANON_STRUCT__ struct{ cl_float lo, hi; }; +#endif +#if defined( __CL_FLOAT2__) + __cl_float2 v2; +#endif +}cl_float2; + +typedef union +{ + cl_float CL_ALIGNED(16) s[4]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_float x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_float s0, s1, s2, s3; }; + __CL_ANON_STRUCT__ struct{ cl_float2 lo, hi; }; +#endif +#if defined( __CL_FLOAT2__) + __cl_float2 v2[2]; +#endif +#if defined( __CL_FLOAT4__) + __cl_float4 v4; +#endif +}cl_float4; + +/* cl_float3 is identical in size, alignment and behavior to cl_float4. See section 6.1.5. 
*/ +typedef cl_float4 cl_float3; + +typedef union +{ + cl_float CL_ALIGNED(32) s[8]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_float x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_float s0, s1, s2, s3, s4, s5, s6, s7; }; + __CL_ANON_STRUCT__ struct{ cl_float4 lo, hi; }; +#endif +#if defined( __CL_FLOAT2__) + __cl_float2 v2[4]; +#endif +#if defined( __CL_FLOAT4__) + __cl_float4 v4[2]; +#endif +#if defined( __CL_FLOAT8__ ) + __cl_float8 v8; +#endif +}cl_float8; + +typedef union +{ + cl_float CL_ALIGNED(64) s[16]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_float x, y, z, w, __spacer4, __spacer5, __spacer6, __spacer7, __spacer8, __spacer9, sa, sb, sc, sd, se, sf; }; + __CL_ANON_STRUCT__ struct{ cl_float s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, sA, sB, sC, sD, sE, sF; }; + __CL_ANON_STRUCT__ struct{ cl_float8 lo, hi; }; +#endif +#if defined( __CL_FLOAT2__) + __cl_float2 v2[8]; +#endif +#if defined( __CL_FLOAT4__) + __cl_float4 v4[4]; +#endif +#if defined( __CL_FLOAT8__ ) + __cl_float8 v8[2]; +#endif +#if defined( __CL_FLOAT16__ ) + __cl_float16 v16; +#endif +}cl_float16; + +/* --- cl_doublen ---- */ + +typedef union +{ + cl_double CL_ALIGNED(16) s[2]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_double x, y; }; + __CL_ANON_STRUCT__ struct{ cl_double s0, s1; }; + __CL_ANON_STRUCT__ struct{ cl_double lo, hi; }; +#endif +#if defined( __CL_DOUBLE2__) + __cl_double2 v2; +#endif +}cl_double2; + +typedef union +{ + cl_double CL_ALIGNED(32) s[4]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_double x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_double s0, s1, s2, s3; }; + __CL_ANON_STRUCT__ struct{ cl_double2 lo, hi; }; +#endif +#if defined( __CL_DOUBLE2__) + __cl_double2 v2[2]; +#endif +#if defined( __CL_DOUBLE4__) + __cl_double4 v4; +#endif +}cl_double4; + +/* cl_double3 is identical in size, alignment and behavior to cl_double4. See section 6.1.5. */ +typedef cl_double4 cl_double3; + +typedef union +{ + cl_double CL_ALIGNED(64) s[8]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_double x, y, z, w; }; + __CL_ANON_STRUCT__ struct{ cl_double s0, s1, s2, s3, s4, s5, s6, s7; }; + __CL_ANON_STRUCT__ struct{ cl_double4 lo, hi; }; +#endif +#if defined( __CL_DOUBLE2__) + __cl_double2 v2[4]; +#endif +#if defined( __CL_DOUBLE4__) + __cl_double4 v4[2]; +#endif +#if defined( __CL_DOUBLE8__ ) + __cl_double8 v8; +#endif +}cl_double8; + +typedef union +{ + cl_double CL_ALIGNED(128) s[16]; +#if __CL_HAS_ANON_STRUCT__ + __CL_ANON_STRUCT__ struct{ cl_double x, y, z, w, __spacer4, __spacer5, __spacer6, __spacer7, __spacer8, __spacer9, sa, sb, sc, sd, se, sf; }; + __CL_ANON_STRUCT__ struct{ cl_double s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, sA, sB, sC, sD, sE, sF; }; + __CL_ANON_STRUCT__ struct{ cl_double8 lo, hi; }; +#endif +#if defined( __CL_DOUBLE2__) + __cl_double2 v2[8]; +#endif +#if defined( __CL_DOUBLE4__) + __cl_double4 v4[4]; +#endif +#if defined( __CL_DOUBLE8__ ) + __cl_double8 v8[2]; +#endif +#if defined( __CL_DOUBLE16__ ) + __cl_double16 v16; +#endif +}cl_double16; + +/* Macro to facilitate debugging + * Usage: + * Place CL_PROGRAM_STRING_DEBUG_INFO on the line before the first line of your source. 
+ * The first line ends with: CL_PROGRAM_STRING_DEBUG_INFO \" + * Each line thereafter of OpenCL C source must end with: \n\ + * The last line ends in "; + * + * Example: + * + * const char *my_program = CL_PROGRAM_STRING_DEBUG_INFO "\ + * kernel void foo( int a, float * b ) \n\ + * { \n\ + * // my comment \n\ + * *b[ get_global_id(0)] = a; \n\ + * } \n\ + * "; + * + * This should correctly set up the line, (column) and file information for your source + * string so you can do source level debugging. + */ +#define __CL_STRINGIFY( _x ) # _x +#define _CL_STRINGIFY( _x ) __CL_STRINGIFY( _x ) +#define CL_PROGRAM_STRING_DEBUG_INFO "#line " _CL_STRINGIFY(__LINE__) " \"" __FILE__ "\" \n\n" + +#ifdef __cplusplus +} +#endif + +#if defined( _WIN32) && defined(_MSC_VER) && ! defined(__STDC__) + #if _MSC_VER >=1500 + #pragma warning( pop ) + #endif +#endif + +#endif /* __CL_PLATFORM_H */ diff --git a/vendor/angle/include/CL/cl_va_api_media_sharing_intel.h b/vendor/angle/include/CL/cl_va_api_media_sharing_intel.h new file mode 100644 index 00000000..7c4a4bac --- /dev/null +++ b/vendor/angle/include/CL/cl_va_api_media_sharing_intel.h @@ -0,0 +1,136 @@ +/******************************************************************************* + * Copyright (c) 2008-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ + +#ifndef __OPENCL_CL_VA_API_MEDIA_SHARING_INTEL_H +#define __OPENCL_CL_VA_API_MEDIA_SHARING_INTEL_H + +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/****************************************** +* cl_intel_va_api_media_sharing extension * +*******************************************/ + +#define cl_intel_va_api_media_sharing 1 + +/* error codes */ +#define CL_INVALID_VA_API_MEDIA_ADAPTER_INTEL -1098 +#define CL_INVALID_VA_API_MEDIA_SURFACE_INTEL -1099 +#define CL_VA_API_MEDIA_SURFACE_ALREADY_ACQUIRED_INTEL -1100 +#define CL_VA_API_MEDIA_SURFACE_NOT_ACQUIRED_INTEL -1101 + +/* cl_va_api_device_source_intel */ +#define CL_VA_API_DISPLAY_INTEL 0x4094 + +/* cl_va_api_device_set_intel */ +#define CL_PREFERRED_DEVICES_FOR_VA_API_INTEL 0x4095 +#define CL_ALL_DEVICES_FOR_VA_API_INTEL 0x4096 + +/* cl_context_info */ +#define CL_CONTEXT_VA_API_DISPLAY_INTEL 0x4097 + +/* cl_mem_info */ +#define CL_MEM_VA_API_MEDIA_SURFACE_INTEL 0x4098 + +/* cl_image_info */ +#define CL_IMAGE_VA_API_PLANE_INTEL 0x4099 + +/* cl_command_type */ +#define CL_COMMAND_ACQUIRE_VA_API_MEDIA_SURFACES_INTEL 0x409A +#define CL_COMMAND_RELEASE_VA_API_MEDIA_SURFACES_INTEL 0x409B + +typedef cl_uint cl_va_api_device_source_intel; +typedef cl_uint cl_va_api_device_set_intel; + +extern CL_API_ENTRY cl_int CL_API_CALL +clGetDeviceIDsFromVA_APIMediaAdapterINTEL( + cl_platform_id platform, + cl_va_api_device_source_intel media_adapter_type, + void* media_adapter, + cl_va_api_device_set_intel media_adapter_set, + cl_uint num_entries, + cl_device_id* devices, + cl_uint* num_devices) CL_EXT_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int (CL_API_CALL * clGetDeviceIDsFromVA_APIMediaAdapterINTEL_fn)( + cl_platform_id platform, + cl_va_api_device_source_intel media_adapter_type, + void* media_adapter, + cl_va_api_device_set_intel media_adapter_set, + cl_uint num_entries, + cl_device_id* devices, + cl_uint* num_devices) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_mem CL_API_CALL +clCreateFromVA_APIMediaSurfaceINTEL( + cl_context context, + cl_mem_flags flags, + VASurfaceID* surface, + cl_uint plane, + cl_int* errcode_ret) CL_EXT_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_mem (CL_API_CALL * clCreateFromVA_APIMediaSurfaceINTEL_fn)( + cl_context context, + cl_mem_flags flags, + VASurfaceID* surface, + cl_uint plane, + cl_int* errcode_ret) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueAcquireVA_APIMediaSurfacesINTEL( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem* mem_objects, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event) CL_EXT_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueAcquireVA_APIMediaSurfacesINTEL_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem* mem_objects, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event) CL_EXT_SUFFIX__VERSION_1_2; + +extern CL_API_ENTRY cl_int CL_API_CALL +clEnqueueReleaseVA_APIMediaSurfacesINTEL( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem* mem_objects, + cl_uint num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event) CL_EXT_SUFFIX__VERSION_1_2; + +typedef CL_API_ENTRY cl_int (CL_API_CALL *clEnqueueReleaseVA_APIMediaSurfacesINTEL_fn)( + cl_command_queue command_queue, + cl_uint num_objects, + const cl_mem* mem_objects, + cl_uint 
num_events_in_wait_list, + const cl_event* event_wait_list, + cl_event* event) CL_EXT_SUFFIX__VERSION_1_2; + +#ifdef __cplusplus +} +#endif + +#endif /* __OPENCL_CL_VA_API_MEDIA_SHARING_INTEL_H */ + diff --git a/vendor/angle/include/CL/cl_version.h b/vendor/angle/include/CL/cl_version.h new file mode 100644 index 00000000..3844938d --- /dev/null +++ b/vendor/angle/include/CL/cl_version.h @@ -0,0 +1,81 @@ +/******************************************************************************* + * Copyright (c) 2018-2020 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +#ifndef __CL_VERSION_H +#define __CL_VERSION_H + +/* Detect which version to target */ +#if !defined(CL_TARGET_OPENCL_VERSION) +#pragma message("cl_version.h: CL_TARGET_OPENCL_VERSION is not defined. Defaulting to 300 (OpenCL 3.0)") +#define CL_TARGET_OPENCL_VERSION 300 +#endif +#if CL_TARGET_OPENCL_VERSION != 100 && \ + CL_TARGET_OPENCL_VERSION != 110 && \ + CL_TARGET_OPENCL_VERSION != 120 && \ + CL_TARGET_OPENCL_VERSION != 200 && \ + CL_TARGET_OPENCL_VERSION != 210 && \ + CL_TARGET_OPENCL_VERSION != 220 && \ + CL_TARGET_OPENCL_VERSION != 300 +#pragma message("cl_version: CL_TARGET_OPENCL_VERSION is not a valid value (100, 110, 120, 200, 210, 220, 300). Defaulting to 300 (OpenCL 3.0)") +#undef CL_TARGET_OPENCL_VERSION +#define CL_TARGET_OPENCL_VERSION 300 +#endif + + +/* OpenCL Version */ +#if CL_TARGET_OPENCL_VERSION >= 300 && !defined(CL_VERSION_3_0) +#define CL_VERSION_3_0 1 +#endif +#if CL_TARGET_OPENCL_VERSION >= 220 && !defined(CL_VERSION_2_2) +#define CL_VERSION_2_2 1 +#endif +#if CL_TARGET_OPENCL_VERSION >= 210 && !defined(CL_VERSION_2_1) +#define CL_VERSION_2_1 1 +#endif +#if CL_TARGET_OPENCL_VERSION >= 200 && !defined(CL_VERSION_2_0) +#define CL_VERSION_2_0 1 +#endif +#if CL_TARGET_OPENCL_VERSION >= 120 && !defined(CL_VERSION_1_2) +#define CL_VERSION_1_2 1 +#endif +#if CL_TARGET_OPENCL_VERSION >= 110 && !defined(CL_VERSION_1_1) +#define CL_VERSION_1_1 1 +#endif +#if CL_TARGET_OPENCL_VERSION >= 100 && !defined(CL_VERSION_1_0) +#define CL_VERSION_1_0 1 +#endif + +/* Allow deprecated APIs for older OpenCL versions. 
*/ +#if CL_TARGET_OPENCL_VERSION <= 220 && !defined(CL_USE_DEPRECATED_OPENCL_2_2_APIS) +#define CL_USE_DEPRECATED_OPENCL_2_2_APIS +#endif +#if CL_TARGET_OPENCL_VERSION <= 210 && !defined(CL_USE_DEPRECATED_OPENCL_2_1_APIS) +#define CL_USE_DEPRECATED_OPENCL_2_1_APIS +#endif +#if CL_TARGET_OPENCL_VERSION <= 200 && !defined(CL_USE_DEPRECATED_OPENCL_2_0_APIS) +#define CL_USE_DEPRECATED_OPENCL_2_0_APIS +#endif +#if CL_TARGET_OPENCL_VERSION <= 120 && !defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS) +#define CL_USE_DEPRECATED_OPENCL_1_2_APIS +#endif +#if CL_TARGET_OPENCL_VERSION <= 110 && !defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS) +#define CL_USE_DEPRECATED_OPENCL_1_1_APIS +#endif +#if CL_TARGET_OPENCL_VERSION <= 100 && !defined(CL_USE_DEPRECATED_OPENCL_1_0_APIS) +#define CL_USE_DEPRECATED_OPENCL_1_0_APIS +#endif + +#endif /* __CL_VERSION_H */ diff --git a/vendor/angle/include/CL/opencl.h b/vendor/angle/include/CL/opencl.h new file mode 100644 index 00000000..058bcc4d --- /dev/null +++ b/vendor/angle/include/CL/opencl.h @@ -0,0 +1,32 @@ +/******************************************************************************* + * Copyright (c) 2008-2021 The Khronos Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +#ifndef __OPENCL_H +#define __OPENCL_H + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#ifdef __cplusplus +} +#endif + +#endif /* __OPENCL_H */ diff --git a/vendor/angle/include/EGL/.clang-format b/vendor/angle/include/EGL/.clang-format new file mode 100644 index 00000000..06147100 --- /dev/null +++ b/vendor/angle/include/EGL/.clang-format @@ -0,0 +1,3 @@ + + +DisableFormat: true diff --git a/vendor/angle/include/EGL/egl.h b/vendor/angle/include/EGL/egl.h new file mode 100644 index 00000000..6e05b156 --- /dev/null +++ b/vendor/angle/include/EGL/egl.h @@ -0,0 +1,342 @@ +#ifndef __egl_h_ +#define __egl_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright 2013-2020 The Khronos Group Inc. +** SPDX-License-Identifier: Apache-2.0 +** +** This header is generated from the Khronos EGL XML API Registry. 
+** The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** http://www.khronos.org/registry/egl +** +** Khronos $Git commit SHA1: 5a9a7e3fcb $ on $Git commit date: 2020-08-24 11:05:32 -0700 $ +*/ + +#include + +#ifndef EGL_EGL_PROTOTYPES +#define EGL_EGL_PROTOTYPES 1 +#endif + +/* Generated on date 20201001 */ + +/* Generated C header for: + * API: egl + * Versions considered: .* + * Versions emitted: .* + * Default extensions included: None + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef EGL_VERSION_1_0 +#define EGL_VERSION_1_0 1 +typedef unsigned int EGLBoolean; +typedef void *EGLDisplay; +#include +#include +typedef void *EGLConfig; +typedef void *EGLSurface; +typedef void *EGLContext; +typedef void (*__eglMustCastToProperFunctionPointerType)(void); +#define EGL_ALPHA_SIZE 0x3021 +#define EGL_BAD_ACCESS 0x3002 +#define EGL_BAD_ALLOC 0x3003 +#define EGL_BAD_ATTRIBUTE 0x3004 +#define EGL_BAD_CONFIG 0x3005 +#define EGL_BAD_CONTEXT 0x3006 +#define EGL_BAD_CURRENT_SURFACE 0x3007 +#define EGL_BAD_DISPLAY 0x3008 +#define EGL_BAD_MATCH 0x3009 +#define EGL_BAD_NATIVE_PIXMAP 0x300A +#define EGL_BAD_NATIVE_WINDOW 0x300B +#define EGL_BAD_PARAMETER 0x300C +#define EGL_BAD_SURFACE 0x300D +#define EGL_BLUE_SIZE 0x3022 +#define EGL_BUFFER_SIZE 0x3020 +#define EGL_CONFIG_CAVEAT 0x3027 +#define EGL_CONFIG_ID 0x3028 +#define EGL_CORE_NATIVE_ENGINE 0x305B +#define EGL_DEPTH_SIZE 0x3025 +#define EGL_DONT_CARE EGL_CAST(EGLint,-1) +#define EGL_DRAW 0x3059 +#define EGL_EXTENSIONS 0x3055 +#define EGL_FALSE 0 +#define EGL_GREEN_SIZE 0x3023 +#define EGL_HEIGHT 0x3056 +#define EGL_LARGEST_PBUFFER 0x3058 +#define EGL_LEVEL 0x3029 +#define EGL_MAX_PBUFFER_HEIGHT 0x302A +#define EGL_MAX_PBUFFER_PIXELS 0x302B +#define EGL_MAX_PBUFFER_WIDTH 0x302C +#define EGL_NATIVE_RENDERABLE 0x302D +#define EGL_NATIVE_VISUAL_ID 0x302E +#define EGL_NATIVE_VISUAL_TYPE 0x302F +#define EGL_NONE 0x3038 +#define EGL_NON_CONFORMANT_CONFIG 0x3051 +#define EGL_NOT_INITIALIZED 0x3001 +#define EGL_NO_CONTEXT EGL_CAST(EGLContext,0) +#define EGL_NO_DISPLAY EGL_CAST(EGLDisplay,0) +#define EGL_NO_SURFACE EGL_CAST(EGLSurface,0) +#define EGL_PBUFFER_BIT 0x0001 +#define EGL_PIXMAP_BIT 0x0002 +#define EGL_READ 0x305A +#define EGL_RED_SIZE 0x3024 +#define EGL_SAMPLES 0x3031 +#define EGL_SAMPLE_BUFFERS 0x3032 +#define EGL_SLOW_CONFIG 0x3050 +#define EGL_STENCIL_SIZE 0x3026 +#define EGL_SUCCESS 0x3000 +#define EGL_SURFACE_TYPE 0x3033 +#define EGL_TRANSPARENT_BLUE_VALUE 0x3035 +#define EGL_TRANSPARENT_GREEN_VALUE 0x3036 +#define EGL_TRANSPARENT_RED_VALUE 0x3037 +#define EGL_TRANSPARENT_RGB 0x3052 +#define EGL_TRANSPARENT_TYPE 0x3034 +#define EGL_TRUE 1 +#define EGL_VENDOR 0x3053 +#define EGL_VERSION 0x3054 +#define EGL_WIDTH 0x3057 +#define EGL_WINDOW_BIT 0x0004 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCHOOSECONFIGPROC) (EGLDisplay dpy, const EGLint *attrib_list, EGLConfig *configs, EGLint config_size, EGLint *num_config); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOPYBUFFERSPROC) (EGLDisplay dpy, EGLSurface surface, EGLNativePixmapType target); +typedef EGLContext (EGLAPIENTRYP PFNEGLCREATECONTEXTPROC) (EGLDisplay dpy, EGLConfig config, EGLContext share_context, const EGLint *attrib_list); +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATEPBUFFERSURFACEPROC) (EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list); +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATEPIXMAPSURFACEPROC) (EGLDisplay dpy, EGLConfig config, EGLNativePixmapType pixmap, 
const EGLint *attrib_list); +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATEWINDOWSURFACEPROC) (EGLDisplay dpy, EGLConfig config, EGLNativeWindowType win, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYCONTEXTPROC) (EGLDisplay dpy, EGLContext ctx); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYSURFACEPROC) (EGLDisplay dpy, EGLSurface surface); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETCONFIGATTRIBPROC) (EGLDisplay dpy, EGLConfig config, EGLint attribute, EGLint *value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETCONFIGSPROC) (EGLDisplay dpy, EGLConfig *configs, EGLint config_size, EGLint *num_config); +typedef EGLDisplay (EGLAPIENTRYP PFNEGLGETCURRENTDISPLAYPROC) (void); +typedef EGLSurface (EGLAPIENTRYP PFNEGLGETCURRENTSURFACEPROC) (EGLint readdraw); +typedef EGLDisplay (EGLAPIENTRYP PFNEGLGETDISPLAYPROC) (EGLNativeDisplayType display_id); +typedef EGLint (EGLAPIENTRYP PFNEGLGETERRORPROC) (void); +typedef __eglMustCastToProperFunctionPointerType (EGLAPIENTRYP PFNEGLGETPROCADDRESSPROC) (const char *procname); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLINITIALIZEPROC) (EGLDisplay dpy, EGLint *major, EGLint *minor); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLMAKECURRENTPROC) (EGLDisplay dpy, EGLSurface draw, EGLSurface read, EGLContext ctx); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYCONTEXTPROC) (EGLDisplay dpy, EGLContext ctx, EGLint attribute, EGLint *value); +typedef const char *(EGLAPIENTRYP PFNEGLQUERYSTRINGPROC) (EGLDisplay dpy, EGLint name); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSURFACEPROC) (EGLDisplay dpy, EGLSurface surface, EGLint attribute, EGLint *value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSWAPBUFFERSPROC) (EGLDisplay dpy, EGLSurface surface); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLTERMINATEPROC) (EGLDisplay dpy); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLWAITGLPROC) (void); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLWAITNATIVEPROC) (EGLint engine); +#if EGL_EGL_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglChooseConfig (EGLDisplay dpy, const EGLint *attrib_list, EGLConfig *configs, EGLint config_size, EGLint *num_config); +EGLAPI EGLBoolean EGLAPIENTRY eglCopyBuffers (EGLDisplay dpy, EGLSurface surface, EGLNativePixmapType target); +EGLAPI EGLContext EGLAPIENTRY eglCreateContext (EGLDisplay dpy, EGLConfig config, EGLContext share_context, const EGLint *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePbufferSurface (EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePixmapSurface (EGLDisplay dpy, EGLConfig config, EGLNativePixmapType pixmap, const EGLint *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreateWindowSurface (EGLDisplay dpy, EGLConfig config, EGLNativeWindowType win, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroyContext (EGLDisplay dpy, EGLContext ctx); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroySurface (EGLDisplay dpy, EGLSurface surface); +EGLAPI EGLBoolean EGLAPIENTRY eglGetConfigAttrib (EGLDisplay dpy, EGLConfig config, EGLint attribute, EGLint *value); +EGLAPI EGLBoolean EGLAPIENTRY eglGetConfigs (EGLDisplay dpy, EGLConfig *configs, EGLint config_size, EGLint *num_config); +EGLAPI EGLDisplay EGLAPIENTRY eglGetCurrentDisplay (void); +EGLAPI EGLSurface EGLAPIENTRY eglGetCurrentSurface (EGLint readdraw); +EGLAPI EGLDisplay EGLAPIENTRY eglGetDisplay (EGLNativeDisplayType display_id); +EGLAPI EGLint EGLAPIENTRY eglGetError (void); +EGLAPI __eglMustCastToProperFunctionPointerType EGLAPIENTRY eglGetProcAddress (const char *procname); +EGLAPI EGLBoolean 
EGLAPIENTRY eglInitialize (EGLDisplay dpy, EGLint *major, EGLint *minor); +EGLAPI EGLBoolean EGLAPIENTRY eglMakeCurrent (EGLDisplay dpy, EGLSurface draw, EGLSurface read, EGLContext ctx); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryContext (EGLDisplay dpy, EGLContext ctx, EGLint attribute, EGLint *value); +EGLAPI const char *EGLAPIENTRY eglQueryString (EGLDisplay dpy, EGLint name); +EGLAPI EGLBoolean EGLAPIENTRY eglQuerySurface (EGLDisplay dpy, EGLSurface surface, EGLint attribute, EGLint *value); +EGLAPI EGLBoolean EGLAPIENTRY eglSwapBuffers (EGLDisplay dpy, EGLSurface surface); +EGLAPI EGLBoolean EGLAPIENTRY eglTerminate (EGLDisplay dpy); +EGLAPI EGLBoolean EGLAPIENTRY eglWaitGL (void); +EGLAPI EGLBoolean EGLAPIENTRY eglWaitNative (EGLint engine); +#endif +#endif /* EGL_VERSION_1_0 */ + +#ifndef EGL_VERSION_1_1 +#define EGL_VERSION_1_1 1 +#define EGL_BACK_BUFFER 0x3084 +#define EGL_BIND_TO_TEXTURE_RGB 0x3039 +#define EGL_BIND_TO_TEXTURE_RGBA 0x303A +#define EGL_CONTEXT_LOST 0x300E +#define EGL_MIN_SWAP_INTERVAL 0x303B +#define EGL_MAX_SWAP_INTERVAL 0x303C +#define EGL_MIPMAP_TEXTURE 0x3082 +#define EGL_MIPMAP_LEVEL 0x3083 +#define EGL_NO_TEXTURE 0x305C +#define EGL_TEXTURE_2D 0x305F +#define EGL_TEXTURE_FORMAT 0x3080 +#define EGL_TEXTURE_RGB 0x305D +#define EGL_TEXTURE_RGBA 0x305E +#define EGL_TEXTURE_TARGET 0x3081 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLBINDTEXIMAGEPROC) (EGLDisplay dpy, EGLSurface surface, EGLint buffer); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLRELEASETEXIMAGEPROC) (EGLDisplay dpy, EGLSurface surface, EGLint buffer); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSURFACEATTRIBPROC) (EGLDisplay dpy, EGLSurface surface, EGLint attribute, EGLint value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSWAPINTERVALPROC) (EGLDisplay dpy, EGLint interval); +#if EGL_EGL_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglBindTexImage (EGLDisplay dpy, EGLSurface surface, EGLint buffer); +EGLAPI EGLBoolean EGLAPIENTRY eglReleaseTexImage (EGLDisplay dpy, EGLSurface surface, EGLint buffer); +EGLAPI EGLBoolean EGLAPIENTRY eglSurfaceAttrib (EGLDisplay dpy, EGLSurface surface, EGLint attribute, EGLint value); +EGLAPI EGLBoolean EGLAPIENTRY eglSwapInterval (EGLDisplay dpy, EGLint interval); +#endif +#endif /* EGL_VERSION_1_1 */ + +#ifndef EGL_VERSION_1_2 +#define EGL_VERSION_1_2 1 +typedef unsigned int EGLenum; +typedef void *EGLClientBuffer; +#define EGL_ALPHA_FORMAT 0x3088 +#define EGL_ALPHA_FORMAT_NONPRE 0x308B +#define EGL_ALPHA_FORMAT_PRE 0x308C +#define EGL_ALPHA_MASK_SIZE 0x303E +#define EGL_BUFFER_PRESERVED 0x3094 +#define EGL_BUFFER_DESTROYED 0x3095 +#define EGL_CLIENT_APIS 0x308D +#define EGL_COLORSPACE 0x3087 +#define EGL_COLORSPACE_sRGB 0x3089 +#define EGL_COLORSPACE_LINEAR 0x308A +#define EGL_COLOR_BUFFER_TYPE 0x303F +#define EGL_CONTEXT_CLIENT_TYPE 0x3097 +#define EGL_DISPLAY_SCALING 10000 +#define EGL_HORIZONTAL_RESOLUTION 0x3090 +#define EGL_LUMINANCE_BUFFER 0x308F +#define EGL_LUMINANCE_SIZE 0x303D +#define EGL_OPENGL_ES_BIT 0x0001 +#define EGL_OPENVG_BIT 0x0002 +#define EGL_OPENGL_ES_API 0x30A0 +#define EGL_OPENVG_API 0x30A1 +#define EGL_OPENVG_IMAGE 0x3096 +#define EGL_PIXEL_ASPECT_RATIO 0x3092 +#define EGL_RENDERABLE_TYPE 0x3040 +#define EGL_RENDER_BUFFER 0x3086 +#define EGL_RGB_BUFFER 0x308E +#define EGL_SINGLE_BUFFER 0x3085 +#define EGL_SWAP_BEHAVIOR 0x3093 +#define EGL_UNKNOWN EGL_CAST(EGLint,-1) +#define EGL_VERTICAL_RESOLUTION 0x3091 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLBINDAPIPROC) (EGLenum api); +typedef EGLenum (EGLAPIENTRYP PFNEGLQUERYAPIPROC) (void); +typedef EGLSurface 
(EGLAPIENTRYP PFNEGLCREATEPBUFFERFROMCLIENTBUFFERPROC) (EGLDisplay dpy, EGLenum buftype, EGLClientBuffer buffer, EGLConfig config, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLRELEASETHREADPROC) (void); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLWAITCLIENTPROC) (void); +#if EGL_EGL_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglBindAPI (EGLenum api); +EGLAPI EGLenum EGLAPIENTRY eglQueryAPI (void); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePbufferFromClientBuffer (EGLDisplay dpy, EGLenum buftype, EGLClientBuffer buffer, EGLConfig config, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglReleaseThread (void); +EGLAPI EGLBoolean EGLAPIENTRY eglWaitClient (void); +#endif +#endif /* EGL_VERSION_1_2 */ + +#ifndef EGL_VERSION_1_3 +#define EGL_VERSION_1_3 1 +#define EGL_CONFORMANT 0x3042 +#define EGL_CONTEXT_CLIENT_VERSION 0x3098 +#define EGL_MATCH_NATIVE_PIXMAP 0x3041 +#define EGL_OPENGL_ES2_BIT 0x0004 +#define EGL_VG_ALPHA_FORMAT 0x3088 +#define EGL_VG_ALPHA_FORMAT_NONPRE 0x308B +#define EGL_VG_ALPHA_FORMAT_PRE 0x308C +#define EGL_VG_ALPHA_FORMAT_PRE_BIT 0x0040 +#define EGL_VG_COLORSPACE 0x3087 +#define EGL_VG_COLORSPACE_sRGB 0x3089 +#define EGL_VG_COLORSPACE_LINEAR 0x308A +#define EGL_VG_COLORSPACE_LINEAR_BIT 0x0020 +#endif /* EGL_VERSION_1_3 */ + +#ifndef EGL_VERSION_1_4 +#define EGL_VERSION_1_4 1 +#define EGL_DEFAULT_DISPLAY EGL_CAST(EGLNativeDisplayType,0) +#define EGL_MULTISAMPLE_RESOLVE_BOX_BIT 0x0200 +#define EGL_MULTISAMPLE_RESOLVE 0x3099 +#define EGL_MULTISAMPLE_RESOLVE_DEFAULT 0x309A +#define EGL_MULTISAMPLE_RESOLVE_BOX 0x309B +#define EGL_OPENGL_API 0x30A2 +#define EGL_OPENGL_BIT 0x0008 +#define EGL_SWAP_BEHAVIOR_PRESERVED_BIT 0x0400 +typedef EGLContext (EGLAPIENTRYP PFNEGLGETCURRENTCONTEXTPROC) (void); +#if EGL_EGL_PROTOTYPES +EGLAPI EGLContext EGLAPIENTRY eglGetCurrentContext (void); +#endif +#endif /* EGL_VERSION_1_4 */ + +#ifndef EGL_VERSION_1_5 +#define EGL_VERSION_1_5 1 +typedef void *EGLSync; +typedef intptr_t EGLAttrib; +typedef khronos_utime_nanoseconds_t EGLTime; +typedef void *EGLImage; +#define EGL_CONTEXT_MAJOR_VERSION 0x3098 +#define EGL_CONTEXT_MINOR_VERSION 0x30FB +#define EGL_CONTEXT_OPENGL_PROFILE_MASK 0x30FD +#define EGL_CONTEXT_OPENGL_RESET_NOTIFICATION_STRATEGY 0x31BD +#define EGL_NO_RESET_NOTIFICATION 0x31BE +#define EGL_LOSE_CONTEXT_ON_RESET 0x31BF +#define EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT 0x00000001 +#define EGL_CONTEXT_OPENGL_COMPATIBILITY_PROFILE_BIT 0x00000002 +#define EGL_CONTEXT_OPENGL_DEBUG 0x31B0 +#define EGL_CONTEXT_OPENGL_FORWARD_COMPATIBLE 0x31B1 +#define EGL_CONTEXT_OPENGL_ROBUST_ACCESS 0x31B2 +#define EGL_OPENGL_ES3_BIT 0x00000040 +#define EGL_CL_EVENT_HANDLE 0x309C +#define EGL_SYNC_CL_EVENT 0x30FE +#define EGL_SYNC_CL_EVENT_COMPLETE 0x30FF +#define EGL_SYNC_PRIOR_COMMANDS_COMPLETE 0x30F0 +#define EGL_SYNC_TYPE 0x30F7 +#define EGL_SYNC_STATUS 0x30F1 +#define EGL_SYNC_CONDITION 0x30F8 +#define EGL_SIGNALED 0x30F2 +#define EGL_UNSIGNALED 0x30F3 +#define EGL_SYNC_FLUSH_COMMANDS_BIT 0x0001 +#define EGL_FOREVER 0xFFFFFFFFFFFFFFFFull +#define EGL_TIMEOUT_EXPIRED 0x30F5 +#define EGL_CONDITION_SATISFIED 0x30F6 +#define EGL_NO_SYNC EGL_CAST(EGLSync,0) +#define EGL_SYNC_FENCE 0x30F9 +#define EGL_GL_COLORSPACE 0x309D +#define EGL_GL_COLORSPACE_SRGB 0x3089 +#define EGL_GL_COLORSPACE_LINEAR 0x308A +#define EGL_GL_RENDERBUFFER 0x30B9 +#define EGL_GL_TEXTURE_2D 0x30B1 +#define EGL_GL_TEXTURE_LEVEL 0x30BC +#define EGL_GL_TEXTURE_3D 0x30B2 +#define EGL_GL_TEXTURE_ZOFFSET 0x30BD +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x30B3 
+#define EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x30B4 +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x30B5 +#define EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x30B6 +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x30B7 +#define EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x30B8 +#define EGL_IMAGE_PRESERVED 0x30D2 +#define EGL_NO_IMAGE EGL_CAST(EGLImage,0) +typedef EGLSync (EGLAPIENTRYP PFNEGLCREATESYNCPROC) (EGLDisplay dpy, EGLenum type, const EGLAttrib *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYSYNCPROC) (EGLDisplay dpy, EGLSync sync); +typedef EGLint (EGLAPIENTRYP PFNEGLCLIENTWAITSYNCPROC) (EGLDisplay dpy, EGLSync sync, EGLint flags, EGLTime timeout); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETSYNCATTRIBPROC) (EGLDisplay dpy, EGLSync sync, EGLint attribute, EGLAttrib *value); +typedef EGLImage (EGLAPIENTRYP PFNEGLCREATEIMAGEPROC) (EGLDisplay dpy, EGLContext ctx, EGLenum target, EGLClientBuffer buffer, const EGLAttrib *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYIMAGEPROC) (EGLDisplay dpy, EGLImage image); +typedef EGLDisplay (EGLAPIENTRYP PFNEGLGETPLATFORMDISPLAYPROC) (EGLenum platform, void *native_display, const EGLAttrib *attrib_list); +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATEPLATFORMWINDOWSURFACEPROC) (EGLDisplay dpy, EGLConfig config, void *native_window, const EGLAttrib *attrib_list); +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATEPLATFORMPIXMAPSURFACEPROC) (EGLDisplay dpy, EGLConfig config, void *native_pixmap, const EGLAttrib *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLWAITSYNCPROC) (EGLDisplay dpy, EGLSync sync, EGLint flags); +#if EGL_EGL_PROTOTYPES +EGLAPI EGLSync EGLAPIENTRY eglCreateSync (EGLDisplay dpy, EGLenum type, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroySync (EGLDisplay dpy, EGLSync sync); +EGLAPI EGLint EGLAPIENTRY eglClientWaitSync (EGLDisplay dpy, EGLSync sync, EGLint flags, EGLTime timeout); +EGLAPI EGLBoolean EGLAPIENTRY eglGetSyncAttrib (EGLDisplay dpy, EGLSync sync, EGLint attribute, EGLAttrib *value); +EGLAPI EGLImage EGLAPIENTRY eglCreateImage (EGLDisplay dpy, EGLContext ctx, EGLenum target, EGLClientBuffer buffer, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroyImage (EGLDisplay dpy, EGLImage image); +EGLAPI EGLDisplay EGLAPIENTRY eglGetPlatformDisplay (EGLenum platform, void *native_display, const EGLAttrib *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePlatformWindowSurface (EGLDisplay dpy, EGLConfig config, void *native_window, const EGLAttrib *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePlatformPixmapSurface (EGLDisplay dpy, EGLConfig config, void *native_pixmap, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglWaitSync (EGLDisplay dpy, EGLSync sync, EGLint flags); +#endif +#endif /* EGL_VERSION_1_5 */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/angle/include/EGL/eglext.h b/vendor/angle/include/EGL/eglext.h new file mode 100644 index 00000000..77651878 --- /dev/null +++ b/vendor/angle/include/EGL/eglext.h @@ -0,0 +1,1422 @@ +#ifndef __eglext_h_ +#define __eglext_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright 2013-2020 The Khronos Group Inc. +** SPDX-License-Identifier: Apache-2.0 +** +** This header is generated from the Khronos EGL XML API Registry. 
+** The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** http://www.khronos.org/registry/egl +** +** Khronos $Git commit SHA1: 5a9a7e3fcb $ on $Git commit date: 2020-08-24 11:05:32 -0700 $ +*/ + +#include + +#define EGL_EGLEXT_VERSION 20201001 + +/* Generated C header for: + * API: egl + * Versions considered: .* + * Versions emitted: _nomatch_^ + * Default extensions included: egl + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef EGL_KHR_cl_event +#define EGL_KHR_cl_event 1 +#define EGL_CL_EVENT_HANDLE_KHR 0x309C +#define EGL_SYNC_CL_EVENT_KHR 0x30FE +#define EGL_SYNC_CL_EVENT_COMPLETE_KHR 0x30FF +#endif /* EGL_KHR_cl_event */ + +#ifndef EGL_KHR_cl_event2 +#define EGL_KHR_cl_event2 1 +typedef void *EGLSyncKHR; +typedef intptr_t EGLAttribKHR; +typedef EGLSyncKHR (EGLAPIENTRYP PFNEGLCREATESYNC64KHRPROC) (EGLDisplay dpy, EGLenum type, const EGLAttribKHR *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSyncKHR EGLAPIENTRY eglCreateSync64KHR (EGLDisplay dpy, EGLenum type, const EGLAttribKHR *attrib_list); +#endif +#endif /* EGL_KHR_cl_event2 */ + +#ifndef EGL_KHR_client_get_all_proc_addresses +#define EGL_KHR_client_get_all_proc_addresses 1 +#endif /* EGL_KHR_client_get_all_proc_addresses */ + +#ifndef EGL_KHR_config_attribs +#define EGL_KHR_config_attribs 1 +#define EGL_CONFORMANT_KHR 0x3042 +#define EGL_VG_COLORSPACE_LINEAR_BIT_KHR 0x0020 +#define EGL_VG_ALPHA_FORMAT_PRE_BIT_KHR 0x0040 +#endif /* EGL_KHR_config_attribs */ + +#ifndef EGL_KHR_context_flush_control +#define EGL_KHR_context_flush_control 1 +#define EGL_CONTEXT_RELEASE_BEHAVIOR_NONE_KHR 0 +#define EGL_CONTEXT_RELEASE_BEHAVIOR_KHR 0x2097 +#define EGL_CONTEXT_RELEASE_BEHAVIOR_FLUSH_KHR 0x2098 +#endif /* EGL_KHR_context_flush_control */ + +#ifndef EGL_KHR_create_context +#define EGL_KHR_create_context 1 +#define EGL_CONTEXT_MAJOR_VERSION_KHR 0x3098 +#define EGL_CONTEXT_MINOR_VERSION_KHR 0x30FB +#define EGL_CONTEXT_FLAGS_KHR 0x30FC +#define EGL_CONTEXT_OPENGL_PROFILE_MASK_KHR 0x30FD +#define EGL_CONTEXT_OPENGL_RESET_NOTIFICATION_STRATEGY_KHR 0x31BD +#define EGL_NO_RESET_NOTIFICATION_KHR 0x31BE +#define EGL_LOSE_CONTEXT_ON_RESET_KHR 0x31BF +#define EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR 0x00000001 +#define EGL_CONTEXT_OPENGL_FORWARD_COMPATIBLE_BIT_KHR 0x00000002 +#define EGL_CONTEXT_OPENGL_ROBUST_ACCESS_BIT_KHR 0x00000004 +#define EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR 0x00000001 +#define EGL_CONTEXT_OPENGL_COMPATIBILITY_PROFILE_BIT_KHR 0x00000002 +#define EGL_OPENGL_ES3_BIT_KHR 0x00000040 +#endif /* EGL_KHR_create_context */ + +#ifndef EGL_KHR_create_context_no_error +#define EGL_KHR_create_context_no_error 1 +#define EGL_CONTEXT_OPENGL_NO_ERROR_KHR 0x31B3 +#endif /* EGL_KHR_create_context_no_error */ + +#ifndef EGL_KHR_debug +#define EGL_KHR_debug 1 +typedef void *EGLLabelKHR; +typedef void *EGLObjectKHR; +typedef void (EGLAPIENTRY *EGLDEBUGPROCKHR)(EGLenum error,const char *command,EGLint messageType,EGLLabelKHR threadLabel,EGLLabelKHR objectLabel,const char* message); +#define EGL_OBJECT_THREAD_KHR 0x33B0 +#define EGL_OBJECT_DISPLAY_KHR 0x33B1 +#define EGL_OBJECT_CONTEXT_KHR 0x33B2 +#define EGL_OBJECT_SURFACE_KHR 0x33B3 +#define EGL_OBJECT_IMAGE_KHR 0x33B4 +#define EGL_OBJECT_SYNC_KHR 0x33B5 +#define EGL_OBJECT_STREAM_KHR 0x33B6 +#define EGL_DEBUG_MSG_CRITICAL_KHR 0x33B9 +#define EGL_DEBUG_MSG_ERROR_KHR 0x33BA +#define EGL_DEBUG_MSG_WARN_KHR 0x33BB +#define EGL_DEBUG_MSG_INFO_KHR 0x33BC +#define 
EGL_DEBUG_CALLBACK_KHR 0x33B8 +typedef EGLint (EGLAPIENTRYP PFNEGLDEBUGMESSAGECONTROLKHRPROC) (EGLDEBUGPROCKHR callback, const EGLAttrib *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDEBUGKHRPROC) (EGLint attribute, EGLAttrib *value); +typedef EGLint (EGLAPIENTRYP PFNEGLLABELOBJECTKHRPROC) (EGLDisplay display, EGLenum objectType, EGLObjectKHR object, EGLLabelKHR label); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLint EGLAPIENTRY eglDebugMessageControlKHR (EGLDEBUGPROCKHR callback, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDebugKHR (EGLint attribute, EGLAttrib *value); +EGLAPI EGLint EGLAPIENTRY eglLabelObjectKHR (EGLDisplay display, EGLenum objectType, EGLObjectKHR object, EGLLabelKHR label); +#endif +#endif /* EGL_KHR_debug */ + +#ifndef EGL_KHR_display_reference +#define EGL_KHR_display_reference 1 +#define EGL_TRACK_REFERENCES_KHR 0x3352 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDISPLAYATTRIBKHRPROC) (EGLDisplay dpy, EGLint name, EGLAttrib *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDisplayAttribKHR (EGLDisplay dpy, EGLint name, EGLAttrib *value); +#endif +#endif /* EGL_KHR_display_reference */ + +#ifndef EGL_KHR_fence_sync +#define EGL_KHR_fence_sync 1 +typedef khronos_utime_nanoseconds_t EGLTimeKHR; +#ifdef KHRONOS_SUPPORT_INT64 +#define EGL_SYNC_PRIOR_COMMANDS_COMPLETE_KHR 0x30F0 +#define EGL_SYNC_CONDITION_KHR 0x30F8 +#define EGL_SYNC_FENCE_KHR 0x30F9 +typedef EGLSyncKHR (EGLAPIENTRYP PFNEGLCREATESYNCKHRPROC) (EGLDisplay dpy, EGLenum type, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYSYNCKHRPROC) (EGLDisplay dpy, EGLSyncKHR sync); +typedef EGLint (EGLAPIENTRYP PFNEGLCLIENTWAITSYNCKHRPROC) (EGLDisplay dpy, EGLSyncKHR sync, EGLint flags, EGLTimeKHR timeout); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETSYNCATTRIBKHRPROC) (EGLDisplay dpy, EGLSyncKHR sync, EGLint attribute, EGLint *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSyncKHR EGLAPIENTRY eglCreateSyncKHR (EGLDisplay dpy, EGLenum type, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroySyncKHR (EGLDisplay dpy, EGLSyncKHR sync); +EGLAPI EGLint EGLAPIENTRY eglClientWaitSyncKHR (EGLDisplay dpy, EGLSyncKHR sync, EGLint flags, EGLTimeKHR timeout); +EGLAPI EGLBoolean EGLAPIENTRY eglGetSyncAttribKHR (EGLDisplay dpy, EGLSyncKHR sync, EGLint attribute, EGLint *value); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_KHR_fence_sync */ + +#ifndef EGL_KHR_get_all_proc_addresses +#define EGL_KHR_get_all_proc_addresses 1 +#endif /* EGL_KHR_get_all_proc_addresses */ + +#ifndef EGL_KHR_gl_colorspace +#define EGL_KHR_gl_colorspace 1 +#define EGL_GL_COLORSPACE_KHR 0x309D +#define EGL_GL_COLORSPACE_SRGB_KHR 0x3089 +#define EGL_GL_COLORSPACE_LINEAR_KHR 0x308A +#endif /* EGL_KHR_gl_colorspace */ + +#ifndef EGL_KHR_gl_renderbuffer_image +#define EGL_KHR_gl_renderbuffer_image 1 +#define EGL_GL_RENDERBUFFER_KHR 0x30B9 +#endif /* EGL_KHR_gl_renderbuffer_image */ + +#ifndef EGL_KHR_gl_texture_2D_image +#define EGL_KHR_gl_texture_2D_image 1 +#define EGL_GL_TEXTURE_2D_KHR 0x30B1 +#define EGL_GL_TEXTURE_LEVEL_KHR 0x30BC +#endif /* EGL_KHR_gl_texture_2D_image */ + +#ifndef EGL_KHR_gl_texture_3D_image +#define EGL_KHR_gl_texture_3D_image 1 +#define EGL_GL_TEXTURE_3D_KHR 0x30B2 +#define EGL_GL_TEXTURE_ZOFFSET_KHR 0x30BD +#endif /* EGL_KHR_gl_texture_3D_image */ + +#ifndef EGL_KHR_gl_texture_cubemap_image +#define EGL_KHR_gl_texture_cubemap_image 1 +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR 0x30B3 +#define 
EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X_KHR 0x30B4 +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y_KHR 0x30B5 +#define EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_KHR 0x30B6 +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z_KHR 0x30B7 +#define EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_KHR 0x30B8 +#endif /* EGL_KHR_gl_texture_cubemap_image */ + +#ifndef EGL_KHR_image +#define EGL_KHR_image 1 +typedef void *EGLImageKHR; +#define EGL_NATIVE_PIXMAP_KHR 0x30B0 +#define EGL_NO_IMAGE_KHR EGL_CAST(EGLImageKHR,0) +typedef EGLImageKHR (EGLAPIENTRYP PFNEGLCREATEIMAGEKHRPROC) (EGLDisplay dpy, EGLContext ctx, EGLenum target, EGLClientBuffer buffer, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYIMAGEKHRPROC) (EGLDisplay dpy, EGLImageKHR image); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLImageKHR EGLAPIENTRY eglCreateImageKHR (EGLDisplay dpy, EGLContext ctx, EGLenum target, EGLClientBuffer buffer, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroyImageKHR (EGLDisplay dpy, EGLImageKHR image); +#endif +#endif /* EGL_KHR_image */ + +#ifndef EGL_KHR_image_base +#define EGL_KHR_image_base 1 +#define EGL_IMAGE_PRESERVED_KHR 0x30D2 +#endif /* EGL_KHR_image_base */ + +#ifndef EGL_KHR_image_pixmap +#define EGL_KHR_image_pixmap 1 +#endif /* EGL_KHR_image_pixmap */ + +#ifndef EGL_KHR_lock_surface +#define EGL_KHR_lock_surface 1 +#define EGL_READ_SURFACE_BIT_KHR 0x0001 +#define EGL_WRITE_SURFACE_BIT_KHR 0x0002 +#define EGL_LOCK_SURFACE_BIT_KHR 0x0080 +#define EGL_OPTIMAL_FORMAT_BIT_KHR 0x0100 +#define EGL_MATCH_FORMAT_KHR 0x3043 +#define EGL_FORMAT_RGB_565_EXACT_KHR 0x30C0 +#define EGL_FORMAT_RGB_565_KHR 0x30C1 +#define EGL_FORMAT_RGBA_8888_EXACT_KHR 0x30C2 +#define EGL_FORMAT_RGBA_8888_KHR 0x30C3 +#define EGL_MAP_PRESERVE_PIXELS_KHR 0x30C4 +#define EGL_LOCK_USAGE_HINT_KHR 0x30C5 +#define EGL_BITMAP_POINTER_KHR 0x30C6 +#define EGL_BITMAP_PITCH_KHR 0x30C7 +#define EGL_BITMAP_ORIGIN_KHR 0x30C8 +#define EGL_BITMAP_PIXEL_RED_OFFSET_KHR 0x30C9 +#define EGL_BITMAP_PIXEL_GREEN_OFFSET_KHR 0x30CA +#define EGL_BITMAP_PIXEL_BLUE_OFFSET_KHR 0x30CB +#define EGL_BITMAP_PIXEL_ALPHA_OFFSET_KHR 0x30CC +#define EGL_BITMAP_PIXEL_LUMINANCE_OFFSET_KHR 0x30CD +#define EGL_LOWER_LEFT_KHR 0x30CE +#define EGL_UPPER_LEFT_KHR 0x30CF +typedef EGLBoolean (EGLAPIENTRYP PFNEGLLOCKSURFACEKHRPROC) (EGLDisplay dpy, EGLSurface surface, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLUNLOCKSURFACEKHRPROC) (EGLDisplay dpy, EGLSurface surface); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglLockSurfaceKHR (EGLDisplay dpy, EGLSurface surface, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglUnlockSurfaceKHR (EGLDisplay dpy, EGLSurface surface); +#endif +#endif /* EGL_KHR_lock_surface */ + +#ifndef EGL_KHR_lock_surface2 +#define EGL_KHR_lock_surface2 1 +#define EGL_BITMAP_PIXEL_SIZE_KHR 0x3110 +#endif /* EGL_KHR_lock_surface2 */ + +#ifndef EGL_KHR_lock_surface3 +#define EGL_KHR_lock_surface3 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSURFACE64KHRPROC) (EGLDisplay dpy, EGLSurface surface, EGLint attribute, EGLAttribKHR *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQuerySurface64KHR (EGLDisplay dpy, EGLSurface surface, EGLint attribute, EGLAttribKHR *value); +#endif +#endif /* EGL_KHR_lock_surface3 */ + +#ifndef EGL_KHR_mutable_render_buffer +#define EGL_KHR_mutable_render_buffer 1 +#define EGL_MUTABLE_RENDER_BUFFER_BIT_KHR 0x1000 +#endif /* EGL_KHR_mutable_render_buffer */ + +#ifndef EGL_KHR_no_config_context +#define EGL_KHR_no_config_context 1 +#define 
EGL_NO_CONFIG_KHR EGL_CAST(EGLConfig,0) +#endif /* EGL_KHR_no_config_context */ + +#ifndef EGL_KHR_partial_update +#define EGL_KHR_partial_update 1 +#define EGL_BUFFER_AGE_KHR 0x313D +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSETDAMAGEREGIONKHRPROC) (EGLDisplay dpy, EGLSurface surface, EGLint *rects, EGLint n_rects); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSetDamageRegionKHR (EGLDisplay dpy, EGLSurface surface, EGLint *rects, EGLint n_rects); +#endif +#endif /* EGL_KHR_partial_update */ + +#ifndef EGL_KHR_platform_android +#define EGL_KHR_platform_android 1 +#define EGL_PLATFORM_ANDROID_KHR 0x3141 +#endif /* EGL_KHR_platform_android */ + +#ifndef EGL_KHR_platform_gbm +#define EGL_KHR_platform_gbm 1 +#define EGL_PLATFORM_GBM_KHR 0x31D7 +#endif /* EGL_KHR_platform_gbm */ + +#ifndef EGL_KHR_platform_wayland +#define EGL_KHR_platform_wayland 1 +#define EGL_PLATFORM_WAYLAND_KHR 0x31D8 +#endif /* EGL_KHR_platform_wayland */ + +#ifndef EGL_KHR_platform_x11 +#define EGL_KHR_platform_x11 1 +#define EGL_PLATFORM_X11_KHR 0x31D5 +#define EGL_PLATFORM_X11_SCREEN_KHR 0x31D6 +#endif /* EGL_KHR_platform_x11 */ + +#ifndef EGL_KHR_reusable_sync +#define EGL_KHR_reusable_sync 1 +#ifdef KHRONOS_SUPPORT_INT64 +#define EGL_SYNC_STATUS_KHR 0x30F1 +#define EGL_SIGNALED_KHR 0x30F2 +#define EGL_UNSIGNALED_KHR 0x30F3 +#define EGL_TIMEOUT_EXPIRED_KHR 0x30F5 +#define EGL_CONDITION_SATISFIED_KHR 0x30F6 +#define EGL_SYNC_TYPE_KHR 0x30F7 +#define EGL_SYNC_REUSABLE_KHR 0x30FA +#define EGL_SYNC_FLUSH_COMMANDS_BIT_KHR 0x0001 +#define EGL_FOREVER_KHR 0xFFFFFFFFFFFFFFFFull +#define EGL_NO_SYNC_KHR EGL_CAST(EGLSyncKHR,0) +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSIGNALSYNCKHRPROC) (EGLDisplay dpy, EGLSyncKHR sync, EGLenum mode); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSignalSyncKHR (EGLDisplay dpy, EGLSyncKHR sync, EGLenum mode); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_KHR_reusable_sync */ + +#ifndef EGL_KHR_stream +#define EGL_KHR_stream 1 +typedef void *EGLStreamKHR; +typedef khronos_uint64_t EGLuint64KHR; +#ifdef KHRONOS_SUPPORT_INT64 +#define EGL_NO_STREAM_KHR EGL_CAST(EGLStreamKHR,0) +#define EGL_CONSUMER_LATENCY_USEC_KHR 0x3210 +#define EGL_PRODUCER_FRAME_KHR 0x3212 +#define EGL_CONSUMER_FRAME_KHR 0x3213 +#define EGL_STREAM_STATE_KHR 0x3214 +#define EGL_STREAM_STATE_CREATED_KHR 0x3215 +#define EGL_STREAM_STATE_CONNECTING_KHR 0x3216 +#define EGL_STREAM_STATE_EMPTY_KHR 0x3217 +#define EGL_STREAM_STATE_NEW_FRAME_AVAILABLE_KHR 0x3218 +#define EGL_STREAM_STATE_OLD_FRAME_AVAILABLE_KHR 0x3219 +#define EGL_STREAM_STATE_DISCONNECTED_KHR 0x321A +#define EGL_BAD_STREAM_KHR 0x321B +#define EGL_BAD_STATE_KHR 0x321C +typedef EGLStreamKHR (EGLAPIENTRYP PFNEGLCREATESTREAMKHRPROC) (EGLDisplay dpy, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYSTREAMKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMATTRIBKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLint value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSTREAMKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLint *value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSTREAMU64KHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLuint64KHR *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLStreamKHR EGLAPIENTRY eglCreateStreamKHR (EGLDisplay dpy, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroyStreamKHR (EGLDisplay dpy, EGLStreamKHR stream); +EGLAPI EGLBoolean EGLAPIENTRY 
eglStreamAttribKHR (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLint value); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryStreamKHR (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLint *value); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryStreamu64KHR (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLuint64KHR *value); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_KHR_stream */ + +#ifndef EGL_KHR_stream_attrib +#define EGL_KHR_stream_attrib 1 +#ifdef KHRONOS_SUPPORT_INT64 +typedef EGLStreamKHR (EGLAPIENTRYP PFNEGLCREATESTREAMATTRIBKHRPROC) (EGLDisplay dpy, const EGLAttrib *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSETSTREAMATTRIBKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLAttrib value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSTREAMATTRIBKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLAttrib *value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERACQUIREATTRIBKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERRELEASEATTRIBKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLStreamKHR EGLAPIENTRY eglCreateStreamAttribKHR (EGLDisplay dpy, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglSetStreamAttribKHR (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLAttrib value); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryStreamAttribKHR (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLAttrib *value); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerAcquireAttribKHR (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerReleaseAttribKHR (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_KHR_stream_attrib */ + +#ifndef EGL_KHR_stream_consumer_gltexture +#define EGL_KHR_stream_consumer_gltexture 1 +#ifdef EGL_KHR_stream +#define EGL_CONSUMER_ACQUIRE_TIMEOUT_USEC_KHR 0x321E +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERGLTEXTUREEXTERNALKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERACQUIREKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERRELEASEKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerGLTextureExternalKHR (EGLDisplay dpy, EGLStreamKHR stream); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerAcquireKHR (EGLDisplay dpy, EGLStreamKHR stream); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerReleaseKHR (EGLDisplay dpy, EGLStreamKHR stream); +#endif +#endif /* EGL_KHR_stream */ +#endif /* EGL_KHR_stream_consumer_gltexture */ + +#ifndef EGL_KHR_stream_cross_process_fd +#define EGL_KHR_stream_cross_process_fd 1 +typedef int EGLNativeFileDescriptorKHR; +#ifdef EGL_KHR_stream +#define EGL_NO_FILE_DESCRIPTOR_KHR EGL_CAST(EGLNativeFileDescriptorKHR,-1) +typedef EGLNativeFileDescriptorKHR (EGLAPIENTRYP PFNEGLGETSTREAMFILEDESCRIPTORKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream); +typedef EGLStreamKHR (EGLAPIENTRYP PFNEGLCREATESTREAMFROMFILEDESCRIPTORKHRPROC) (EGLDisplay dpy, EGLNativeFileDescriptorKHR file_descriptor); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLNativeFileDescriptorKHR EGLAPIENTRY eglGetStreamFileDescriptorKHR (EGLDisplay dpy, EGLStreamKHR stream); +EGLAPI EGLStreamKHR EGLAPIENTRY 
eglCreateStreamFromFileDescriptorKHR (EGLDisplay dpy, EGLNativeFileDescriptorKHR file_descriptor); +#endif +#endif /* EGL_KHR_stream */ +#endif /* EGL_KHR_stream_cross_process_fd */ + +#ifndef EGL_KHR_stream_fifo +#define EGL_KHR_stream_fifo 1 +#ifdef EGL_KHR_stream +#define EGL_STREAM_FIFO_LENGTH_KHR 0x31FC +#define EGL_STREAM_TIME_NOW_KHR 0x31FD +#define EGL_STREAM_TIME_CONSUMER_KHR 0x31FE +#define EGL_STREAM_TIME_PRODUCER_KHR 0x31FF +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSTREAMTIMEKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLTimeKHR *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryStreamTimeKHR (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLTimeKHR *value); +#endif +#endif /* EGL_KHR_stream */ +#endif /* EGL_KHR_stream_fifo */ + +#ifndef EGL_KHR_stream_producer_aldatalocator +#define EGL_KHR_stream_producer_aldatalocator 1 +#ifdef EGL_KHR_stream +#endif /* EGL_KHR_stream */ +#endif /* EGL_KHR_stream_producer_aldatalocator */ + +#ifndef EGL_KHR_stream_producer_eglsurface +#define EGL_KHR_stream_producer_eglsurface 1 +#ifdef EGL_KHR_stream +#define EGL_STREAM_BIT_KHR 0x0800 +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATESTREAMPRODUCERSURFACEKHRPROC) (EGLDisplay dpy, EGLConfig config, EGLStreamKHR stream, const EGLint *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSurface EGLAPIENTRY eglCreateStreamProducerSurfaceKHR (EGLDisplay dpy, EGLConfig config, EGLStreamKHR stream, const EGLint *attrib_list); +#endif +#endif /* EGL_KHR_stream */ +#endif /* EGL_KHR_stream_producer_eglsurface */ + +#ifndef EGL_KHR_surfaceless_context +#define EGL_KHR_surfaceless_context 1 +#endif /* EGL_KHR_surfaceless_context */ + +#ifndef EGL_KHR_swap_buffers_with_damage +#define EGL_KHR_swap_buffers_with_damage 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSWAPBUFFERSWITHDAMAGEKHRPROC) (EGLDisplay dpy, EGLSurface surface, const EGLint *rects, EGLint n_rects); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSwapBuffersWithDamageKHR (EGLDisplay dpy, EGLSurface surface, const EGLint *rects, EGLint n_rects); +#endif +#endif /* EGL_KHR_swap_buffers_with_damage */ + +#ifndef EGL_KHR_vg_parent_image +#define EGL_KHR_vg_parent_image 1 +#define EGL_VG_PARENT_IMAGE_KHR 0x30BA +#endif /* EGL_KHR_vg_parent_image */ + +#ifndef EGL_KHR_wait_sync +#define EGL_KHR_wait_sync 1 +typedef EGLint (EGLAPIENTRYP PFNEGLWAITSYNCKHRPROC) (EGLDisplay dpy, EGLSyncKHR sync, EGLint flags); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLint EGLAPIENTRY eglWaitSyncKHR (EGLDisplay dpy, EGLSyncKHR sync, EGLint flags); +#endif +#endif /* EGL_KHR_wait_sync */ + +#ifndef EGL_ANDROID_GLES_layers +#define EGL_ANDROID_GLES_layers 1 +#endif /* EGL_ANDROID_GLES_layers */ + +#ifndef EGL_ANDROID_blob_cache +#define EGL_ANDROID_blob_cache 1 +typedef khronos_ssize_t EGLsizeiANDROID; +typedef void (*EGLSetBlobFuncANDROID) (const void *key, EGLsizeiANDROID keySize, const void *value, EGLsizeiANDROID valueSize); +typedef EGLsizeiANDROID (*EGLGetBlobFuncANDROID) (const void *key, EGLsizeiANDROID keySize, void *value, EGLsizeiANDROID valueSize); +typedef void (EGLAPIENTRYP PFNEGLSETBLOBCACHEFUNCSANDROIDPROC) (EGLDisplay dpy, EGLSetBlobFuncANDROID set, EGLGetBlobFuncANDROID get); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI void EGLAPIENTRY eglSetBlobCacheFuncsANDROID (EGLDisplay dpy, EGLSetBlobFuncANDROID set, EGLGetBlobFuncANDROID get); +#endif +#endif /* EGL_ANDROID_blob_cache */ + +#ifndef EGL_ANDROID_create_native_client_buffer +#define EGL_ANDROID_create_native_client_buffer 1 
+#define EGL_NATIVE_BUFFER_USAGE_ANDROID 0x3143 +#define EGL_NATIVE_BUFFER_USAGE_PROTECTED_BIT_ANDROID 0x00000001 +#define EGL_NATIVE_BUFFER_USAGE_RENDERBUFFER_BIT_ANDROID 0x00000002 +#define EGL_NATIVE_BUFFER_USAGE_TEXTURE_BIT_ANDROID 0x00000004 +typedef EGLClientBuffer (EGLAPIENTRYP PFNEGLCREATENATIVECLIENTBUFFERANDROIDPROC) (const EGLint *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLClientBuffer EGLAPIENTRY eglCreateNativeClientBufferANDROID (const EGLint *attrib_list); +#endif +#endif /* EGL_ANDROID_create_native_client_buffer */ + +#ifndef EGL_ANDROID_framebuffer_target +#define EGL_ANDROID_framebuffer_target 1 +#define EGL_FRAMEBUFFER_TARGET_ANDROID 0x3147 +#endif /* EGL_ANDROID_framebuffer_target */ + +#ifndef EGL_ANDROID_front_buffer_auto_refresh +#define EGL_ANDROID_front_buffer_auto_refresh 1 +#define EGL_FRONT_BUFFER_AUTO_REFRESH_ANDROID 0x314C +#endif /* EGL_ANDROID_front_buffer_auto_refresh */ + +#ifndef EGL_ANDROID_get_frame_timestamps +#define EGL_ANDROID_get_frame_timestamps 1 +typedef khronos_stime_nanoseconds_t EGLnsecsANDROID; +#define EGL_TIMESTAMP_PENDING_ANDROID EGL_CAST(EGLnsecsANDROID,-2) +#define EGL_TIMESTAMP_INVALID_ANDROID EGL_CAST(EGLnsecsANDROID,-1) +#define EGL_TIMESTAMPS_ANDROID 0x3430 +#define EGL_COMPOSITE_DEADLINE_ANDROID 0x3431 +#define EGL_COMPOSITE_INTERVAL_ANDROID 0x3432 +#define EGL_COMPOSITE_TO_PRESENT_LATENCY_ANDROID 0x3433 +#define EGL_REQUESTED_PRESENT_TIME_ANDROID 0x3434 +#define EGL_RENDERING_COMPLETE_TIME_ANDROID 0x3435 +#define EGL_COMPOSITION_LATCH_TIME_ANDROID 0x3436 +#define EGL_FIRST_COMPOSITION_START_TIME_ANDROID 0x3437 +#define EGL_LAST_COMPOSITION_START_TIME_ANDROID 0x3438 +#define EGL_FIRST_COMPOSITION_GPU_FINISHED_TIME_ANDROID 0x3439 +#define EGL_DISPLAY_PRESENT_TIME_ANDROID 0x343A +#define EGL_DEQUEUE_READY_TIME_ANDROID 0x343B +#define EGL_READS_DONE_TIME_ANDROID 0x343C +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETCOMPOSITORTIMINGSUPPORTEDANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLint name); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETCOMPOSITORTIMINGANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLint numTimestamps, const EGLint *names, EGLnsecsANDROID *values); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETNEXTFRAMEIDANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLuint64KHR *frameId); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETFRAMETIMESTAMPSUPPORTEDANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLint timestamp); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETFRAMETIMESTAMPSANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLuint64KHR frameId, EGLint numTimestamps, const EGLint *timestamps, EGLnsecsANDROID *values); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglGetCompositorTimingSupportedANDROID (EGLDisplay dpy, EGLSurface surface, EGLint name); +EGLAPI EGLBoolean EGLAPIENTRY eglGetCompositorTimingANDROID (EGLDisplay dpy, EGLSurface surface, EGLint numTimestamps, const EGLint *names, EGLnsecsANDROID *values); +EGLAPI EGLBoolean EGLAPIENTRY eglGetNextFrameIdANDROID (EGLDisplay dpy, EGLSurface surface, EGLuint64KHR *frameId); +EGLAPI EGLBoolean EGLAPIENTRY eglGetFrameTimestampSupportedANDROID (EGLDisplay dpy, EGLSurface surface, EGLint timestamp); +EGLAPI EGLBoolean EGLAPIENTRY eglGetFrameTimestampsANDROID (EGLDisplay dpy, EGLSurface surface, EGLuint64KHR frameId, EGLint numTimestamps, const EGLint *timestamps, EGLnsecsANDROID *values); +#endif +#endif /* EGL_ANDROID_get_frame_timestamps */ + +#ifndef EGL_ANDROID_get_native_client_buffer +#define EGL_ANDROID_get_native_client_buffer 1 
+struct AHardwareBuffer; +typedef EGLClientBuffer (EGLAPIENTRYP PFNEGLGETNATIVECLIENTBUFFERANDROIDPROC) (const struct AHardwareBuffer *buffer); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLClientBuffer EGLAPIENTRY eglGetNativeClientBufferANDROID (const struct AHardwareBuffer *buffer); +#endif +#endif /* EGL_ANDROID_get_native_client_buffer */ + +#ifndef EGL_ANDROID_image_native_buffer +#define EGL_ANDROID_image_native_buffer 1 +#define EGL_NATIVE_BUFFER_ANDROID 0x3140 +#endif /* EGL_ANDROID_image_native_buffer */ + +#ifndef EGL_ANDROID_native_fence_sync +#define EGL_ANDROID_native_fence_sync 1 +#define EGL_SYNC_NATIVE_FENCE_ANDROID 0x3144 +#define EGL_SYNC_NATIVE_FENCE_FD_ANDROID 0x3145 +#define EGL_SYNC_NATIVE_FENCE_SIGNALED_ANDROID 0x3146 +#define EGL_NO_NATIVE_FENCE_FD_ANDROID -1 +typedef EGLint (EGLAPIENTRYP PFNEGLDUPNATIVEFENCEFDANDROIDPROC) (EGLDisplay dpy, EGLSyncKHR sync); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLint EGLAPIENTRY eglDupNativeFenceFDANDROID (EGLDisplay dpy, EGLSyncKHR sync); +#endif +#endif /* EGL_ANDROID_native_fence_sync */ + +#ifndef EGL_ANDROID_presentation_time +#define EGL_ANDROID_presentation_time 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLPRESENTATIONTIMEANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLnsecsANDROID time); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglPresentationTimeANDROID (EGLDisplay dpy, EGLSurface surface, EGLnsecsANDROID time); +#endif +#endif /* EGL_ANDROID_presentation_time */ + +#ifndef EGL_ANDROID_recordable +#define EGL_ANDROID_recordable 1 +#define EGL_RECORDABLE_ANDROID 0x3142 +#endif /* EGL_ANDROID_recordable */ + +#ifndef EGL_ANGLE_d3d_share_handle_client_buffer +#define EGL_ANGLE_d3d_share_handle_client_buffer 1 +#define EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE 0x3200 +#endif /* EGL_ANGLE_d3d_share_handle_client_buffer */ + +#ifndef EGL_ANGLE_device_d3d +#define EGL_ANGLE_device_d3d 1 +#define EGL_D3D9_DEVICE_ANGLE 0x33A0 +#define EGL_D3D11_DEVICE_ANGLE 0x33A1 +#endif /* EGL_ANGLE_device_d3d */ + +#ifndef EGL_ANGLE_query_surface_pointer +#define EGL_ANGLE_query_surface_pointer 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSURFACEPOINTERANGLEPROC) (EGLDisplay dpy, EGLSurface surface, EGLint attribute, void **value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQuerySurfacePointerANGLE (EGLDisplay dpy, EGLSurface surface, EGLint attribute, void **value); +#endif +#endif /* EGL_ANGLE_query_surface_pointer */ + +#ifndef EGL_ANGLE_surface_d3d_texture_2d_share_handle +#define EGL_ANGLE_surface_d3d_texture_2d_share_handle 1 +#endif /* EGL_ANGLE_surface_d3d_texture_2d_share_handle */ + +#ifndef EGL_ANGLE_window_fixed_size +#define EGL_ANGLE_window_fixed_size 1 +#define EGL_FIXED_SIZE_ANGLE 0x3201 +#endif /* EGL_ANGLE_window_fixed_size */ + +#ifndef EGL_ARM_image_format +#define EGL_ARM_image_format 1 +#define EGL_COLOR_COMPONENT_TYPE_UNSIGNED_INTEGER_ARM 0x3287 +#define EGL_COLOR_COMPONENT_TYPE_INTEGER_ARM 0x3288 +#endif /* EGL_ARM_image_format */ + +#ifndef EGL_ARM_implicit_external_sync +#define EGL_ARM_implicit_external_sync 1 +#define EGL_SYNC_PRIOR_COMMANDS_IMPLICIT_EXTERNAL_ARM 0x328A +#endif /* EGL_ARM_implicit_external_sync */ + +#ifndef EGL_ARM_pixmap_multisample_discard +#define EGL_ARM_pixmap_multisample_discard 1 +#define EGL_DISCARD_SAMPLES_ARM 0x3286 +#endif /* EGL_ARM_pixmap_multisample_discard */ + +#ifndef EGL_EXT_bind_to_front +#define EGL_EXT_bind_to_front 1 +#define EGL_FRONT_BUFFER_EXT 0x3464 +#endif /* EGL_EXT_bind_to_front */ + +#ifndef EGL_EXT_buffer_age +#define 
EGL_EXT_buffer_age 1 +#define EGL_BUFFER_AGE_EXT 0x313D +#endif /* EGL_EXT_buffer_age */ + +#ifndef EGL_EXT_client_extensions +#define EGL_EXT_client_extensions 1 +#endif /* EGL_EXT_client_extensions */ + +#ifndef EGL_EXT_client_sync +#define EGL_EXT_client_sync 1 +#define EGL_SYNC_CLIENT_EXT 0x3364 +#define EGL_SYNC_CLIENT_SIGNAL_EXT 0x3365 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCLIENTSIGNALSYNCEXTPROC) (EGLDisplay dpy, EGLSync sync, const EGLAttrib *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglClientSignalSyncEXT (EGLDisplay dpy, EGLSync sync, const EGLAttrib *attrib_list); +#endif +#endif /* EGL_EXT_client_sync */ + +#ifndef EGL_EXT_compositor +#define EGL_EXT_compositor 1 +#define EGL_PRIMARY_COMPOSITOR_CONTEXT_EXT 0x3460 +#define EGL_EXTERNAL_REF_ID_EXT 0x3461 +#define EGL_COMPOSITOR_DROP_NEWEST_FRAME_EXT 0x3462 +#define EGL_COMPOSITOR_KEEP_NEWEST_FRAME_EXT 0x3463 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSETCONTEXTLISTEXTPROC) (const EGLint *external_ref_ids, EGLint num_entries); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSETCONTEXTATTRIBUTESEXTPROC) (EGLint external_ref_id, const EGLint *context_attributes, EGLint num_entries); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSETWINDOWLISTEXTPROC) (EGLint external_ref_id, const EGLint *external_win_ids, EGLint num_entries); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSETWINDOWATTRIBUTESEXTPROC) (EGLint external_win_id, const EGLint *window_attributes, EGLint num_entries); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORBINDTEXWINDOWEXTPROC) (EGLint external_win_id); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSETSIZEEXTPROC) (EGLint external_win_id, EGLint width, EGLint height); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSWAPPOLICYEXTPROC) (EGLint external_win_id, EGLint policy); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSetContextListEXT (const EGLint *external_ref_ids, EGLint num_entries); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSetContextAttributesEXT (EGLint external_ref_id, const EGLint *context_attributes, EGLint num_entries); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSetWindowListEXT (EGLint external_ref_id, const EGLint *external_win_ids, EGLint num_entries); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSetWindowAttributesEXT (EGLint external_win_id, const EGLint *window_attributes, EGLint num_entries); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorBindTexWindowEXT (EGLint external_win_id); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSetSizeEXT (EGLint external_win_id, EGLint width, EGLint height); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSwapPolicyEXT (EGLint external_win_id, EGLint policy); +#endif +#endif /* EGL_EXT_compositor */ + +#ifndef EGL_EXT_create_context_robustness +#define EGL_EXT_create_context_robustness 1 +#define EGL_CONTEXT_OPENGL_ROBUST_ACCESS_EXT 0x30BF +#define EGL_CONTEXT_OPENGL_RESET_NOTIFICATION_STRATEGY_EXT 0x3138 +#define EGL_NO_RESET_NOTIFICATION_EXT 0x31BE +#define EGL_LOSE_CONTEXT_ON_RESET_EXT 0x31BF +#endif /* EGL_EXT_create_context_robustness */ + +#ifndef EGL_EXT_device_base +#define EGL_EXT_device_base 1 +typedef void *EGLDeviceEXT; +#define EGL_NO_DEVICE_EXT EGL_CAST(EGLDeviceEXT,0) +#define EGL_BAD_DEVICE_EXT 0x322B +#define EGL_DEVICE_EXT 0x322C +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDEVICEATTRIBEXTPROC) (EGLDeviceEXT device, EGLint attribute, EGLAttrib *value); +typedef const char *(EGLAPIENTRYP PFNEGLQUERYDEVICESTRINGEXTPROC) (EGLDeviceEXT device, EGLint name); +typedef 
EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDEVICESEXTPROC) (EGLint max_devices, EGLDeviceEXT *devices, EGLint *num_devices); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDISPLAYATTRIBEXTPROC) (EGLDisplay dpy, EGLint attribute, EGLAttrib *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDeviceAttribEXT (EGLDeviceEXT device, EGLint attribute, EGLAttrib *value); +EGLAPI const char *EGLAPIENTRY eglQueryDeviceStringEXT (EGLDeviceEXT device, EGLint name); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDevicesEXT (EGLint max_devices, EGLDeviceEXT *devices, EGLint *num_devices); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDisplayAttribEXT (EGLDisplay dpy, EGLint attribute, EGLAttrib *value); +#endif +#endif /* EGL_EXT_device_base */ + +#ifndef EGL_EXT_device_drm +#define EGL_EXT_device_drm 1 +#define EGL_DRM_DEVICE_FILE_EXT 0x3233 +#define EGL_DRM_MASTER_FD_EXT 0x333C +#endif /* EGL_EXT_device_drm */ + +#ifndef EGL_EXT_device_enumeration +#define EGL_EXT_device_enumeration 1 +#endif /* EGL_EXT_device_enumeration */ + +#ifndef EGL_EXT_device_openwf +#define EGL_EXT_device_openwf 1 +#define EGL_OPENWF_DEVICE_ID_EXT 0x3237 +#endif /* EGL_EXT_device_openwf */ + +#ifndef EGL_EXT_device_query +#define EGL_EXT_device_query 1 +#endif /* EGL_EXT_device_query */ + +#ifndef EGL_EXT_device_query_name +#define EGL_EXT_device_query_name 1 +#define EGL_RENDERER_EXT 0x335F +#endif /* EGL_EXT_device_query_name */ + +#ifndef EGL_EXT_gl_colorspace_bt2020_linear +#define EGL_EXT_gl_colorspace_bt2020_linear 1 +#define EGL_GL_COLORSPACE_BT2020_LINEAR_EXT 0x333F +#endif /* EGL_EXT_gl_colorspace_bt2020_linear */ + +#ifndef EGL_EXT_gl_colorspace_bt2020_pq +#define EGL_EXT_gl_colorspace_bt2020_pq 1 +#define EGL_GL_COLORSPACE_BT2020_PQ_EXT 0x3340 +#endif /* EGL_EXT_gl_colorspace_bt2020_pq */ + +#ifndef EGL_EXT_gl_colorspace_display_p3 +#define EGL_EXT_gl_colorspace_display_p3 1 +#define EGL_GL_COLORSPACE_DISPLAY_P3_EXT 0x3363 +#endif /* EGL_EXT_gl_colorspace_display_p3 */ + +#ifndef EGL_EXT_gl_colorspace_display_p3_linear +#define EGL_EXT_gl_colorspace_display_p3_linear 1 +#define EGL_GL_COLORSPACE_DISPLAY_P3_LINEAR_EXT 0x3362 +#endif /* EGL_EXT_gl_colorspace_display_p3_linear */ + +#ifndef EGL_EXT_gl_colorspace_display_p3_passthrough +#define EGL_EXT_gl_colorspace_display_p3_passthrough 1 +#define EGL_GL_COLORSPACE_DISPLAY_P3_PASSTHROUGH_EXT 0x3490 +#endif /* EGL_EXT_gl_colorspace_display_p3_passthrough */ + +#ifndef EGL_EXT_gl_colorspace_scrgb +#define EGL_EXT_gl_colorspace_scrgb 1 +#define EGL_GL_COLORSPACE_SCRGB_EXT 0x3351 +#endif /* EGL_EXT_gl_colorspace_scrgb */ + +#ifndef EGL_EXT_gl_colorspace_scrgb_linear +#define EGL_EXT_gl_colorspace_scrgb_linear 1 +#define EGL_GL_COLORSPACE_SCRGB_LINEAR_EXT 0x3350 +#endif /* EGL_EXT_gl_colorspace_scrgb_linear */ + +#ifndef EGL_EXT_image_dma_buf_import +#define EGL_EXT_image_dma_buf_import 1 +#define EGL_LINUX_DMA_BUF_EXT 0x3270 +#define EGL_LINUX_DRM_FOURCC_EXT 0x3271 +#define EGL_DMA_BUF_PLANE0_FD_EXT 0x3272 +#define EGL_DMA_BUF_PLANE0_OFFSET_EXT 0x3273 +#define EGL_DMA_BUF_PLANE0_PITCH_EXT 0x3274 +#define EGL_DMA_BUF_PLANE1_FD_EXT 0x3275 +#define EGL_DMA_BUF_PLANE1_OFFSET_EXT 0x3276 +#define EGL_DMA_BUF_PLANE1_PITCH_EXT 0x3277 +#define EGL_DMA_BUF_PLANE2_FD_EXT 0x3278 +#define EGL_DMA_BUF_PLANE2_OFFSET_EXT 0x3279 +#define EGL_DMA_BUF_PLANE2_PITCH_EXT 0x327A +#define EGL_YUV_COLOR_SPACE_HINT_EXT 0x327B +#define EGL_SAMPLE_RANGE_HINT_EXT 0x327C +#define EGL_YUV_CHROMA_HORIZONTAL_SITING_HINT_EXT 0x327D +#define EGL_YUV_CHROMA_VERTICAL_SITING_HINT_EXT 0x327E +#define 
EGL_ITU_REC601_EXT 0x327F +#define EGL_ITU_REC709_EXT 0x3280 +#define EGL_ITU_REC2020_EXT 0x3281 +#define EGL_YUV_FULL_RANGE_EXT 0x3282 +#define EGL_YUV_NARROW_RANGE_EXT 0x3283 +#define EGL_YUV_CHROMA_SITING_0_EXT 0x3284 +#define EGL_YUV_CHROMA_SITING_0_5_EXT 0x3285 +#endif /* EGL_EXT_image_dma_buf_import */ + +#ifndef EGL_EXT_image_dma_buf_import_modifiers +#define EGL_EXT_image_dma_buf_import_modifiers 1 +#define EGL_DMA_BUF_PLANE3_FD_EXT 0x3440 +#define EGL_DMA_BUF_PLANE3_OFFSET_EXT 0x3441 +#define EGL_DMA_BUF_PLANE3_PITCH_EXT 0x3442 +#define EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT 0x3443 +#define EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT 0x3444 +#define EGL_DMA_BUF_PLANE1_MODIFIER_LO_EXT 0x3445 +#define EGL_DMA_BUF_PLANE1_MODIFIER_HI_EXT 0x3446 +#define EGL_DMA_BUF_PLANE2_MODIFIER_LO_EXT 0x3447 +#define EGL_DMA_BUF_PLANE2_MODIFIER_HI_EXT 0x3448 +#define EGL_DMA_BUF_PLANE3_MODIFIER_LO_EXT 0x3449 +#define EGL_DMA_BUF_PLANE3_MODIFIER_HI_EXT 0x344A +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDMABUFFORMATSEXTPROC) (EGLDisplay dpy, EGLint max_formats, EGLint *formats, EGLint *num_formats); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDMABUFMODIFIERSEXTPROC) (EGLDisplay dpy, EGLint format, EGLint max_modifiers, EGLuint64KHR *modifiers, EGLBoolean *external_only, EGLint *num_modifiers); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDmaBufFormatsEXT (EGLDisplay dpy, EGLint max_formats, EGLint *formats, EGLint *num_formats); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDmaBufModifiersEXT (EGLDisplay dpy, EGLint format, EGLint max_modifiers, EGLuint64KHR *modifiers, EGLBoolean *external_only, EGLint *num_modifiers); +#endif +#endif /* EGL_EXT_image_dma_buf_import_modifiers */ + +#ifndef EGL_EXT_image_gl_colorspace +#define EGL_EXT_image_gl_colorspace 1 +#define EGL_GL_COLORSPACE_DEFAULT_EXT 0x314D +#endif /* EGL_EXT_image_gl_colorspace */ + +#ifndef EGL_EXT_image_implicit_sync_control +#define EGL_EXT_image_implicit_sync_control 1 +#define EGL_IMPORT_SYNC_TYPE_EXT 0x3470 +#define EGL_IMPORT_IMPLICIT_SYNC_EXT 0x3471 +#define EGL_IMPORT_EXPLICIT_SYNC_EXT 0x3472 +#endif /* EGL_EXT_image_implicit_sync_control */ + +#ifndef EGL_EXT_multiview_window +#define EGL_EXT_multiview_window 1 +#define EGL_MULTIVIEW_VIEW_COUNT_EXT 0x3134 +#endif /* EGL_EXT_multiview_window */ + +#ifndef EGL_EXT_output_base +#define EGL_EXT_output_base 1 +typedef void *EGLOutputLayerEXT; +typedef void *EGLOutputPortEXT; +#define EGL_NO_OUTPUT_LAYER_EXT EGL_CAST(EGLOutputLayerEXT,0) +#define EGL_NO_OUTPUT_PORT_EXT EGL_CAST(EGLOutputPortEXT,0) +#define EGL_BAD_OUTPUT_LAYER_EXT 0x322D +#define EGL_BAD_OUTPUT_PORT_EXT 0x322E +#define EGL_SWAP_INTERVAL_EXT 0x322F +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETOUTPUTLAYERSEXTPROC) (EGLDisplay dpy, const EGLAttrib *attrib_list, EGLOutputLayerEXT *layers, EGLint max_layers, EGLint *num_layers); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETOUTPUTPORTSEXTPROC) (EGLDisplay dpy, const EGLAttrib *attrib_list, EGLOutputPortEXT *ports, EGLint max_ports, EGLint *num_ports); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLOUTPUTLAYERATTRIBEXTPROC) (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint attribute, EGLAttrib value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYOUTPUTLAYERATTRIBEXTPROC) (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint attribute, EGLAttrib *value); +typedef const char *(EGLAPIENTRYP PFNEGLQUERYOUTPUTLAYERSTRINGEXTPROC) (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint name); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLOUTPUTPORTATTRIBEXTPROC) (EGLDisplay dpy, EGLOutputPortEXT port, 
EGLint attribute, EGLAttrib value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYOUTPUTPORTATTRIBEXTPROC) (EGLDisplay dpy, EGLOutputPortEXT port, EGLint attribute, EGLAttrib *value); +typedef const char *(EGLAPIENTRYP PFNEGLQUERYOUTPUTPORTSTRINGEXTPROC) (EGLDisplay dpy, EGLOutputPortEXT port, EGLint name); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglGetOutputLayersEXT (EGLDisplay dpy, const EGLAttrib *attrib_list, EGLOutputLayerEXT *layers, EGLint max_layers, EGLint *num_layers); +EGLAPI EGLBoolean EGLAPIENTRY eglGetOutputPortsEXT (EGLDisplay dpy, const EGLAttrib *attrib_list, EGLOutputPortEXT *ports, EGLint max_ports, EGLint *num_ports); +EGLAPI EGLBoolean EGLAPIENTRY eglOutputLayerAttribEXT (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint attribute, EGLAttrib value); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryOutputLayerAttribEXT (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint attribute, EGLAttrib *value); +EGLAPI const char *EGLAPIENTRY eglQueryOutputLayerStringEXT (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint name); +EGLAPI EGLBoolean EGLAPIENTRY eglOutputPortAttribEXT (EGLDisplay dpy, EGLOutputPortEXT port, EGLint attribute, EGLAttrib value); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryOutputPortAttribEXT (EGLDisplay dpy, EGLOutputPortEXT port, EGLint attribute, EGLAttrib *value); +EGLAPI const char *EGLAPIENTRY eglQueryOutputPortStringEXT (EGLDisplay dpy, EGLOutputPortEXT port, EGLint name); +#endif +#endif /* EGL_EXT_output_base */ + +#ifndef EGL_EXT_output_drm +#define EGL_EXT_output_drm 1 +#define EGL_DRM_CRTC_EXT 0x3234 +#define EGL_DRM_PLANE_EXT 0x3235 +#define EGL_DRM_CONNECTOR_EXT 0x3236 +#endif /* EGL_EXT_output_drm */ + +#ifndef EGL_EXT_output_openwf +#define EGL_EXT_output_openwf 1 +#define EGL_OPENWF_PIPELINE_ID_EXT 0x3238 +#define EGL_OPENWF_PORT_ID_EXT 0x3239 +#endif /* EGL_EXT_output_openwf */ + +#ifndef EGL_EXT_pixel_format_float +#define EGL_EXT_pixel_format_float 1 +#define EGL_COLOR_COMPONENT_TYPE_EXT 0x3339 +#define EGL_COLOR_COMPONENT_TYPE_FIXED_EXT 0x333A +#define EGL_COLOR_COMPONENT_TYPE_FLOAT_EXT 0x333B +#endif /* EGL_EXT_pixel_format_float */ + +#ifndef EGL_EXT_platform_base +#define EGL_EXT_platform_base 1 +typedef EGLDisplay (EGLAPIENTRYP PFNEGLGETPLATFORMDISPLAYEXTPROC) (EGLenum platform, void *native_display, const EGLint *attrib_list); +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATEPLATFORMWINDOWSURFACEEXTPROC) (EGLDisplay dpy, EGLConfig config, void *native_window, const EGLint *attrib_list); +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATEPLATFORMPIXMAPSURFACEEXTPROC) (EGLDisplay dpy, EGLConfig config, void *native_pixmap, const EGLint *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLDisplay EGLAPIENTRY eglGetPlatformDisplayEXT (EGLenum platform, void *native_display, const EGLint *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePlatformWindowSurfaceEXT (EGLDisplay dpy, EGLConfig config, void *native_window, const EGLint *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePlatformPixmapSurfaceEXT (EGLDisplay dpy, EGLConfig config, void *native_pixmap, const EGLint *attrib_list); +#endif +#endif /* EGL_EXT_platform_base */ + +#ifndef EGL_EXT_platform_device +#define EGL_EXT_platform_device 1 +#define EGL_PLATFORM_DEVICE_EXT 0x313F +#endif /* EGL_EXT_platform_device */ + +#ifndef EGL_EXT_platform_wayland +#define EGL_EXT_platform_wayland 1 +#define EGL_PLATFORM_WAYLAND_EXT 0x31D8 +#endif /* EGL_EXT_platform_wayland */ + +#ifndef EGL_EXT_platform_x11 +#define EGL_EXT_platform_x11 1 +#define EGL_PLATFORM_X11_EXT 0x31D5 +#define 
EGL_PLATFORM_X11_SCREEN_EXT 0x31D6 +#endif /* EGL_EXT_platform_x11 */ + +#ifndef EGL_EXT_platform_xcb +#define EGL_EXT_platform_xcb 1 +#define EGL_PLATFORM_XCB_EXT 0x31DC +#define EGL_PLATFORM_XCB_SCREEN_EXT 0x31DE +#endif /* EGL_EXT_platform_xcb */ + +#ifndef EGL_EXT_protected_content +#define EGL_EXT_protected_content 1 +#define EGL_PROTECTED_CONTENT_EXT 0x32C0 +#endif /* EGL_EXT_protected_content */ + +#ifndef EGL_EXT_protected_surface +#define EGL_EXT_protected_surface 1 +#endif /* EGL_EXT_protected_surface */ + +#ifndef EGL_EXT_stream_consumer_egloutput +#define EGL_EXT_stream_consumer_egloutput 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMEROUTPUTEXTPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLOutputLayerEXT layer); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerOutputEXT (EGLDisplay dpy, EGLStreamKHR stream, EGLOutputLayerEXT layer); +#endif +#endif /* EGL_EXT_stream_consumer_egloutput */ + +#ifndef EGL_EXT_surface_CTA861_3_metadata +#define EGL_EXT_surface_CTA861_3_metadata 1 +#define EGL_CTA861_3_MAX_CONTENT_LIGHT_LEVEL_EXT 0x3360 +#define EGL_CTA861_3_MAX_FRAME_AVERAGE_LEVEL_EXT 0x3361 +#endif /* EGL_EXT_surface_CTA861_3_metadata */ + +#ifndef EGL_EXT_surface_SMPTE2086_metadata +#define EGL_EXT_surface_SMPTE2086_metadata 1 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_RX_EXT 0x3341 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_RY_EXT 0x3342 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_GX_EXT 0x3343 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_GY_EXT 0x3344 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_BX_EXT 0x3345 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_BY_EXT 0x3346 +#define EGL_SMPTE2086_WHITE_POINT_X_EXT 0x3347 +#define EGL_SMPTE2086_WHITE_POINT_Y_EXT 0x3348 +#define EGL_SMPTE2086_MAX_LUMINANCE_EXT 0x3349 +#define EGL_SMPTE2086_MIN_LUMINANCE_EXT 0x334A +#define EGL_METADATA_SCALING_EXT 50000 +#endif /* EGL_EXT_surface_SMPTE2086_metadata */ + +#ifndef EGL_EXT_swap_buffers_with_damage +#define EGL_EXT_swap_buffers_with_damage 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSWAPBUFFERSWITHDAMAGEEXTPROC) (EGLDisplay dpy, EGLSurface surface, const EGLint *rects, EGLint n_rects); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSwapBuffersWithDamageEXT (EGLDisplay dpy, EGLSurface surface, const EGLint *rects, EGLint n_rects); +#endif +#endif /* EGL_EXT_swap_buffers_with_damage */ + +#ifndef EGL_EXT_sync_reuse +#define EGL_EXT_sync_reuse 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLUNSIGNALSYNCEXTPROC) (EGLDisplay dpy, EGLSync sync, const EGLAttrib *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglUnsignalSyncEXT (EGLDisplay dpy, EGLSync sync, const EGLAttrib *attrib_list); +#endif +#endif /* EGL_EXT_sync_reuse */ + +#ifndef EGL_EXT_yuv_surface +#define EGL_EXT_yuv_surface 1 +#define EGL_YUV_ORDER_EXT 0x3301 +#define EGL_YUV_NUMBER_OF_PLANES_EXT 0x3311 +#define EGL_YUV_SUBSAMPLE_EXT 0x3312 +#define EGL_YUV_DEPTH_RANGE_EXT 0x3317 +#define EGL_YUV_CSC_STANDARD_EXT 0x330A +#define EGL_YUV_PLANE_BPP_EXT 0x331A +#define EGL_YUV_BUFFER_EXT 0x3300 +#define EGL_YUV_ORDER_YUV_EXT 0x3302 +#define EGL_YUV_ORDER_YVU_EXT 0x3303 +#define EGL_YUV_ORDER_YUYV_EXT 0x3304 +#define EGL_YUV_ORDER_UYVY_EXT 0x3305 +#define EGL_YUV_ORDER_YVYU_EXT 0x3306 +#define EGL_YUV_ORDER_VYUY_EXT 0x3307 +#define EGL_YUV_ORDER_AYUV_EXT 0x3308 +#define EGL_YUV_SUBSAMPLE_4_2_0_EXT 0x3313 +#define EGL_YUV_SUBSAMPLE_4_2_2_EXT 0x3314 +#define EGL_YUV_SUBSAMPLE_4_4_4_EXT 0x3315 +#define EGL_YUV_DEPTH_RANGE_LIMITED_EXT 0x3318 +#define EGL_YUV_DEPTH_RANGE_FULL_EXT 0x3319 +#define 
EGL_YUV_CSC_STANDARD_601_EXT 0x330B +#define EGL_YUV_CSC_STANDARD_709_EXT 0x330C +#define EGL_YUV_CSC_STANDARD_2020_EXT 0x330D +#define EGL_YUV_PLANE_BPP_0_EXT 0x331B +#define EGL_YUV_PLANE_BPP_8_EXT 0x331C +#define EGL_YUV_PLANE_BPP_10_EXT 0x331D +#endif /* EGL_EXT_yuv_surface */ + +#ifndef EGL_HI_clientpixmap +#define EGL_HI_clientpixmap 1 +struct EGLClientPixmapHI { + void *pData; + EGLint iWidth; + EGLint iHeight; + EGLint iStride; +}; +#define EGL_CLIENT_PIXMAP_POINTER_HI 0x8F74 +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATEPIXMAPSURFACEHIPROC) (EGLDisplay dpy, EGLConfig config, struct EGLClientPixmapHI *pixmap); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSurface EGLAPIENTRY eglCreatePixmapSurfaceHI (EGLDisplay dpy, EGLConfig config, struct EGLClientPixmapHI *pixmap); +#endif +#endif /* EGL_HI_clientpixmap */ + +#ifndef EGL_HI_colorformats +#define EGL_HI_colorformats 1 +#define EGL_COLOR_FORMAT_HI 0x8F70 +#define EGL_COLOR_RGB_HI 0x8F71 +#define EGL_COLOR_RGBA_HI 0x8F72 +#define EGL_COLOR_ARGB_HI 0x8F73 +#endif /* EGL_HI_colorformats */ + +#ifndef EGL_IMG_context_priority +#define EGL_IMG_context_priority 1 +#define EGL_CONTEXT_PRIORITY_LEVEL_IMG 0x3100 +#define EGL_CONTEXT_PRIORITY_HIGH_IMG 0x3101 +#define EGL_CONTEXT_PRIORITY_MEDIUM_IMG 0x3102 +#define EGL_CONTEXT_PRIORITY_LOW_IMG 0x3103 +#endif /* EGL_IMG_context_priority */ + +#ifndef EGL_IMG_image_plane_attribs +#define EGL_IMG_image_plane_attribs 1 +#define EGL_NATIVE_BUFFER_MULTIPLANE_SEPARATE_IMG 0x3105 +#define EGL_NATIVE_BUFFER_PLANE_OFFSET_IMG 0x3106 +#endif /* EGL_IMG_image_plane_attribs */ + +#ifndef EGL_MESA_drm_image +#define EGL_MESA_drm_image 1 +#define EGL_DRM_BUFFER_FORMAT_MESA 0x31D0 +#define EGL_DRM_BUFFER_USE_MESA 0x31D1 +#define EGL_DRM_BUFFER_FORMAT_ARGB32_MESA 0x31D2 +#define EGL_DRM_BUFFER_MESA 0x31D3 +#define EGL_DRM_BUFFER_STRIDE_MESA 0x31D4 +#define EGL_DRM_BUFFER_USE_SCANOUT_MESA 0x00000001 +#define EGL_DRM_BUFFER_USE_SHARE_MESA 0x00000002 +#define EGL_DRM_BUFFER_USE_CURSOR_MESA 0x00000004 +typedef EGLImageKHR (EGLAPIENTRYP PFNEGLCREATEDRMIMAGEMESAPROC) (EGLDisplay dpy, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLEXPORTDRMIMAGEMESAPROC) (EGLDisplay dpy, EGLImageKHR image, EGLint *name, EGLint *handle, EGLint *stride); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLImageKHR EGLAPIENTRY eglCreateDRMImageMESA (EGLDisplay dpy, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglExportDRMImageMESA (EGLDisplay dpy, EGLImageKHR image, EGLint *name, EGLint *handle, EGLint *stride); +#endif +#endif /* EGL_MESA_drm_image */ + +#ifndef EGL_MESA_image_dma_buf_export +#define EGL_MESA_image_dma_buf_export 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLEXPORTDMABUFIMAGEQUERYMESAPROC) (EGLDisplay dpy, EGLImageKHR image, int *fourcc, int *num_planes, EGLuint64KHR *modifiers); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLEXPORTDMABUFIMAGEMESAPROC) (EGLDisplay dpy, EGLImageKHR image, int *fds, EGLint *strides, EGLint *offsets); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglExportDMABUFImageQueryMESA (EGLDisplay dpy, EGLImageKHR image, int *fourcc, int *num_planes, EGLuint64KHR *modifiers); +EGLAPI EGLBoolean EGLAPIENTRY eglExportDMABUFImageMESA (EGLDisplay dpy, EGLImageKHR image, int *fds, EGLint *strides, EGLint *offsets); +#endif +#endif /* EGL_MESA_image_dma_buf_export */ + +#ifndef EGL_MESA_platform_gbm +#define EGL_MESA_platform_gbm 1 +#define EGL_PLATFORM_GBM_MESA 0x31D7 +#endif /* EGL_MESA_platform_gbm */ + +#ifndef EGL_MESA_platform_surfaceless +#define 
EGL_MESA_platform_surfaceless 1 +#define EGL_PLATFORM_SURFACELESS_MESA 0x31DD +#endif /* EGL_MESA_platform_surfaceless */ + +#ifndef EGL_MESA_query_driver +#define EGL_MESA_query_driver 1 +typedef char *(EGLAPIENTRYP PFNEGLGETDISPLAYDRIVERCONFIGPROC) (EGLDisplay dpy); +typedef const char *(EGLAPIENTRYP PFNEGLGETDISPLAYDRIVERNAMEPROC) (EGLDisplay dpy); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI char *EGLAPIENTRY eglGetDisplayDriverConfig (EGLDisplay dpy); +EGLAPI const char *EGLAPIENTRY eglGetDisplayDriverName (EGLDisplay dpy); +#endif +#endif /* EGL_MESA_query_driver */ + +#ifndef EGL_NOK_swap_region +#define EGL_NOK_swap_region 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSWAPBUFFERSREGIONNOKPROC) (EGLDisplay dpy, EGLSurface surface, EGLint numRects, const EGLint *rects); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSwapBuffersRegionNOK (EGLDisplay dpy, EGLSurface surface, EGLint numRects, const EGLint *rects); +#endif +#endif /* EGL_NOK_swap_region */ + +#ifndef EGL_NOK_swap_region2 +#define EGL_NOK_swap_region2 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSWAPBUFFERSREGION2NOKPROC) (EGLDisplay dpy, EGLSurface surface, EGLint numRects, const EGLint *rects); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSwapBuffersRegion2NOK (EGLDisplay dpy, EGLSurface surface, EGLint numRects, const EGLint *rects); +#endif +#endif /* EGL_NOK_swap_region2 */ + +#ifndef EGL_NOK_texture_from_pixmap +#define EGL_NOK_texture_from_pixmap 1 +#define EGL_Y_INVERTED_NOK 0x307F +#endif /* EGL_NOK_texture_from_pixmap */ + +#ifndef EGL_NV_3dvision_surface +#define EGL_NV_3dvision_surface 1 +#define EGL_AUTO_STEREO_NV 0x3136 +#endif /* EGL_NV_3dvision_surface */ + +#ifndef EGL_NV_context_priority_realtime +#define EGL_NV_context_priority_realtime 1 +#define EGL_CONTEXT_PRIORITY_REALTIME_NV 0x3357 +#endif /* EGL_NV_context_priority_realtime */ + +#ifndef EGL_NV_coverage_sample +#define EGL_NV_coverage_sample 1 +#define EGL_COVERAGE_BUFFERS_NV 0x30E0 +#define EGL_COVERAGE_SAMPLES_NV 0x30E1 +#endif /* EGL_NV_coverage_sample */ + +#ifndef EGL_NV_coverage_sample_resolve +#define EGL_NV_coverage_sample_resolve 1 +#define EGL_COVERAGE_SAMPLE_RESOLVE_NV 0x3131 +#define EGL_COVERAGE_SAMPLE_RESOLVE_DEFAULT_NV 0x3132 +#define EGL_COVERAGE_SAMPLE_RESOLVE_NONE_NV 0x3133 +#endif /* EGL_NV_coverage_sample_resolve */ + +#ifndef EGL_NV_cuda_event +#define EGL_NV_cuda_event 1 +#define EGL_CUDA_EVENT_HANDLE_NV 0x323B +#define EGL_SYNC_CUDA_EVENT_NV 0x323C +#define EGL_SYNC_CUDA_EVENT_COMPLETE_NV 0x323D +#endif /* EGL_NV_cuda_event */ + +#ifndef EGL_NV_depth_nonlinear +#define EGL_NV_depth_nonlinear 1 +#define EGL_DEPTH_ENCODING_NV 0x30E2 +#define EGL_DEPTH_ENCODING_NONE_NV 0 +#define EGL_DEPTH_ENCODING_NONLINEAR_NV 0x30E3 +#endif /* EGL_NV_depth_nonlinear */ + +#ifndef EGL_NV_device_cuda +#define EGL_NV_device_cuda 1 +#define EGL_CUDA_DEVICE_NV 0x323A +#endif /* EGL_NV_device_cuda */ + +#ifndef EGL_NV_native_query +#define EGL_NV_native_query 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYNATIVEDISPLAYNVPROC) (EGLDisplay dpy, EGLNativeDisplayType *display_id); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYNATIVEWINDOWNVPROC) (EGLDisplay dpy, EGLSurface surf, EGLNativeWindowType *window); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYNATIVEPIXMAPNVPROC) (EGLDisplay dpy, EGLSurface surf, EGLNativePixmapType *pixmap); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryNativeDisplayNV (EGLDisplay dpy, EGLNativeDisplayType *display_id); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryNativeWindowNV 
(EGLDisplay dpy, EGLSurface surf, EGLNativeWindowType *window); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryNativePixmapNV (EGLDisplay dpy, EGLSurface surf, EGLNativePixmapType *pixmap); +#endif +#endif /* EGL_NV_native_query */ + +#ifndef EGL_NV_post_convert_rounding +#define EGL_NV_post_convert_rounding 1 +#endif /* EGL_NV_post_convert_rounding */ + +#ifndef EGL_NV_post_sub_buffer +#define EGL_NV_post_sub_buffer 1 +#define EGL_POST_SUB_BUFFER_SUPPORTED_NV 0x30BE +typedef EGLBoolean (EGLAPIENTRYP PFNEGLPOSTSUBBUFFERNVPROC) (EGLDisplay dpy, EGLSurface surface, EGLint x, EGLint y, EGLint width, EGLint height); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglPostSubBufferNV (EGLDisplay dpy, EGLSurface surface, EGLint x, EGLint y, EGLint width, EGLint height); +#endif +#endif /* EGL_NV_post_sub_buffer */ + +#ifndef EGL_NV_quadruple_buffer +#define EGL_NV_quadruple_buffer 1 +#define EGL_QUADRUPLE_BUFFER_NV 0x3231 +#endif /* EGL_NV_quadruple_buffer */ + +#ifndef EGL_NV_robustness_video_memory_purge +#define EGL_NV_robustness_video_memory_purge 1 +#define EGL_GENERATE_RESET_ON_VIDEO_MEMORY_PURGE_NV 0x334C +#endif /* EGL_NV_robustness_video_memory_purge */ + +#ifndef EGL_NV_stream_consumer_eglimage +#define EGL_NV_stream_consumer_eglimage 1 +#define EGL_STREAM_CONSUMER_IMAGE_NV 0x3373 +#define EGL_STREAM_IMAGE_ADD_NV 0x3374 +#define EGL_STREAM_IMAGE_REMOVE_NV 0x3375 +#define EGL_STREAM_IMAGE_AVAILABLE_NV 0x3376 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMIMAGECONSUMERCONNECTNVPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLint num_modifiers, EGLuint64KHR *modifiers, EGLAttrib *attrib_list); +typedef EGLint (EGLAPIENTRYP PFNEGLQUERYSTREAMCONSUMEREVENTNVPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLTime timeout, EGLenum *event, EGLAttrib *aux); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMACQUIREIMAGENVPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLImage *pImage, EGLSync sync); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMRELEASEIMAGENVPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLImage image, EGLSync sync); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglStreamImageConsumerConnectNV (EGLDisplay dpy, EGLStreamKHR stream, EGLint num_modifiers, EGLuint64KHR *modifiers, EGLAttrib *attrib_list); +EGLAPI EGLint EGLAPIENTRY eglQueryStreamConsumerEventNV (EGLDisplay dpy, EGLStreamKHR stream, EGLTime timeout, EGLenum *event, EGLAttrib *aux); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamAcquireImageNV (EGLDisplay dpy, EGLStreamKHR stream, EGLImage *pImage, EGLSync sync); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamReleaseImageNV (EGLDisplay dpy, EGLStreamKHR stream, EGLImage image, EGLSync sync); +#endif +#endif /* EGL_NV_stream_consumer_eglimage */ + +#ifndef EGL_NV_stream_consumer_gltexture_yuv +#define EGL_NV_stream_consumer_gltexture_yuv 1 +#define EGL_YUV_PLANE0_TEXTURE_UNIT_NV 0x332C +#define EGL_YUV_PLANE1_TEXTURE_UNIT_NV 0x332D +#define EGL_YUV_PLANE2_TEXTURE_UNIT_NV 0x332E +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERGLTEXTUREEXTERNALATTRIBSNVPROC) (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerGLTextureExternalAttribsNV (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +#endif +#endif /* EGL_NV_stream_consumer_gltexture_yuv */ + +#ifndef EGL_NV_stream_cross_display +#define EGL_NV_stream_cross_display 1 +#define EGL_STREAM_CROSS_DISPLAY_NV 0x334E +#endif /* EGL_NV_stream_cross_display */ + +#ifndef EGL_NV_stream_cross_object +#define 
EGL_NV_stream_cross_object 1 +#define EGL_STREAM_CROSS_OBJECT_NV 0x334D +#endif /* EGL_NV_stream_cross_object */ + +#ifndef EGL_NV_stream_cross_partition +#define EGL_NV_stream_cross_partition 1 +#define EGL_STREAM_CROSS_PARTITION_NV 0x323F +#endif /* EGL_NV_stream_cross_partition */ + +#ifndef EGL_NV_stream_cross_process +#define EGL_NV_stream_cross_process 1 +#define EGL_STREAM_CROSS_PROCESS_NV 0x3245 +#endif /* EGL_NV_stream_cross_process */ + +#ifndef EGL_NV_stream_cross_system +#define EGL_NV_stream_cross_system 1 +#define EGL_STREAM_CROSS_SYSTEM_NV 0x334F +#endif /* EGL_NV_stream_cross_system */ + +#ifndef EGL_NV_stream_dma +#define EGL_NV_stream_dma 1 +#define EGL_STREAM_DMA_NV 0x3371 +#define EGL_STREAM_DMA_SERVER_NV 0x3372 +#endif /* EGL_NV_stream_dma */ + +#ifndef EGL_NV_stream_fifo_next +#define EGL_NV_stream_fifo_next 1 +#define EGL_PENDING_FRAME_NV 0x3329 +#define EGL_STREAM_TIME_PENDING_NV 0x332A +#endif /* EGL_NV_stream_fifo_next */ + +#ifndef EGL_NV_stream_fifo_synchronous +#define EGL_NV_stream_fifo_synchronous 1 +#define EGL_STREAM_FIFO_SYNCHRONOUS_NV 0x3336 +#endif /* EGL_NV_stream_fifo_synchronous */ + +#ifndef EGL_NV_stream_flush +#define EGL_NV_stream_flush 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMFLUSHNVPROC) (EGLDisplay dpy, EGLStreamKHR stream); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglStreamFlushNV (EGLDisplay dpy, EGLStreamKHR stream); +#endif +#endif /* EGL_NV_stream_flush */ + +#ifndef EGL_NV_stream_frame_limits +#define EGL_NV_stream_frame_limits 1 +#define EGL_PRODUCER_MAX_FRAME_HINT_NV 0x3337 +#define EGL_CONSUMER_MAX_FRAME_HINT_NV 0x3338 +#endif /* EGL_NV_stream_frame_limits */ + +#ifndef EGL_NV_stream_metadata +#define EGL_NV_stream_metadata 1 +#define EGL_MAX_STREAM_METADATA_BLOCKS_NV 0x3250 +#define EGL_MAX_STREAM_METADATA_BLOCK_SIZE_NV 0x3251 +#define EGL_MAX_STREAM_METADATA_TOTAL_SIZE_NV 0x3252 +#define EGL_PRODUCER_METADATA_NV 0x3253 +#define EGL_CONSUMER_METADATA_NV 0x3254 +#define EGL_PENDING_METADATA_NV 0x3328 +#define EGL_METADATA0_SIZE_NV 0x3255 +#define EGL_METADATA1_SIZE_NV 0x3256 +#define EGL_METADATA2_SIZE_NV 0x3257 +#define EGL_METADATA3_SIZE_NV 0x3258 +#define EGL_METADATA0_TYPE_NV 0x3259 +#define EGL_METADATA1_TYPE_NV 0x325A +#define EGL_METADATA2_TYPE_NV 0x325B +#define EGL_METADATA3_TYPE_NV 0x325C +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDISPLAYATTRIBNVPROC) (EGLDisplay dpy, EGLint attribute, EGLAttrib *value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSETSTREAMMETADATANVPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLint n, EGLint offset, EGLint size, const void *data); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSTREAMMETADATANVPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum name, EGLint n, EGLint offset, EGLint size, void *data); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDisplayAttribNV (EGLDisplay dpy, EGLint attribute, EGLAttrib *value); +EGLAPI EGLBoolean EGLAPIENTRY eglSetStreamMetadataNV (EGLDisplay dpy, EGLStreamKHR stream, EGLint n, EGLint offset, EGLint size, const void *data); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryStreamMetadataNV (EGLDisplay dpy, EGLStreamKHR stream, EGLenum name, EGLint n, EGLint offset, EGLint size, void *data); +#endif +#endif /* EGL_NV_stream_metadata */ + +#ifndef EGL_NV_stream_origin +#define EGL_NV_stream_origin 1 +#define EGL_STREAM_FRAME_ORIGIN_X_NV 0x3366 +#define EGL_STREAM_FRAME_ORIGIN_Y_NV 0x3367 +#define EGL_STREAM_FRAME_MAJOR_AXIS_NV 0x3368 +#define EGL_CONSUMER_AUTO_ORIENTATION_NV 0x3369 +#define EGL_PRODUCER_AUTO_ORIENTATION_NV 
0x336A +#define EGL_LEFT_NV 0x336B +#define EGL_RIGHT_NV 0x336C +#define EGL_TOP_NV 0x336D +#define EGL_BOTTOM_NV 0x336E +#define EGL_X_AXIS_NV 0x336F +#define EGL_Y_AXIS_NV 0x3370 +#endif /* EGL_NV_stream_origin */ + +#ifndef EGL_NV_stream_remote +#define EGL_NV_stream_remote 1 +#define EGL_STREAM_STATE_INITIALIZING_NV 0x3240 +#define EGL_STREAM_TYPE_NV 0x3241 +#define EGL_STREAM_PROTOCOL_NV 0x3242 +#define EGL_STREAM_ENDPOINT_NV 0x3243 +#define EGL_STREAM_LOCAL_NV 0x3244 +#define EGL_STREAM_PRODUCER_NV 0x3247 +#define EGL_STREAM_CONSUMER_NV 0x3248 +#define EGL_STREAM_PROTOCOL_FD_NV 0x3246 +#endif /* EGL_NV_stream_remote */ + +#ifndef EGL_NV_stream_reset +#define EGL_NV_stream_reset 1 +#define EGL_SUPPORT_RESET_NV 0x3334 +#define EGL_SUPPORT_REUSE_NV 0x3335 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLRESETSTREAMNVPROC) (EGLDisplay dpy, EGLStreamKHR stream); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglResetStreamNV (EGLDisplay dpy, EGLStreamKHR stream); +#endif +#endif /* EGL_NV_stream_reset */ + +#ifndef EGL_NV_stream_socket +#define EGL_NV_stream_socket 1 +#define EGL_STREAM_PROTOCOL_SOCKET_NV 0x324B +#define EGL_SOCKET_HANDLE_NV 0x324C +#define EGL_SOCKET_TYPE_NV 0x324D +#endif /* EGL_NV_stream_socket */ + +#ifndef EGL_NV_stream_socket_inet +#define EGL_NV_stream_socket_inet 1 +#define EGL_SOCKET_TYPE_INET_NV 0x324F +#endif /* EGL_NV_stream_socket_inet */ + +#ifndef EGL_NV_stream_socket_unix +#define EGL_NV_stream_socket_unix 1 +#define EGL_SOCKET_TYPE_UNIX_NV 0x324E +#endif /* EGL_NV_stream_socket_unix */ + +#ifndef EGL_NV_stream_sync +#define EGL_NV_stream_sync 1 +#define EGL_SYNC_NEW_FRAME_NV 0x321F +typedef EGLSyncKHR (EGLAPIENTRYP PFNEGLCREATESTREAMSYNCNVPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum type, const EGLint *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSyncKHR EGLAPIENTRY eglCreateStreamSyncNV (EGLDisplay dpy, EGLStreamKHR stream, EGLenum type, const EGLint *attrib_list); +#endif +#endif /* EGL_NV_stream_sync */ + +#ifndef EGL_NV_sync +#define EGL_NV_sync 1 +typedef void *EGLSyncNV; +typedef khronos_utime_nanoseconds_t EGLTimeNV; +#ifdef KHRONOS_SUPPORT_INT64 +#define EGL_SYNC_PRIOR_COMMANDS_COMPLETE_NV 0x30E6 +#define EGL_SYNC_STATUS_NV 0x30E7 +#define EGL_SIGNALED_NV 0x30E8 +#define EGL_UNSIGNALED_NV 0x30E9 +#define EGL_SYNC_FLUSH_COMMANDS_BIT_NV 0x0001 +#define EGL_FOREVER_NV 0xFFFFFFFFFFFFFFFFull +#define EGL_ALREADY_SIGNALED_NV 0x30EA +#define EGL_TIMEOUT_EXPIRED_NV 0x30EB +#define EGL_CONDITION_SATISFIED_NV 0x30EC +#define EGL_SYNC_TYPE_NV 0x30ED +#define EGL_SYNC_CONDITION_NV 0x30EE +#define EGL_SYNC_FENCE_NV 0x30EF +#define EGL_NO_SYNC_NV EGL_CAST(EGLSyncNV,0) +typedef EGLSyncNV (EGLAPIENTRYP PFNEGLCREATEFENCESYNCNVPROC) (EGLDisplay dpy, EGLenum condition, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYSYNCNVPROC) (EGLSyncNV sync); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLFENCENVPROC) (EGLSyncNV sync); +typedef EGLint (EGLAPIENTRYP PFNEGLCLIENTWAITSYNCNVPROC) (EGLSyncNV sync, EGLint flags, EGLTimeNV timeout); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSIGNALSYNCNVPROC) (EGLSyncNV sync, EGLenum mode); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETSYNCATTRIBNVPROC) (EGLSyncNV sync, EGLint attribute, EGLint *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSyncNV EGLAPIENTRY eglCreateFenceSyncNV (EGLDisplay dpy, EGLenum condition, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroySyncNV (EGLSyncNV sync); +EGLAPI EGLBoolean EGLAPIENTRY eglFenceNV (EGLSyncNV sync); +EGLAPI EGLint EGLAPIENTRY 
eglClientWaitSyncNV (EGLSyncNV sync, EGLint flags, EGLTimeNV timeout); +EGLAPI EGLBoolean EGLAPIENTRY eglSignalSyncNV (EGLSyncNV sync, EGLenum mode); +EGLAPI EGLBoolean EGLAPIENTRY eglGetSyncAttribNV (EGLSyncNV sync, EGLint attribute, EGLint *value); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_NV_sync */ + +#ifndef EGL_NV_system_time +#define EGL_NV_system_time 1 +typedef khronos_utime_nanoseconds_t EGLuint64NV; +#ifdef KHRONOS_SUPPORT_INT64 +typedef EGLuint64NV (EGLAPIENTRYP PFNEGLGETSYSTEMTIMEFREQUENCYNVPROC) (void); +typedef EGLuint64NV (EGLAPIENTRYP PFNEGLGETSYSTEMTIMENVPROC) (void); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLuint64NV EGLAPIENTRY eglGetSystemTimeFrequencyNV (void); +EGLAPI EGLuint64NV EGLAPIENTRY eglGetSystemTimeNV (void); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_NV_system_time */ + +#ifndef EGL_NV_triple_buffer +#define EGL_NV_triple_buffer 1 +#define EGL_TRIPLE_BUFFER_NV 0x3230 +#endif /* EGL_NV_triple_buffer */ + +#ifndef EGL_TIZEN_image_native_buffer +#define EGL_TIZEN_image_native_buffer 1 +#define EGL_NATIVE_BUFFER_TIZEN 0x32A0 +#endif /* EGL_TIZEN_image_native_buffer */ + +#ifndef EGL_TIZEN_image_native_surface +#define EGL_TIZEN_image_native_surface 1 +#define EGL_NATIVE_SURFACE_TIZEN 0x32A1 +#endif /* EGL_TIZEN_image_native_surface */ + +#ifndef EGL_WL_bind_wayland_display +#define EGL_WL_bind_wayland_display 1 +#define PFNEGLBINDWAYLANDDISPLAYWL PFNEGLBINDWAYLANDDISPLAYWLPROC +#define PFNEGLUNBINDWAYLANDDISPLAYWL PFNEGLUNBINDWAYLANDDISPLAYWLPROC +#define PFNEGLQUERYWAYLANDBUFFERWL PFNEGLQUERYWAYLANDBUFFERWLPROC +struct wl_display; +struct wl_resource; +#define EGL_WAYLAND_BUFFER_WL 0x31D5 +#define EGL_WAYLAND_PLANE_WL 0x31D6 +#define EGL_TEXTURE_Y_U_V_WL 0x31D7 +#define EGL_TEXTURE_Y_UV_WL 0x31D8 +#define EGL_TEXTURE_Y_XUXV_WL 0x31D9 +#define EGL_TEXTURE_EXTERNAL_WL 0x31DA +#define EGL_WAYLAND_Y_INVERTED_WL 0x31DB +typedef EGLBoolean (EGLAPIENTRYP PFNEGLBINDWAYLANDDISPLAYWLPROC) (EGLDisplay dpy, struct wl_display *display); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLUNBINDWAYLANDDISPLAYWLPROC) (EGLDisplay dpy, struct wl_display *display); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYWAYLANDBUFFERWLPROC) (EGLDisplay dpy, struct wl_resource *buffer, EGLint attribute, EGLint *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglBindWaylandDisplayWL (EGLDisplay dpy, struct wl_display *display); +EGLAPI EGLBoolean EGLAPIENTRY eglUnbindWaylandDisplayWL (EGLDisplay dpy, struct wl_display *display); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryWaylandBufferWL (EGLDisplay dpy, struct wl_resource *buffer, EGLint attribute, EGLint *value); +#endif +#endif /* EGL_WL_bind_wayland_display */ + +#ifndef EGL_WL_create_wayland_buffer_from_image +#define EGL_WL_create_wayland_buffer_from_image 1 +#define PFNEGLCREATEWAYLANDBUFFERFROMIMAGEWL PFNEGLCREATEWAYLANDBUFFERFROMIMAGEWLPROC +struct wl_buffer; +typedef struct wl_buffer *(EGLAPIENTRYP PFNEGLCREATEWAYLANDBUFFERFROMIMAGEWLPROC) (EGLDisplay dpy, EGLImageKHR image); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI struct wl_buffer *EGLAPIENTRY eglCreateWaylandBufferFromImageWL (EGLDisplay dpy, EGLImageKHR image); +#endif +#endif /* EGL_WL_create_wayland_buffer_from_image */ + +/* ANGLE EGL extensions */ +#include "eglext_angle.h" + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/angle/include/EGL/eglext_angle.h b/vendor/angle/include/EGL/eglext_angle.h new file mode 100644 index 00000000..65da9215 --- /dev/null +++ b/vendor/angle/include/EGL/eglext_angle.h @@ -0,0 +1,350 @@ +// +// 
Copyright 2017 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// eglext_angle.h: ANGLE modifications to the eglext.h header file. +// Currently we don't include this file directly, we patch eglext.h +// to include it implicitly so it is visible throughout our code. + +#ifndef INCLUDE_EGL_EGLEXT_ANGLE_ +#define INCLUDE_EGL_EGLEXT_ANGLE_ + +// clang-format off + +#ifndef EGL_ANGLE_robust_resource_initialization +#define EGL_ANGLE_robust_resource_initialization 1 +#define EGL_ROBUST_RESOURCE_INITIALIZATION_ANGLE 0x3453 +#endif /* EGL_ANGLE_robust_resource_initialization */ + +#ifndef EGL_ANGLE_keyed_mutex +#define EGL_ANGLE_keyed_mutex 1 +#define EGL_DXGI_KEYED_MUTEX_ANGLE 0x33A2 +#endif /* EGL_ANGLE_keyed_mutex */ + +#ifndef EGL_ANGLE_d3d_texture_client_buffer +#define EGL_ANGLE_d3d_texture_client_buffer 1 +#define EGL_D3D_TEXTURE_ANGLE 0x33A3 +#define EGL_TEXTURE_OFFSET_X_ANGLE 0x3490 +#define EGL_TEXTURE_OFFSET_Y_ANGLE 0x3491 +#define EGL_D3D11_TEXTURE_PLANE_ANGLE 0x3492 +#define EGL_D3D11_TEXTURE_ARRAY_SLICE_ANGLE 0x3493 +#endif /* EGL_ANGLE_d3d_texture_client_buffer */ + +#ifndef EGL_ANGLE_software_display +#define EGL_ANGLE_software_display 1 +#define EGL_SOFTWARE_DISPLAY_ANGLE ((EGLNativeDisplayType)-1) +#endif /* EGL_ANGLE_software_display */ + +#ifndef EGL_ANGLE_direct3d_display +#define EGL_ANGLE_direct3d_display 1 +#define EGL_D3D11_ELSE_D3D9_DISPLAY_ANGLE ((EGLNativeDisplayType)-2) +#define EGL_D3D11_ONLY_DISPLAY_ANGLE ((EGLNativeDisplayType)-3) +#endif /* EGL_ANGLE_direct3d_display */ + +#ifndef EGL_ANGLE_direct_composition +#define EGL_ANGLE_direct_composition 1 +#define EGL_DIRECT_COMPOSITION_ANGLE 0x33A5 +#endif /* EGL_ANGLE_direct_composition */ + +#ifndef EGL_ANGLE_platform_angle +#define EGL_ANGLE_platform_angle 1 +#define EGL_PLATFORM_ANGLE_ANGLE 0x3202 +#define EGL_PLATFORM_ANGLE_TYPE_ANGLE 0x3203 +#define EGL_PLATFORM_ANGLE_MAX_VERSION_MAJOR_ANGLE 0x3204 +#define EGL_PLATFORM_ANGLE_MAX_VERSION_MINOR_ANGLE 0x3205 +#define EGL_PLATFORM_ANGLE_TYPE_DEFAULT_ANGLE 0x3206 +#define EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE 0x3451 +#define EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE 0x3209 +#define EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE 0x320A +#define EGL_PLATFORM_ANGLE_DEVICE_TYPE_NULL_ANGLE 0x345E +#define EGL_PLATFORM_ANGLE_NATIVE_PLATFORM_TYPE_ANGLE 0x348F +#endif /* EGL_ANGLE_platform_angle */ + +#ifndef EGL_ANGLE_platform_angle_d3d +#define EGL_ANGLE_platform_angle_d3d 1 +#define EGL_PLATFORM_ANGLE_TYPE_D3D9_ANGLE 0x3207 +#define EGL_PLATFORM_ANGLE_TYPE_D3D11_ANGLE 0x3208 +#define EGL_PLATFORM_ANGLE_DEVICE_TYPE_D3D_WARP_ANGLE 0x320B +#define EGL_PLATFORM_ANGLE_DEVICE_TYPE_D3D_REFERENCE_ANGLE 0x320C +#define EGL_PLATFORM_ANGLE_ENABLE_AUTOMATIC_TRIM_ANGLE 0x320F +#endif /* EGL_ANGLE_platform_angle_d3d */ + +#ifndef EGL_ANGLE_platform_angle_d3d_luid +#define EGL_ANGLE_platform_angle_d3d_luid 1 +#define EGL_PLATFORM_ANGLE_D3D_LUID_HIGH_ANGLE 0x34A0 +#define EGL_PLATFORM_ANGLE_D3D_LUID_LOW_ANGLE 0x34A1 +#endif /* EGL_ANGLE_platform_angle_d3d_luid */ + +#ifndef EGL_ANGLE_platform_angle_d3d11on12 +#define EGL_ANGLE_platform_angle_d3d11on12 1 +#define EGL_PLATFORM_ANGLE_D3D11ON12_ANGLE 0x3488 +#endif /* EGL_ANGLE_platform_angle_d3d11on12 */ + +#ifndef EGL_ANGLE_platform_angle_opengl +#define EGL_ANGLE_platform_angle_opengl 1 +#define EGL_PLATFORM_ANGLE_TYPE_OPENGL_ANGLE 0x320D +#define EGL_PLATFORM_ANGLE_TYPE_OPENGLES_ANGLE 0x320E +#define 
EGL_PLATFORM_ANGLE_EGL_HANDLE_ANGLE 0x3480 +#endif /* EGL_ANGLE_platform_angle_opengl */ + +#ifndef EGL_ANGLE_platform_angle_null +#define EGL_ANGLE_platform_angle_null 1 +#define EGL_PLATFORM_ANGLE_TYPE_NULL_ANGLE 0x33AE +#endif /* EGL_ANGLE_platform_angle_null */ + +#ifndef EGL_ANGLE_platform_angle_vulkan +#define EGL_ANGLE_platform_angle_vulkan 1 +#define EGL_PLATFORM_ANGLE_TYPE_VULKAN_ANGLE 0x3450 +#define EGL_PLATFORM_VULKAN_DISPLAY_MODE_SIMPLE_ANGLE 0x34A4 +#define EGL_PLATFORM_VULKAN_DISPLAY_MODE_HEADLESS_ANGLE 0x34A5 +#endif /* EGL_ANGLE_platform_angle_vulkan */ + +#ifndef EGL_ANGLE_platform_angle_metal +#define EGL_ANGLE_platform_angle_metal 1 +#define EGL_PLATFORM_ANGLE_TYPE_METAL_ANGLE 0x3489 +#endif /* EGL_ANGLE_platform_angle_metal */ + +#ifndef EGL_ANGLE_platform_angle_device_type_swiftshader +#define EGL_ANGLE_platform_angle_device_type_swiftshader +#define EGL_PLATFORM_ANGLE_DEVICE_TYPE_SWIFTSHADER_ANGLE 0x3487 +#endif /* EGL_ANGLE_platform_angle_device_type_swiftshader */ + +#ifndef EGL_ANGLE_platform_angle_device_type_egl_angle +#define EGL_ANGLE_platform_angle_device_type_egl_angle +#define EGL_PLATFORM_ANGLE_DEVICE_TYPE_EGL_ANGLE 0x348E +#endif /* EGL_ANGLE_platform_angle_device_type_egl_angle */ + +#ifndef EGL_ANGLE_platform_angle_context_virtualization +#define EGL_ANGLE_platform_angle_context_virtualization 1 +#define EGL_PLATFORM_ANGLE_CONTEXT_VIRTUALIZATION_ANGLE 0x3481 +#endif /* EGL_ANGLE_platform_angle_context_virtualization */ + +#ifndef EGL_ANGLE_platform_angle_device_context_volatile_eagl +#define EGL_ANGLE_platform_angle_device_context_volatile_eagl 1 +#define EGL_PLATFORM_ANGLE_DEVICE_CONTEXT_VOLATILE_EAGL_ANGLE 0x34A2 +#endif /* EGL_ANGLE_platform_angle_device_context_volatile_eagl */ + +#ifndef EGL_ANGLE_platform_angle_device_context_volatile_cgl +#define EGL_ANGLE_platform_angle_device_context_volatile_cgl 1 +#define EGL_PLATFORM_ANGLE_DEVICE_CONTEXT_VOLATILE_CGL_ANGLE 0x34A3 +#endif /* EGL_ANGLE_platform_angle_device_context_volatile_cgl */ + +#ifndef EGL_ANGLE_x11_visual +#define EGL_ANGLE_x11_visual +#define EGL_X11_VISUAL_ID_ANGLE 0x33A3 +#endif /* EGL_ANGLE_x11_visual */ + +#ifndef EGL_ANGLE_flexible_surface_compatibility +#define EGL_ANGLE_flexible_surface_compatibility 1 +#define EGL_FLEXIBLE_SURFACE_COMPATIBILITY_SUPPORTED_ANGLE 0x33A6 +#endif /* EGL_ANGLE_flexible_surface_compatibility */ + +#ifndef EGL_ANGLE_surface_orientation +#define EGL_ANGLE_surface_orientation +#define EGL_OPTIMAL_SURFACE_ORIENTATION_ANGLE 0x33A7 +#define EGL_SURFACE_ORIENTATION_ANGLE 0x33A8 +#define EGL_SURFACE_ORIENTATION_INVERT_X_ANGLE 0x0001 +#define EGL_SURFACE_ORIENTATION_INVERT_Y_ANGLE 0x0002 +#endif /* EGL_ANGLE_surface_orientation */ + +#ifndef EGL_ANGLE_experimental_present_path +#define EGL_ANGLE_experimental_present_path +#define EGL_EXPERIMENTAL_PRESENT_PATH_ANGLE 0x33A4 +#define EGL_EXPERIMENTAL_PRESENT_PATH_FAST_ANGLE 0x33A9 +#define EGL_EXPERIMENTAL_PRESENT_PATH_COPY_ANGLE 0x33AA +#endif /* EGL_ANGLE_experimental_present_path */ + +#ifndef EGL_ANGLE_stream_producer_d3d_texture +#define EGL_ANGLE_stream_producer_d3d_texture +#define EGL_D3D_TEXTURE_SUBRESOURCE_ID_ANGLE 0x33AB +typedef EGLBoolean(EGLAPIENTRYP PFNEGLCREATESTREAMPRODUCERD3DTEXTUREANGLEPROC)(EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +typedef EGLBoolean(EGLAPIENTRYP PFNEGLSTREAMPOSTD3DTEXTUREANGLEPROC)(EGLDisplay dpy, EGLStreamKHR stream, void *texture, const EGLAttrib *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY 
eglCreateStreamProducerD3DTextureANGLE(EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamPostD3DTextureANGLE(EGLDisplay dpy, EGLStreamKHR stream, void *texture, const EGLAttrib *attrib_list); +#endif +#endif /* EGL_ANGLE_stream_producer_d3d_texture */ + +#ifndef EGL_ANGLE_create_context_webgl_compatibility +#define EGL_ANGLE_create_context_webgl_compatibility 1 +#define EGL_CONTEXT_WEBGL_COMPATIBILITY_ANGLE 0x33AC +#endif /* EGL_ANGLE_create_context_webgl_compatibility */ + +#ifndef EGL_ANGLE_display_texture_share_group +#define EGL_ANGLE_display_texture_share_group 1 +#define EGL_DISPLAY_TEXTURE_SHARE_GROUP_ANGLE 0x33AF +#endif /* EGL_ANGLE_display_texture_share_group */ + +#ifndef EGL_CHROMIUM_create_context_bind_generates_resource +#define EGL_CHROMIUM_create_context_bind_generates_resource 1 +#define EGL_CONTEXT_BIND_GENERATES_RESOURCE_CHROMIUM 0x33AD +#endif /* EGL_CHROMIUM_create_context_bind_generates_resource */ + +#ifndef EGL_ANGLE_create_context_client_arrays +#define EGL_ANGLE_create_context_client_arrays 1 +#define EGL_CONTEXT_CLIENT_ARRAYS_ENABLED_ANGLE 0x3452 +#endif /* EGL_ANGLE_create_context_client_arrays */ + +#ifndef EGL_ANGLE_device_creation +#define EGL_ANGLE_device_creation 1 +typedef EGLDeviceEXT(EGLAPIENTRYP PFNEGLCREATEDEVICEANGLEPROC) (EGLint device_type, void *native_device, const EGLAttrib *attrib_list); +typedef EGLBoolean(EGLAPIENTRYP PFNEGLRELEASEDEVICEANGLEPROC) (EGLDeviceEXT device); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLDeviceEXT EGLAPIENTRY eglCreateDeviceANGLE(EGLint device_type, void *native_device, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglReleaseDeviceANGLE(EGLDeviceEXT device); +#endif +#endif /* EGL_ANGLE_device_creation */ + +#ifndef EGL_ANGLE_program_cache_control +#define EGL_ANGLE_program_cache_control 1 +#define EGL_PROGRAM_CACHE_SIZE_ANGLE 0x3455 +#define EGL_PROGRAM_CACHE_KEY_LENGTH_ANGLE 0x3456 +#define EGL_PROGRAM_CACHE_RESIZE_ANGLE 0x3457 +#define EGL_PROGRAM_CACHE_TRIM_ANGLE 0x3458 +#define EGL_CONTEXT_PROGRAM_BINARY_CACHE_ENABLED_ANGLE 0x3459 +typedef EGLint (EGLAPIENTRYP PFNEGLPROGRAMCACHEGETATTRIBANGLEPROC) (EGLDisplay dpy, EGLenum attrib); +typedef void (EGLAPIENTRYP PFNEGLPROGRAMCACHEQUERYANGLEPROC) (EGLDisplay dpy, EGLint index, void *key, EGLint *keysize, void *binary, EGLint *binarysize); +typedef void (EGLAPIENTRYP PFNEGLPROGRAMCACHEPOPULATEANGLEPROC) (EGLDisplay dpy, const void *key, EGLint keysize, const void *binary, EGLint binarysize); +typedef EGLint (EGLAPIENTRYP PFNEGLPROGRAMCACHERESIZEANGLEPROC) (EGLDisplay dpy, EGLint limit, EGLint mode); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLint EGLAPIENTRY eglProgramCacheGetAttribANGLE(EGLDisplay dpy, EGLenum attrib); +EGLAPI void EGLAPIENTRY eglProgramCacheQueryANGLE(EGLDisplay dpy, EGLint index, void *key, EGLint *keysize, void *binary, EGLint *binarysize); +EGLAPI void EGLAPIENTRY eglProgramCachePopulateANGLE(EGLDisplay dpy, const void *key, EGLint keysize, const void *binary, EGLint binarysize); +EGLAPI EGLint EGLAPIENTRY eglProgramCacheResizeANGLE(EGLDisplay dpy, EGLint limit, EGLint mode); +#endif +#endif /* EGL_ANGLE_program_cache_control */ + +#ifndef EGL_ANGLE_iosurface_client_buffer +#define EGL_ANGLE_iosurface_client_buffer 1 +#define EGL_IOSURFACE_ANGLE 0x3454 +#define EGL_IOSURFACE_PLANE_ANGLE 0x345A +#define EGL_TEXTURE_RECTANGLE_ANGLE 0x345B +#define EGL_TEXTURE_TYPE_ANGLE 0x345C +#define EGL_TEXTURE_INTERNAL_FORMAT_ANGLE 0x345D +#define EGL_IOSURFACE_USAGE_HINT_ANGLE 0x348A +#define 
EGL_IOSURFACE_READ_HINT_ANGLE 0x0001 +#define EGL_IOSURFACE_WRITE_HINT_ANGLE 0x0002 +#define EGL_BIND_TO_TEXTURE_TARGET_ANGLE 0x348D +#endif /* EGL_ANGLE_iosurface_client_buffer */ + +#ifndef EGL_ANGLE_create_context_extensions_enabled +#define EGL_ANGLE_create_context_extensions_enabled 1 +#define EGL_EXTENSIONS_ENABLED_ANGLE 0x345F +#endif /* EGL_ANGLE_create_context_extensions_enabled */ + +#ifndef EGL_CHROMIUM_sync_control +#define EGL_CHROMIUM_sync_control 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETSYNCVALUESCHROMIUMPROC) (EGLDisplay dpy, + EGLSurface surface, + EGLuint64KHR *ust, + EGLuint64KHR *msc, + EGLuint64KHR *sbc); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglGetSyncValuesCHROMIUM(EGLDisplay dpy, + EGLSurface surface, + EGLuint64KHR *ust, + EGLuint64KHR *msc, + EGLuint64KHR *sbc); +#endif +#endif /* EGL_CHROMIUM_sync_control */ + +#ifndef EGL_ANGLE_sync_control_rate +#define EGL_ANGLE_sync_control_rate 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETMSCRATEANGLEPROC) (EGLDisplay dpy, + EGLSurface surface, + EGLint *numerator, + EGLint *denominator); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglGetMscRateANGLE(EGLDisplay dpy, + EGLSurface surface, + EGLint *numerator, + EGLint *denominator); +#endif +#endif /* EGL_ANGLE_sync_control_rate */ + +#ifndef EGL_ANGLE_power_preference +#define EGL_ANGLE_power_preference 1 +#define EGL_POWER_PREFERENCE_ANGLE 0x3482 +#define EGL_LOW_POWER_ANGLE 0x0001 +#define EGL_HIGH_POWER_ANGLE 0x0002 +typedef void(EGLAPIENTRYP PFNEGLRELEASEHIGHPOWERGPUANGLEPROC) (EGLDisplay dpy, EGLContext ctx); +typedef void(EGLAPIENTRYP PFNEGLREACQUIREHIGHPOWERGPUANGLEPROC) (EGLDisplay dpy, EGLContext ctx); +typedef void(EGLAPIENTRYP PFNEGLHANDLEGPUSWITCHANGLEPROC) (EGLDisplay dpy); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI void EGLAPIENTRY eglReleaseHighPowerGPUANGLE(EGLDisplay dpy, EGLContext ctx); +EGLAPI void EGLAPIENTRY eglReacquireHighPowerGPUANGLE(EGLDisplay dpy, EGLContext ctx); +EGLAPI void EGLAPIENTRY eglHandleGPUSwitchANGLE(EGLDisplay dpy); +#endif +#endif /* EGL_ANGLE_power_preference */ + +#ifndef EGL_ANGLE_feature_control +#define EGL_ANGLE_feature_control 1 +#define EGL_FEATURE_NAME_ANGLE 0x3460 +#define EGL_FEATURE_CATEGORY_ANGLE 0x3461 +#define EGL_FEATURE_DESCRIPTION_ANGLE 0x3462 +#define EGL_FEATURE_BUG_ANGLE 0x3463 +#define EGL_FEATURE_STATUS_ANGLE 0x3464 +#define EGL_FEATURE_COUNT_ANGLE 0x3465 +#define EGL_FEATURE_OVERRIDES_ENABLED_ANGLE 0x3466 +#define EGL_FEATURE_OVERRIDES_DISABLED_ANGLE 0x3467 +#define EGL_FEATURE_CONDITION_ANGLE 0x3468 +#define EGL_FEATURE_ALL_DISABLED_ANGLE 0x3469 +typedef const char *(EGLAPIENTRYP PFNEGLQUERYSTRINGIANGLEPROC) (EGLDisplay dpy, EGLint name, EGLint index); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDISPLAYATTRIBANGLEPROC) (EGLDisplay dpy, EGLint attribute, EGLAttrib *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI const char *EGLAPIENTRY eglQueryStringiANGLE(EGLDisplay dpy, EGLint name, EGLint index); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDisplayAttribANGLE(EGLDisplay dpy, EGLint attribute, EGLAttrib *value); +#endif +#endif /* EGL_ANGLE_feature_control */ + +#ifndef EGL_ANGLE_image_d3d11_texture +#define EGL_D3D11_TEXTURE_ANGLE 0x3484 +#define EGL_TEXTURE_INTERNAL_FORMAT_ANGLE 0x345D +#endif /* EGL_ANGLE_image_d3d11_texture */ + +#ifndef EGL_ANGLE_create_context_backwards_compatible +#define EGL_ANGLE_create_context_backwards_compatible 1 +#define EGL_CONTEXT_OPENGL_BACKWARDS_COMPATIBLE_ANGLE 0x3483 +#endif /* EGL_ANGLE_create_context_backwards_compatible */ + +#ifndef 
EGL_ANGLE_device_cgl +#define EGL_ANGLE_device_cgl 1 +#define EGL_CGL_CONTEXT_ANGLE 0x3485 +#define EGL_CGL_PIXEL_FORMAT_ANGLE 0x3486 +#endif + +#ifndef EGL_ANGLE_ggp_stream_descriptor +#define EGL_ANGLE_ggp_stream_descriptor 1 +#define EGL_GGP_STREAM_DESCRIPTOR_ANGLE 0x348B +#endif /* EGL_ANGLE_ggp_stream_descriptor */ + +#ifndef EGL_ANGLE_swap_with_frame_token +#define EGL_ANGLE_swap_with_frame_token 1 +typedef khronos_uint64_t EGLFrameTokenANGLE; +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSWAPBUFFERSWITHFRAMETOKENANGLEPROC)(EGLDisplay dpy, EGLSurface surface, EGLFrameTokenANGLE frametoken); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSwapBuffersWithFrameTokenANGLE(EGLDisplay dpy, EGLSurface surface, EGLFrameTokenANGLE frametoken); +#endif +#endif /* EGL_ANGLE_swap_with_frame_token */ + +#ifndef EGL_ANGLE_device_eagl +#define EGL_ANGLE_device_eagl 1 +#define EGL_EAGL_CONTEXT_ANGLE 0x348C +#endif + +#ifndef EGL_ANGLE_display_semaphore_share_group +#define EGL_ANGLE_display_semaphore_share_group 1 +#define EGL_DISPLAY_SEMAPHORE_SHARE_GROUP_ANGLE 0x348D +#endif /* EGL_ANGLE_display_semaphore_share_group */ + +#ifndef EGL_ANGLE_external_context_and_surface +#define EGL_ANGLE_external_context_and_surface 1 +#define EGL_EXTERNAL_CONTEXT_ANGLE 0x348E +#define EGL_EXTERNAL_SURFACE_ANGLE 0x348F +#define EGL_EXTERNAL_CONTEXT_SAVE_STATE_ANGLE 0x3490 +#endif /* EGL_ANGLE_external_context_and_surface */ + +// clang-format on + +#endif // INCLUDE_EGL_EGLEXT_ANGLE_ diff --git a/vendor/angle/include/EGL/eglplatform.h b/vendor/angle/include/EGL/eglplatform.h new file mode 100644 index 00000000..9ebaf00a --- /dev/null +++ b/vendor/angle/include/EGL/eglplatform.h @@ -0,0 +1,175 @@ +#ifndef __eglplatform_h_ +#define __eglplatform_h_ + +/* +** Copyright 2007-2020 The Khronos Group Inc. +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* Platform-specific types and definitions for egl.h + * + * Adopters may modify khrplatform.h and this file to suit their platform. + * You are encouraged to submit all modifications to the Khronos group so that + * they can be included in future versions of this file. Please submit changes + * by filing an issue or pull request on the public Khronos EGL Registry, at + * https://www.github.com/KhronosGroup/EGL-Registry/ + */ + +#include <KHR/khrplatform.h> + +/* Macros used in EGL function prototype declarations. + * + * EGL functions should be prototyped as: + * + * EGLAPI return-type EGLAPIENTRY eglFunction(arguments); + * typedef return-type (EXPAPIENTRYP PFNEGLFUNCTIONPROC) (arguments); + * + * KHRONOS_APICALL and KHRONOS_APIENTRY are defined in KHR/khrplatform.h + */ + +#ifndef EGLAPI +#define EGLAPI KHRONOS_APICALL +#endif + +#ifndef EGLAPIENTRY +#define EGLAPIENTRY KHRONOS_APIENTRY +#endif +#define EGLAPIENTRYP EGLAPIENTRY* + +/* The types NativeDisplayType, NativeWindowType, and NativePixmapType + * are aliases of window-system-dependent types, such as X Display * or + * Windows Device Context. They must be defined in platform-specific + * code below. The EGL-prefixed versions of Native*Type are the same + * types, renamed in EGL 1.3 so all types in the API start with "EGL". + * + * Khronos STRONGLY RECOMMENDS that you use the default definitions + * provided below, since these changes affect both binary and source + * portability of applications using EGL running on different EGL + * implementations. 
+ */ + +#if defined(EGL_NO_PLATFORM_SPECIFIC_TYPES) + +typedef void *EGLNativeDisplayType; +typedef void *EGLNativePixmapType; +typedef void *EGLNativeWindowType; + +#elif defined(_WIN32) || defined(__VC32__) && !defined(__CYGWIN__) && !defined(__SCITECH_SNAP__) /* Win32 and WinCE */ +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN 1 +#endif +#include <windows.h> + +typedef HDC EGLNativeDisplayType; +typedef HBITMAP EGLNativePixmapType; + +#if !defined(WINAPI_FAMILY) || (WINAPI_FAMILY == WINAPI_FAMILY_DESKTOP_APP) /* Windows Desktop */ +typedef HWND EGLNativeWindowType; +#else /* Windows Store */ +#include <inspectable.h> +typedef IInspectable* EGLNativeWindowType; +#endif + +#elif defined(__EMSCRIPTEN__) + +typedef int EGLNativeDisplayType; +typedef int EGLNativePixmapType; +typedef int EGLNativeWindowType; + +#elif defined(__WINSCW__) || defined(__SYMBIAN32__) /* Symbian */ + +typedef int EGLNativeDisplayType; +typedef void *EGLNativePixmapType; +typedef void *EGLNativeWindowType; + +#elif defined(WL_EGL_PLATFORM) + +typedef struct wl_display *EGLNativeDisplayType; +typedef struct wl_egl_pixmap *EGLNativePixmapType; +typedef struct wl_egl_window *EGLNativeWindowType; + +#elif defined(__GBM__) + +typedef struct gbm_device *EGLNativeDisplayType; +typedef struct gbm_bo *EGLNativePixmapType; +typedef void *EGLNativeWindowType; + +#elif defined(__ANDROID__) || defined(ANDROID) + +struct ANativeWindow; +struct egl_native_pixmap_t; + +typedef void* EGLNativeDisplayType; +typedef struct egl_native_pixmap_t* EGLNativePixmapType; +typedef struct ANativeWindow* EGLNativeWindowType; + +#elif defined(USE_OZONE) + +typedef intptr_t EGLNativeDisplayType; +typedef intptr_t EGLNativePixmapType; +typedef intptr_t EGLNativeWindowType; + +#elif defined(__unix__) && defined(EGL_NO_X11) + +typedef void *EGLNativeDisplayType; +typedef khronos_uintptr_t EGLNativePixmapType; +typedef khronos_uintptr_t EGLNativeWindowType; + +#elif defined(__unix__) || defined(USE_X11) + +/* X11 (tentative) */ +#include <X11/Xlib.h> +#include <X11/Xutil.h> + +typedef Display *EGLNativeDisplayType; +typedef Pixmap EGLNativePixmapType; +typedef Window EGLNativeWindowType; + +#elif defined(__APPLE__) + +typedef int EGLNativeDisplayType; +typedef void *EGLNativePixmapType; +typedef void *EGLNativeWindowType; + +#elif defined(__HAIKU__) + +#include <kernel/image.h> + +typedef void *EGLNativeDisplayType; +typedef khronos_uintptr_t EGLNativePixmapType; +typedef khronos_uintptr_t EGLNativeWindowType; + +#elif defined(__Fuchsia__) + +typedef void *EGLNativeDisplayType; +typedef khronos_uintptr_t EGLNativePixmapType; +typedef khronos_uintptr_t EGLNativeWindowType; + +#else +#error "Platform not recognized" +#endif + +/* EGL 1.2 types, renamed for consistency in EGL 1.3 */ +typedef EGLNativeDisplayType NativeDisplayType; +typedef EGLNativePixmapType NativePixmapType; +typedef EGLNativeWindowType NativeWindowType; + + +/* Define EGLint. This must be a signed integral type large enough to contain + * all legal attribute names and values passed into and out of EGL, whether + * their type is boolean, bitmask, enumerant (symbolic constant), integer, + * handle, or other. While in general a 32-bit integer will suffice, if + * handles are 64 bit types, then EGLint should be defined as a signed 64-bit + * integer type. 
+ */ +typedef khronos_int32_t EGLint; + + +/* C++ / C typecast macros for special EGL handle values */ +#if defined(__cplusplus) +#define EGL_CAST(type, value) (static_cast<type>(value)) +#else +#define EGL_CAST(type, value) ((type) (value)) +#endif + +#endif /* __eglplatform_h */ diff --git a/vendor/angle/include/GLES/.clang-format b/vendor/angle/include/GLES/.clang-format new file mode 100644 index 00000000..06147100 --- /dev/null +++ b/vendor/angle/include/GLES/.clang-format @@ -0,0 +1,3 @@ + + +DisableFormat: true diff --git a/vendor/angle/include/GLES/README.md b/vendor/angle/include/GLES/README.md new file mode 100644 index 00000000..1aa31f5a --- /dev/null +++ b/vendor/angle/include/GLES/README.md @@ -0,0 +1,20 @@ +# ANGLE GLES 1.0 Headers + +The GLES 1.0 headers ANGLE uses are generated using the Khronos tools but modified to include function pointer types and function prototype guards. + +### Regenerating gl.h + +1. Install **Python 3** (not 2) with the **lxml** addon. You can do this using `pip install lxml` from your Python's Scripts folder. +1. Clone [https://github.com/KhronosGroup/OpenGL-Registry.git](https://github.com/KhronosGroup/OpenGL-Registry.git). +1. Edit `OpenGL-Registry/xml/genheaders.py`: + + 1. Look for the section titled `# GLES 1.x API + mandatory extensions - GLES/gl.h (no function pointers)` + 1. Change `prefixText = prefixStrings + gles1PlatformStrings + genDateCommentString,` to `prefixText = prefixStrings + gles1PlatformStrings + apiEntryPrefixStrings + genDateCommentString,` + 1. Change `genFuncPointers = False,` to `genFuncPointers = True,` + 1. Change `protectProto = False,` to `protectProto = 'nonzero',` + 1. Change `protectProtoStr = 'GL_GLEXT_PROTOTYPES',` to `protectProtoStr = 'GL_GLES_PROTOTYPES',` + +1. Set your working directory to `OpenGL-Registry/xml/`. +1. Run `python genheaders.py ../api/GLES/gl.h` +1. The generated header will now be in `OpenGL-Registry/api/GLES/gl.h`. You can copy the header over to this folder. +1. Also update `scripts/gl.xml` with the latest version from `OpenGL-Registry/xml/`. diff --git a/vendor/angle/include/GLES/egl.h b/vendor/angle/include/GLES/egl.h new file mode 100644 index 00000000..86f644c9 --- /dev/null +++ b/vendor/angle/include/GLES/egl.h @@ -0,0 +1,29 @@ +/* +** Copyright (c) 2008-2017 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* + * Skeleton egl.h to provide compatibility for early GLES 1.0 + * applications. Several early implementations included gl.h + * in egl.h leading applications to include only egl.h + */ + +#ifndef __legacy_egl_h_ +#define __legacy_egl_h_ + +#include <EGL/egl.h> +#include <GLES/gl.h> + +#endif /* __legacy_egl_h_ */ diff --git a/vendor/angle/include/GLES/gl.h b/vendor/angle/include/GLES/gl.h new file mode 100644 index 00000000..9f3ed343 --- /dev/null +++ b/vendor/angle/include/GLES/gl.h @@ -0,0 +1,743 @@ +#ifndef __gles1_gl_h_ +#define __gles1_gl_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2018 The Khronos Group Inc. 
+** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#include <GLES/glplatform.h> + +#ifndef GL_APIENTRYP +#define GL_APIENTRYP GL_APIENTRY* +#endif + +/* Generated on date 20181204 */ + +/* Generated C header for: + * API: gles1 + * Profile: common + * Versions considered: .* + * Versions emitted: .* + * Default extensions included: None + * Additional extensions included: ^(GL_OES_read_format|GL_OES_compressed_paletted_texture|GL_OES_point_size_array|GL_OES_point_sprite)$ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_VERSION_ES_CM_1_0 +#define GL_VERSION_ES_CM_1_0 1 +#include <KHR/khrplatform.h> +typedef khronos_int8_t GLbyte; +typedef khronos_float_t GLclampf; +typedef khronos_int16_t GLshort; +typedef khronos_uint16_t GLushort; +typedef void GLvoid; +typedef unsigned int GLenum; +typedef khronos_float_t GLfloat; +typedef khronos_int32_t GLfixed; +typedef unsigned int GLuint; +typedef khronos_ssize_t GLsizeiptr; +typedef khronos_intptr_t GLintptr; +typedef unsigned int GLbitfield; +typedef int GLint; +typedef khronos_uint8_t GLubyte; +typedef unsigned char GLboolean; +typedef int GLsizei; +typedef khronos_int32_t GLclampx; +#define GL_VERSION_ES_CL_1_0 1 +#define GL_VERSION_ES_CM_1_1 1 +#define GL_VERSION_ES_CL_1_1 1 +#define GL_DEPTH_BUFFER_BIT 0x00000100 +#define GL_STENCIL_BUFFER_BIT 0x00000400 +#define GL_COLOR_BUFFER_BIT 0x00004000 +#define GL_FALSE 0 +#define GL_TRUE 1 +#define GL_POINTS 0x0000 +#define GL_LINES 0x0001 +#define GL_LINE_LOOP 0x0002 +#define GL_LINE_STRIP 0x0003 +#define GL_TRIANGLES 0x0004 +#define GL_TRIANGLE_STRIP 0x0005 +#define GL_TRIANGLE_FAN 0x0006 +#define GL_NEVER 0x0200 +#define GL_LESS 0x0201 +#define GL_EQUAL 0x0202 +#define GL_LEQUAL 0x0203 +#define GL_GREATER 0x0204 +#define GL_NOTEQUAL 0x0205 +#define GL_GEQUAL 0x0206 +#define GL_ALWAYS 0x0207 +#define GL_ZERO 0 +#define GL_ONE 1 +#define GL_SRC_COLOR 0x0300 +#define GL_ONE_MINUS_SRC_COLOR 0x0301 +#define GL_SRC_ALPHA 0x0302 +#define GL_ONE_MINUS_SRC_ALPHA 0x0303 +#define GL_DST_ALPHA 0x0304 +#define GL_ONE_MINUS_DST_ALPHA 0x0305 +#define GL_DST_COLOR 0x0306 +#define GL_ONE_MINUS_DST_COLOR 0x0307 +#define GL_SRC_ALPHA_SATURATE 0x0308 +#define GL_CLIP_PLANE0 0x3000 +#define GL_CLIP_PLANE1 0x3001 +#define GL_CLIP_PLANE2 0x3002 
+#define GL_CLIP_PLANE3 0x3003 +#define GL_CLIP_PLANE4 0x3004 +#define GL_CLIP_PLANE5 0x3005 +#define GL_FRONT 0x0404 +#define GL_BACK 0x0405 +#define GL_FRONT_AND_BACK 0x0408 +#define GL_FOG 0x0B60 +#define GL_LIGHTING 0x0B50 +#define GL_TEXTURE_2D 0x0DE1 +#define GL_CULL_FACE 0x0B44 +#define GL_ALPHA_TEST 0x0BC0 +#define GL_BLEND 0x0BE2 +#define GL_COLOR_LOGIC_OP 0x0BF2 +#define GL_DITHER 0x0BD0 +#define GL_STENCIL_TEST 0x0B90 +#define GL_DEPTH_TEST 0x0B71 +#define GL_POINT_SMOOTH 0x0B10 +#define GL_LINE_SMOOTH 0x0B20 +#define GL_SCISSOR_TEST 0x0C11 +#define GL_COLOR_MATERIAL 0x0B57 +#define GL_NORMALIZE 0x0BA1 +#define GL_RESCALE_NORMAL 0x803A +#define GL_VERTEX_ARRAY 0x8074 +#define GL_NORMAL_ARRAY 0x8075 +#define GL_COLOR_ARRAY 0x8076 +#define GL_TEXTURE_COORD_ARRAY 0x8078 +#define GL_MULTISAMPLE 0x809D +#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E +#define GL_SAMPLE_ALPHA_TO_ONE 0x809F +#define GL_SAMPLE_COVERAGE 0x80A0 +#define GL_NO_ERROR 0 +#define GL_INVALID_ENUM 0x0500 +#define GL_INVALID_VALUE 0x0501 +#define GL_INVALID_OPERATION 0x0502 +#define GL_STACK_OVERFLOW 0x0503 +#define GL_STACK_UNDERFLOW 0x0504 +#define GL_OUT_OF_MEMORY 0x0505 +#define GL_EXP 0x0800 +#define GL_EXP2 0x0801 +#define GL_FOG_DENSITY 0x0B62 +#define GL_FOG_START 0x0B63 +#define GL_FOG_END 0x0B64 +#define GL_FOG_MODE 0x0B65 +#define GL_FOG_COLOR 0x0B66 +#define GL_CW 0x0900 +#define GL_CCW 0x0901 +#define GL_CURRENT_COLOR 0x0B00 +#define GL_CURRENT_NORMAL 0x0B02 +#define GL_CURRENT_TEXTURE_COORDS 0x0B03 +#define GL_POINT_SIZE 0x0B11 +#define GL_POINT_SIZE_MIN 0x8126 +#define GL_POINT_SIZE_MAX 0x8127 +#define GL_POINT_FADE_THRESHOLD_SIZE 0x8128 +#define GL_POINT_DISTANCE_ATTENUATION 0x8129 +#define GL_SMOOTH_POINT_SIZE_RANGE 0x0B12 +#define GL_LINE_WIDTH 0x0B21 +#define GL_SMOOTH_LINE_WIDTH_RANGE 0x0B22 +#define GL_ALIASED_POINT_SIZE_RANGE 0x846D +#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E +#define GL_CULL_FACE_MODE 0x0B45 +#define GL_FRONT_FACE 0x0B46 +#define GL_SHADE_MODEL 0x0B54 +#define GL_DEPTH_RANGE 0x0B70 +#define GL_DEPTH_WRITEMASK 0x0B72 +#define GL_DEPTH_CLEAR_VALUE 0x0B73 +#define GL_DEPTH_FUNC 0x0B74 +#define GL_STENCIL_CLEAR_VALUE 0x0B91 +#define GL_STENCIL_FUNC 0x0B92 +#define GL_STENCIL_VALUE_MASK 0x0B93 +#define GL_STENCIL_FAIL 0x0B94 +#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95 +#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96 +#define GL_STENCIL_REF 0x0B97 +#define GL_STENCIL_WRITEMASK 0x0B98 +#define GL_MATRIX_MODE 0x0BA0 +#define GL_VIEWPORT 0x0BA2 +#define GL_MODELVIEW_STACK_DEPTH 0x0BA3 +#define GL_PROJECTION_STACK_DEPTH 0x0BA4 +#define GL_TEXTURE_STACK_DEPTH 0x0BA5 +#define GL_MODELVIEW_MATRIX 0x0BA6 +#define GL_PROJECTION_MATRIX 0x0BA7 +#define GL_TEXTURE_MATRIX 0x0BA8 +#define GL_ALPHA_TEST_FUNC 0x0BC1 +#define GL_ALPHA_TEST_REF 0x0BC2 +#define GL_BLEND_DST 0x0BE0 +#define GL_BLEND_SRC 0x0BE1 +#define GL_LOGIC_OP_MODE 0x0BF0 +#define GL_SCISSOR_BOX 0x0C10 +#define GL_COLOR_CLEAR_VALUE 0x0C22 +#define GL_COLOR_WRITEMASK 0x0C23 +#define GL_MAX_LIGHTS 0x0D31 +#define GL_MAX_CLIP_PLANES 0x0D32 +#define GL_MAX_TEXTURE_SIZE 0x0D33 +#define GL_MAX_MODELVIEW_STACK_DEPTH 0x0D36 +#define GL_MAX_PROJECTION_STACK_DEPTH 0x0D38 +#define GL_MAX_TEXTURE_STACK_DEPTH 0x0D39 +#define GL_MAX_VIEWPORT_DIMS 0x0D3A +#define GL_MAX_TEXTURE_UNITS 0x84E2 +#define GL_SUBPIXEL_BITS 0x0D50 +#define GL_RED_BITS 0x0D52 +#define GL_GREEN_BITS 0x0D53 +#define GL_BLUE_BITS 0x0D54 +#define GL_ALPHA_BITS 0x0D55 +#define GL_DEPTH_BITS 0x0D56 +#define GL_STENCIL_BITS 0x0D57 +#define GL_POLYGON_OFFSET_UNITS 0x2A00 +#define 
GL_POLYGON_OFFSET_FILL 0x8037 +#define GL_POLYGON_OFFSET_FACTOR 0x8038 +#define GL_TEXTURE_BINDING_2D 0x8069 +#define GL_VERTEX_ARRAY_SIZE 0x807A +#define GL_VERTEX_ARRAY_TYPE 0x807B +#define GL_VERTEX_ARRAY_STRIDE 0x807C +#define GL_NORMAL_ARRAY_TYPE 0x807E +#define GL_NORMAL_ARRAY_STRIDE 0x807F +#define GL_COLOR_ARRAY_SIZE 0x8081 +#define GL_COLOR_ARRAY_TYPE 0x8082 +#define GL_COLOR_ARRAY_STRIDE 0x8083 +#define GL_TEXTURE_COORD_ARRAY_SIZE 0x8088 +#define GL_TEXTURE_COORD_ARRAY_TYPE 0x8089 +#define GL_TEXTURE_COORD_ARRAY_STRIDE 0x808A +#define GL_VERTEX_ARRAY_POINTER 0x808E +#define GL_NORMAL_ARRAY_POINTER 0x808F +#define GL_COLOR_ARRAY_POINTER 0x8090 +#define GL_TEXTURE_COORD_ARRAY_POINTER 0x8092 +#define GL_SAMPLE_BUFFERS 0x80A8 +#define GL_SAMPLES 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT 0x80AB +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3 +#define GL_DONT_CARE 0x1100 +#define GL_FASTEST 0x1101 +#define GL_NICEST 0x1102 +#define GL_PERSPECTIVE_CORRECTION_HINT 0x0C50 +#define GL_POINT_SMOOTH_HINT 0x0C51 +#define GL_LINE_SMOOTH_HINT 0x0C52 +#define GL_FOG_HINT 0x0C54 +#define GL_GENERATE_MIPMAP_HINT 0x8192 +#define GL_LIGHT_MODEL_AMBIENT 0x0B53 +#define GL_LIGHT_MODEL_TWO_SIDE 0x0B52 +#define GL_AMBIENT 0x1200 +#define GL_DIFFUSE 0x1201 +#define GL_SPECULAR 0x1202 +#define GL_POSITION 0x1203 +#define GL_SPOT_DIRECTION 0x1204 +#define GL_SPOT_EXPONENT 0x1205 +#define GL_SPOT_CUTOFF 0x1206 +#define GL_CONSTANT_ATTENUATION 0x1207 +#define GL_LINEAR_ATTENUATION 0x1208 +#define GL_QUADRATIC_ATTENUATION 0x1209 +#define GL_BYTE 0x1400 +#define GL_UNSIGNED_BYTE 0x1401 +#define GL_SHORT 0x1402 +#define GL_UNSIGNED_SHORT 0x1403 +#define GL_FLOAT 0x1406 +#define GL_FIXED 0x140C +#define GL_CLEAR 0x1500 +#define GL_AND 0x1501 +#define GL_AND_REVERSE 0x1502 +#define GL_COPY 0x1503 +#define GL_AND_INVERTED 0x1504 +#define GL_NOOP 0x1505 +#define GL_XOR 0x1506 +#define GL_OR 0x1507 +#define GL_NOR 0x1508 +#define GL_EQUIV 0x1509 +#define GL_INVERT 0x150A +#define GL_OR_REVERSE 0x150B +#define GL_COPY_INVERTED 0x150C +#define GL_OR_INVERTED 0x150D +#define GL_NAND 0x150E +#define GL_SET 0x150F +#define GL_EMISSION 0x1600 +#define GL_SHININESS 0x1601 +#define GL_AMBIENT_AND_DIFFUSE 0x1602 +#define GL_MODELVIEW 0x1700 +#define GL_PROJECTION 0x1701 +#define GL_TEXTURE 0x1702 +#define GL_ALPHA 0x1906 +#define GL_RGB 0x1907 +#define GL_RGBA 0x1908 +#define GL_LUMINANCE 0x1909 +#define GL_LUMINANCE_ALPHA 0x190A +#define GL_UNPACK_ALIGNMENT 0x0CF5 +#define GL_PACK_ALIGNMENT 0x0D05 +#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034 +#define GL_UNSIGNED_SHORT_5_6_5 0x8363 +#define GL_FLAT 0x1D00 +#define GL_SMOOTH 0x1D01 +#define GL_KEEP 0x1E00 +#define GL_REPLACE 0x1E01 +#define GL_INCR 0x1E02 +#define GL_DECR 0x1E03 +#define GL_VENDOR 0x1F00 +#define GL_RENDERER 0x1F01 +#define GL_VERSION 0x1F02 +#define GL_EXTENSIONS 0x1F03 +#define GL_MODULATE 0x2100 +#define GL_DECAL 0x2101 +#define GL_ADD 0x0104 +#define GL_TEXTURE_ENV_MODE 0x2200 +#define GL_TEXTURE_ENV_COLOR 0x2201 +#define GL_TEXTURE_ENV 0x2300 +#define GL_NEAREST 0x2600 +#define GL_LINEAR 0x2601 +#define GL_NEAREST_MIPMAP_NEAREST 0x2700 +#define GL_LINEAR_MIPMAP_NEAREST 0x2701 +#define GL_NEAREST_MIPMAP_LINEAR 0x2702 +#define GL_LINEAR_MIPMAP_LINEAR 0x2703 +#define GL_TEXTURE_MAG_FILTER 0x2800 +#define GL_TEXTURE_MIN_FILTER 0x2801 +#define GL_TEXTURE_WRAP_S 0x2802 +#define GL_TEXTURE_WRAP_T 0x2803 +#define GL_GENERATE_MIPMAP 
0x8191 +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 +#define GL_TEXTURE3 0x84C3 +#define GL_TEXTURE4 0x84C4 +#define GL_TEXTURE5 0x84C5 +#define GL_TEXTURE6 0x84C6 +#define GL_TEXTURE7 0x84C7 +#define GL_TEXTURE8 0x84C8 +#define GL_TEXTURE9 0x84C9 +#define GL_TEXTURE10 0x84CA +#define GL_TEXTURE11 0x84CB +#define GL_TEXTURE12 0x84CC +#define GL_TEXTURE13 0x84CD +#define GL_TEXTURE14 0x84CE +#define GL_TEXTURE15 0x84CF +#define GL_TEXTURE16 0x84D0 +#define GL_TEXTURE17 0x84D1 +#define GL_TEXTURE18 0x84D2 +#define GL_TEXTURE19 0x84D3 +#define GL_TEXTURE20 0x84D4 +#define GL_TEXTURE21 0x84D5 +#define GL_TEXTURE22 0x84D6 +#define GL_TEXTURE23 0x84D7 +#define GL_TEXTURE24 0x84D8 +#define GL_TEXTURE25 0x84D9 +#define GL_TEXTURE26 0x84DA +#define GL_TEXTURE27 0x84DB +#define GL_TEXTURE28 0x84DC +#define GL_TEXTURE29 0x84DD +#define GL_TEXTURE30 0x84DE +#define GL_TEXTURE31 0x84DF +#define GL_ACTIVE_TEXTURE 0x84E0 +#define GL_CLIENT_ACTIVE_TEXTURE 0x84E1 +#define GL_REPEAT 0x2901 +#define GL_CLAMP_TO_EDGE 0x812F +#define GL_LIGHT0 0x4000 +#define GL_LIGHT1 0x4001 +#define GL_LIGHT2 0x4002 +#define GL_LIGHT3 0x4003 +#define GL_LIGHT4 0x4004 +#define GL_LIGHT5 0x4005 +#define GL_LIGHT6 0x4006 +#define GL_LIGHT7 0x4007 +#define GL_ARRAY_BUFFER 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define GL_ARRAY_BUFFER_BINDING 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895 +#define GL_VERTEX_ARRAY_BUFFER_BINDING 0x8896 +#define GL_NORMAL_ARRAY_BUFFER_BINDING 0x8897 +#define GL_COLOR_ARRAY_BUFFER_BINDING 0x8898 +#define GL_TEXTURE_COORD_ARRAY_BUFFER_BINDING 0x889A +#define GL_STATIC_DRAW 0x88E4 +#define GL_DYNAMIC_DRAW 0x88E8 +#define GL_BUFFER_SIZE 0x8764 +#define GL_BUFFER_USAGE 0x8765 +#define GL_SUBTRACT 0x84E7 +#define GL_COMBINE 0x8570 +#define GL_COMBINE_RGB 0x8571 +#define GL_COMBINE_ALPHA 0x8572 +#define GL_RGB_SCALE 0x8573 +#define GL_ADD_SIGNED 0x8574 +#define GL_INTERPOLATE 0x8575 +#define GL_CONSTANT 0x8576 +#define GL_PRIMARY_COLOR 0x8577 +#define GL_PREVIOUS 0x8578 +#define GL_OPERAND0_RGB 0x8590 +#define GL_OPERAND1_RGB 0x8591 +#define GL_OPERAND2_RGB 0x8592 +#define GL_OPERAND0_ALPHA 0x8598 +#define GL_OPERAND1_ALPHA 0x8599 +#define GL_OPERAND2_ALPHA 0x859A +#define GL_ALPHA_SCALE 0x0D1C +#define GL_SRC0_RGB 0x8580 +#define GL_SRC1_RGB 0x8581 +#define GL_SRC2_RGB 0x8582 +#define GL_SRC0_ALPHA 0x8588 +#define GL_SRC1_ALPHA 0x8589 +#define GL_SRC2_ALPHA 0x858A +#define GL_DOT3_RGB 0x86AE +#define GL_DOT3_RGBA 0x86AF +typedef void (GL_APIENTRYP PFNGLALPHAFUNCPROC) (GLenum func, GLfloat ref); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHFPROC) (GLfloat d); +typedef void (GL_APIENTRYP PFNGLCLIPPLANEFPROC) (GLenum p, const GLfloat *eqn); +typedef void (GL_APIENTRYP PFNGLCOLOR4FPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEFPROC) (GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLFOGFPROC) (GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLFOGFVPROC) (GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLFRUSTUMFPROC) (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLGETCLIPPLANEFPROC) (GLenum plane, GLfloat *equation); +typedef void (GL_APIENTRYP PFNGLGETFLOATVPROC) (GLenum pname, GLfloat *data); +typedef void (GL_APIENTRYP PFNGLGETLIGHTFVPROC) (GLenum light, GLenum pname, GLfloat *params); +typedef 
void (GL_APIENTRYP PFNGLGETMATERIALFVPROC) (GLenum face, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXENVFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELFPROC) (GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELFVPROC) (GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLLIGHTFPROC) (GLenum light, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLLIGHTFVPROC) (GLenum light, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHPROC) (GLfloat width); +typedef void (GL_APIENTRYP PFNGLLOADMATRIXFPROC) (const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATERIALFPROC) (GLenum face, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLMATERIALFVPROC) (GLenum face, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLMULTMATRIXFPROC) (const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMULTITEXCOORD4FPROC) (GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q); +typedef void (GL_APIENTRYP PFNGLNORMAL3FPROC) (GLfloat nx, GLfloat ny, GLfloat nz); +typedef void (GL_APIENTRYP PFNGLORTHOFPROC) (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLPOINTPARAMETERFPROC) (GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLPOINTPARAMETERFVPROC) (GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLPOINTSIZEPROC) (GLfloat size); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETPROC) (GLfloat factor, GLfloat units); +typedef void (GL_APIENTRYP PFNGLROTATEFPROC) (GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLSCALEFPROC) (GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLTEXENVFPROC) (GLenum target, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXENVFVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFPROC) (GLenum target, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTRANSLATEFPROC) (GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLACTIVETEXTUREPROC) (GLenum texture); +typedef void (GL_APIENTRYP PFNGLALPHAFUNCXPROC) (GLenum func, GLfixed ref); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLBINDTEXTUREPROC) (GLenum target, GLuint texture); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCPROC) (GLenum sfactor, GLenum dfactor); +typedef void (GL_APIENTRYP PFNGLBUFFERDATAPROC) (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +typedef void (GL_APIENTRYP PFNGLBUFFERSUBDATAPROC) (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +typedef void (GL_APIENTRYP PFNGLCLEARPROC) (GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORXPROC) (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHXPROC) (GLfixed depth); +typedef void (GL_APIENTRYP PFNGLCLEARSTENCILPROC) (GLint s); +typedef void (GL_APIENTRYP PFNGLCLIENTACTIVETEXTUREPROC) (GLenum texture); +typedef void (GL_APIENTRYP PFNGLCLIPPLANEXPROC) (GLenum plane, const GLfixed *equation); +typedef void (GL_APIENTRYP PFNGLCOLOR4UBPROC) (GLubyte red, GLubyte green, GLubyte blue, GLubyte alpha); 
+typedef void (GL_APIENTRYP PFNGLCOLOR4XPROC) (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (GL_APIENTRYP PFNGLCOLORMASKPROC) (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +typedef void (GL_APIENTRYP PFNGLCOLORPOINTERPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOPYTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLCULLFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLDELETEBUFFERSPROC) (GLsizei n, const GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLDELETETEXTURESPROC) (GLsizei n, const GLuint *textures); +typedef void (GL_APIENTRYP PFNGLDEPTHFUNCPROC) (GLenum func); +typedef void (GL_APIENTRYP PFNGLDEPTHMASKPROC) (GLboolean flag); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEXPROC) (GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLDISABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLDISABLECLIENTSTATEPROC) (GLenum array); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSPROC) (GLenum mode, GLint first, GLsizei count); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLENABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLENABLECLIENTSTATEPROC) (GLenum array); +typedef void (GL_APIENTRYP PFNGLFINISHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFLUSHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFOGXPROC) (GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLFOGXVPROC) (GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLFRONTFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLFRUSTUMXPROC) (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANVPROC) (GLenum pname, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETCLIPPLANEXPROC) (GLenum plane, GLfixed *equation); +typedef void (GL_APIENTRYP PFNGLGENBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLGENTEXTURESPROC) (GLsizei n, GLuint *textures); +typedef GLenum (GL_APIENTRYP PFNGLGETERRORPROC) (void); +typedef void (GL_APIENTRYP PFNGLGETFIXEDVPROC) (GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERVPROC) (GLenum pname, GLint *data); +typedef void (GL_APIENTRYP PFNGLGETLIGHTXVPROC) (GLenum light, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETMATERIALXVPROC) (GLenum face, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETPOINTERVPROC) (GLenum pname, void **params); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGPROC) (GLenum name); +typedef void (GL_APIENTRYP PFNGLGETTEXENVIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP 
PFNGLGETTEXENVXVPROC) (GLenum target, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERXVPROC) (GLenum target, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLHINTPROC) (GLenum target, GLenum mode); +typedef GLboolean (GL_APIENTRYP PFNGLISBUFFERPROC) (GLuint buffer); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDPROC) (GLenum cap); +typedef GLboolean (GL_APIENTRYP PFNGLISTEXTUREPROC) (GLuint texture); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELXPROC) (GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELXVPROC) (GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLLIGHTXPROC) (GLenum light, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLLIGHTXVPROC) (GLenum light, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHXPROC) (GLfixed width); +typedef void (GL_APIENTRYP PFNGLLOADIDENTITYPROC) (void); +typedef void (GL_APIENTRYP PFNGLLOADMATRIXXPROC) (const GLfixed *m); +typedef void (GL_APIENTRYP PFNGLLOGICOPPROC) (GLenum opcode); +typedef void (GL_APIENTRYP PFNGLMATERIALXPROC) (GLenum face, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLMATERIALXVPROC) (GLenum face, GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLMATRIXMODEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLMULTMATRIXXPROC) (const GLfixed *m); +typedef void (GL_APIENTRYP PFNGLMULTITEXCOORD4XPROC) (GLenum texture, GLfixed s, GLfixed t, GLfixed r, GLfixed q); +typedef void (GL_APIENTRYP PFNGLNORMAL3XPROC) (GLfixed nx, GLfixed ny, GLfixed nz); +typedef void (GL_APIENTRYP PFNGLNORMALPOINTERPROC) (GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLORTHOXPROC) (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLPIXELSTOREIPROC) (GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLPOINTPARAMETERXPROC) (GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLPOINTPARAMETERXVPROC) (GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLPOINTSIZEXPROC) (GLfixed size); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETXPROC) (GLfixed factor, GLfixed units); +typedef void (GL_APIENTRYP PFNGLPOPMATRIXPROC) (void); +typedef void (GL_APIENTRYP PFNGLPUSHMATRIXPROC) (void); +typedef void (GL_APIENTRYP PFNGLREADPIXELSPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +typedef void (GL_APIENTRYP PFNGLROTATEXPROC) (GLfixed angle, GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGEPROC) (GLfloat value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGEXPROC) (GLclampx value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLSCALEXPROC) (GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLSCISSORPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSHADEMODELPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKPROC) (GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILOPPROC) (GLenum fail, GLenum zfail, GLenum zpass); +typedef void (GL_APIENTRYP PFNGLTEXCOORDPOINTERPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLTEXENVIPROC) (GLenum target, GLenum pname, GLint param); 
+typedef void (GL_APIENTRYP PFNGLTEXENVXPROC) (GLenum target, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLTEXENVIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXENVXVPROC) (GLenum target, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERXPROC) (GLenum target, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERXVPROC) (GLenum target, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTRANSLATEXPROC) (GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLVERTEXPOINTERPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLVIEWPORTPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +#if GL_GLES_PROTOTYPES +GL_API void GL_APIENTRY glAlphaFunc (GLenum func, GLfloat ref); +GL_API void GL_APIENTRY glClearColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_API void GL_APIENTRY glClearDepthf (GLfloat d); +GL_API void GL_APIENTRY glClipPlanef (GLenum p, const GLfloat *eqn); +GL_API void GL_APIENTRY glColor4f (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_API void GL_APIENTRY glDepthRangef (GLfloat n, GLfloat f); +GL_API void GL_APIENTRY glFogf (GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glFogfv (GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glFrustumf (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +GL_API void GL_APIENTRY glGetClipPlanef (GLenum plane, GLfloat *equation); +GL_API void GL_APIENTRY glGetFloatv (GLenum pname, GLfloat *data); +GL_API void GL_APIENTRY glGetLightfv (GLenum light, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetMaterialfv (GLenum face, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetTexEnvfv (GLenum target, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetTexParameterfv (GLenum target, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glLightModelf (GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glLightModelfv (GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glLightf (GLenum light, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glLightfv (GLenum light, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glLineWidth (GLfloat width); +GL_API void GL_APIENTRY glLoadMatrixf (const GLfloat *m); +GL_API void GL_APIENTRY glMaterialf (GLenum face, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glMaterialfv (GLenum face, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glMultMatrixf (const GLfloat *m); +GL_API void GL_APIENTRY glMultiTexCoord4f (GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q); +GL_API void GL_APIENTRY glNormal3f (GLfloat nx, GLfloat ny, GLfloat nz); +GL_API void GL_APIENTRY glOrthof (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +GL_API void GL_APIENTRY 
glPointParameterf (GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glPointParameterfv (GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glPointSize (GLfloat size); +GL_API void GL_APIENTRY glPolygonOffset (GLfloat factor, GLfloat units); +GL_API void GL_APIENTRY glRotatef (GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +GL_API void GL_APIENTRY glScalef (GLfloat x, GLfloat y, GLfloat z); +GL_API void GL_APIENTRY glTexEnvf (GLenum target, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glTexEnvfv (GLenum target, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glTexParameterf (GLenum target, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glTexParameterfv (GLenum target, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glTranslatef (GLfloat x, GLfloat y, GLfloat z); +GL_API void GL_APIENTRY glActiveTexture (GLenum texture); +GL_API void GL_APIENTRY glAlphaFuncx (GLenum func, GLfixed ref); +GL_API void GL_APIENTRY glBindBuffer (GLenum target, GLuint buffer); +GL_API void GL_APIENTRY glBindTexture (GLenum target, GLuint texture); +GL_API void GL_APIENTRY glBlendFunc (GLenum sfactor, GLenum dfactor); +GL_API void GL_APIENTRY glBufferData (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +GL_API void GL_APIENTRY glBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +GL_API void GL_APIENTRY glClear (GLbitfield mask); +GL_API void GL_APIENTRY glClearColorx (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GL_API void GL_APIENTRY glClearDepthx (GLfixed depth); +GL_API void GL_APIENTRY glClearStencil (GLint s); +GL_API void GL_APIENTRY glClientActiveTexture (GLenum texture); +GL_API void GL_APIENTRY glClipPlanex (GLenum plane, const GLfixed *equation); +GL_API void GL_APIENTRY glColor4ub (GLubyte red, GLubyte green, GLubyte blue, GLubyte alpha); +GL_API void GL_APIENTRY glColor4x (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GL_API void GL_APIENTRY glColorMask (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +GL_API void GL_APIENTRY glColorPointer (GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glCompressedTexImage2D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GL_API void GL_APIENTRY glCompressedTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GL_API void GL_APIENTRY glCopyTexImage2D (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GL_API void GL_APIENTRY glCopyTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glCullFace (GLenum mode); +GL_API void GL_APIENTRY glDeleteBuffers (GLsizei n, const GLuint *buffers); +GL_API void GL_APIENTRY glDeleteTextures (GLsizei n, const GLuint *textures); +GL_API void GL_APIENTRY glDepthFunc (GLenum func); +GL_API void GL_APIENTRY glDepthMask (GLboolean flag); +GL_API void GL_APIENTRY glDepthRangex (GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glDisable (GLenum cap); +GL_API void GL_APIENTRY glDisableClientState (GLenum array); +GL_API void GL_APIENTRY glDrawArrays (GLenum mode, GLint first, GLsizei count); +GL_API void GL_APIENTRY glDrawElements (GLenum mode, GLsizei count, GLenum type, const void *indices); 
+GL_API void GL_APIENTRY glEnable (GLenum cap); +GL_API void GL_APIENTRY glEnableClientState (GLenum array); +GL_API void GL_APIENTRY glFinish (void); +GL_API void GL_APIENTRY glFlush (void); +GL_API void GL_APIENTRY glFogx (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glFogxv (GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glFrontFace (GLenum mode); +GL_API void GL_APIENTRY glFrustumx (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glGetBooleanv (GLenum pname, GLboolean *data); +GL_API void GL_APIENTRY glGetBufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGetClipPlanex (GLenum plane, GLfixed *equation); +GL_API void GL_APIENTRY glGenBuffers (GLsizei n, GLuint *buffers); +GL_API void GL_APIENTRY glGenTextures (GLsizei n, GLuint *textures); +GL_API GLenum GL_APIENTRY glGetError (void); +GL_API void GL_APIENTRY glGetFixedv (GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetIntegerv (GLenum pname, GLint *data); +GL_API void GL_APIENTRY glGetLightxv (GLenum light, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetMaterialxv (GLenum face, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetPointerv (GLenum pname, void **params); +GL_API const GLubyte *GL_APIENTRY glGetString (GLenum name); +GL_API void GL_APIENTRY glGetTexEnviv (GLenum target, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGetTexEnvxv (GLenum target, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetTexParameteriv (GLenum target, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGetTexParameterxv (GLenum target, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glHint (GLenum target, GLenum mode); +GL_API GLboolean GL_APIENTRY glIsBuffer (GLuint buffer); +GL_API GLboolean GL_APIENTRY glIsEnabled (GLenum cap); +GL_API GLboolean GL_APIENTRY glIsTexture (GLuint texture); +GL_API void GL_APIENTRY glLightModelx (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glLightModelxv (GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glLightx (GLenum light, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glLightxv (GLenum light, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glLineWidthx (GLfixed width); +GL_API void GL_APIENTRY glLoadIdentity (void); +GL_API void GL_APIENTRY glLoadMatrixx (const GLfixed *m); +GL_API void GL_APIENTRY glLogicOp (GLenum opcode); +GL_API void GL_APIENTRY glMaterialx (GLenum face, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glMaterialxv (GLenum face, GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glMatrixMode (GLenum mode); +GL_API void GL_APIENTRY glMultMatrixx (const GLfixed *m); +GL_API void GL_APIENTRY glMultiTexCoord4x (GLenum texture, GLfixed s, GLfixed t, GLfixed r, GLfixed q); +GL_API void GL_APIENTRY glNormal3x (GLfixed nx, GLfixed ny, GLfixed nz); +GL_API void GL_APIENTRY glNormalPointer (GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glOrthox (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glPixelStorei (GLenum pname, GLint param); +GL_API void GL_APIENTRY glPointParameterx (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glPointParameterxv (GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glPointSizex (GLfixed size); +GL_API void GL_APIENTRY glPolygonOffsetx (GLfixed factor, GLfixed units); +GL_API void GL_APIENTRY glPopMatrix (void); +GL_API void GL_APIENTRY 
glPushMatrix (void); +GL_API void GL_APIENTRY glReadPixels (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +GL_API void GL_APIENTRY glRotatex (GLfixed angle, GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glSampleCoverage (GLfloat value, GLboolean invert); +GL_API void GL_APIENTRY glSampleCoveragex (GLclampx value, GLboolean invert); +GL_API void GL_APIENTRY glScalex (GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glScissor (GLint x, GLint y, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glShadeModel (GLenum mode); +GL_API void GL_APIENTRY glStencilFunc (GLenum func, GLint ref, GLuint mask); +GL_API void GL_APIENTRY glStencilMask (GLuint mask); +GL_API void GL_APIENTRY glStencilOp (GLenum fail, GLenum zfail, GLenum zpass); +GL_API void GL_APIENTRY glTexCoordPointer (GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glTexEnvi (GLenum target, GLenum pname, GLint param); +GL_API void GL_APIENTRY glTexEnvx (GLenum target, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexEnviv (GLenum target, GLenum pname, const GLint *params); +GL_API void GL_APIENTRY glTexEnvxv (GLenum target, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glTexImage2D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GL_API void GL_APIENTRY glTexParameteri (GLenum target, GLenum pname, GLint param); +GL_API void GL_APIENTRY glTexParameterx (GLenum target, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexParameteriv (GLenum target, GLenum pname, const GLint *params); +GL_API void GL_APIENTRY glTexParameterxv (GLenum target, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GL_API void GL_APIENTRY glTranslatex (GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glVertexPointer (GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glViewport (GLint x, GLint y, GLsizei width, GLsizei height); +#endif +#endif /* GL_VERSION_ES_CM_1_0 */ + +#ifndef GL_OES_compressed_paletted_texture +#define GL_OES_compressed_paletted_texture 1 +#define GL_PALETTE4_RGB8_OES 0x8B90 +#define GL_PALETTE4_RGBA8_OES 0x8B91 +#define GL_PALETTE4_R5_G6_B5_OES 0x8B92 +#define GL_PALETTE4_RGBA4_OES 0x8B93 +#define GL_PALETTE4_RGB5_A1_OES 0x8B94 +#define GL_PALETTE8_RGB8_OES 0x8B95 +#define GL_PALETTE8_RGBA8_OES 0x8B96 +#define GL_PALETTE8_R5_G6_B5_OES 0x8B97 +#define GL_PALETTE8_RGBA4_OES 0x8B98 +#define GL_PALETTE8_RGB5_A1_OES 0x8B99 +#endif /* GL_OES_compressed_paletted_texture */ + +#ifndef GL_OES_point_size_array +#define GL_OES_point_size_array 1 +#define GL_POINT_SIZE_ARRAY_OES 0x8B9C +#define GL_POINT_SIZE_ARRAY_TYPE_OES 0x898A +#define GL_POINT_SIZE_ARRAY_STRIDE_OES 0x898B +#define GL_POINT_SIZE_ARRAY_POINTER_OES 0x898C +#define GL_POINT_SIZE_ARRAY_BUFFER_BINDING_OES 0x8B9F +typedef void (GL_APIENTRYP PFNGLPOINTSIZEPOINTEROESPROC) (GLenum type, GLsizei stride, const void *pointer); +#if GL_GLES_PROTOTYPES +GL_API void GL_APIENTRY glPointSizePointerOES (GLenum type, GLsizei stride, const void *pointer); +#endif +#endif /* GL_OES_point_size_array */ + +#ifndef GL_OES_point_sprite +#define GL_OES_point_sprite 1 +#define GL_POINT_SPRITE_OES 0x8861 +#define GL_COORD_REPLACE_OES 0x8862 +#endif /* 
GL_OES_point_sprite */ + +#ifndef GL_OES_read_format +#define GL_OES_read_format 1 +#define GL_IMPLEMENTATION_COLOR_READ_TYPE_OES 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT_OES 0x8B9B +#endif /* GL_OES_read_format */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/angle/include/GLES/glext.h b/vendor/angle/include/GLES/glext.h new file mode 100644 index 00000000..ddd40781 --- /dev/null +++ b/vendor/angle/include/GLES/glext.h @@ -0,0 +1,965 @@ +#ifndef __glext_h_ +#define __glext_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2017 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. 
The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#ifndef GL_APIENTRYP +#define GL_APIENTRYP GL_APIENTRY* +#endif + +/* Generated on date 20171212 */ + +/* Generated C header for: + * API: gles1 + * Profile: common + * Versions considered: .* + * Versions emitted: _nomatch_^ + * Default extensions included: gles1 + * Additional extensions included: _nomatch_^ + * Extensions removed: ^(GL_OES_read_format|GL_OES_compressed_paletted_texture|GL_OES_point_size_array|GL_OES_point_sprite)$ + */ + +#ifndef GL_OES_EGL_image +#define GL_OES_EGL_image 1 +typedef void *GLeglImageOES; +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETTEXTURE2DOESPROC) (GLenum target, GLeglImageOES image); +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETRENDERBUFFERSTORAGEOESPROC) (GLenum target, GLeglImageOES image); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glEGLImageTargetTexture2DOES (GLenum target, GLeglImageOES image); +GL_API void GL_APIENTRY glEGLImageTargetRenderbufferStorageOES (GLenum target, GLeglImageOES image); +#endif +#endif /* GL_OES_EGL_image */ + +#ifndef GL_OES_blend_equation_separate +#define GL_OES_blend_equation_separate 1 +#define GL_BLEND_EQUATION_RGB_OES 0x8009 +#define GL_BLEND_EQUATION_ALPHA_OES 0x883D +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEOESPROC) (GLenum modeRGB, GLenum modeAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glBlendEquationSeparateOES (GLenum modeRGB, GLenum modeAlpha); +#endif +#endif /* GL_OES_blend_equation_separate */ + +#ifndef GL_OES_blend_func_separate +#define GL_OES_blend_func_separate 1 +#define GL_BLEND_DST_RGB_OES 0x80C8 +#define GL_BLEND_SRC_RGB_OES 0x80C9 +#define GL_BLEND_DST_ALPHA_OES 0x80CA +#define GL_BLEND_SRC_ALPHA_OES 0x80CB +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEOESPROC) (GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glBlendFuncSeparateOES (GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +#endif +#endif /* GL_OES_blend_func_separate */ + +#ifndef GL_OES_blend_subtract +#define GL_OES_blend_subtract 1 +#define GL_BLEND_EQUATION_OES 0x8009 +#define GL_FUNC_ADD_OES 0x8006 +#define GL_FUNC_SUBTRACT_OES 0x800A +#define GL_FUNC_REVERSE_SUBTRACT_OES 0x800B +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONOESPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glBlendEquationOES (GLenum mode); +#endif +#endif /* GL_OES_blend_subtract */ + +#ifndef GL_OES_byte_coordinates +#define GL_OES_byte_coordinates 1 +#endif /* GL_OES_byte_coordinates */ + +#ifndef GL_OES_compressed_ETC1_RGB8_sub_texture +#define GL_OES_compressed_ETC1_RGB8_sub_texture 1 +#endif /* GL_OES_compressed_ETC1_RGB8_sub_texture */ + +#ifndef GL_OES_compressed_ETC1_RGB8_texture +#define GL_OES_compressed_ETC1_RGB8_texture 1 +#define GL_ETC1_RGB8_OES 0x8D64 +#endif /* GL_OES_compressed_ETC1_RGB8_texture */ + +#ifndef GL_OES_depth24 +#define GL_OES_depth24 1 +#define GL_DEPTH_COMPONENT24_OES 0x81A6 +#endif /* GL_OES_depth24 */ + +#ifndef GL_OES_depth32 +#define GL_OES_depth32 1 +#define GL_DEPTH_COMPONENT32_OES 0x81A7 +#endif /* GL_OES_depth32 */ + +#ifndef GL_OES_draw_texture +#define GL_OES_draw_texture 1 +#define GL_TEXTURE_CROP_RECT_OES 0x8B9D +typedef void (GL_APIENTRYP PFNGLDRAWTEXSOESPROC) (GLshort x, GLshort y, GLshort z, GLshort width, GLshort height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXIOESPROC) (GLint x, GLint y, 
GLint z, GLint width, GLint height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXXOESPROC) (GLfixed x, GLfixed y, GLfixed z, GLfixed width, GLfixed height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXSVOESPROC) (const GLshort *coords); +typedef void (GL_APIENTRYP PFNGLDRAWTEXIVOESPROC) (const GLint *coords); +typedef void (GL_APIENTRYP PFNGLDRAWTEXXVOESPROC) (const GLfixed *coords); +typedef void (GL_APIENTRYP PFNGLDRAWTEXFOESPROC) (GLfloat x, GLfloat y, GLfloat z, GLfloat width, GLfloat height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXFVOESPROC) (const GLfloat *coords); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glDrawTexsOES (GLshort x, GLshort y, GLshort z, GLshort width, GLshort height); +GL_API void GL_APIENTRY glDrawTexiOES (GLint x, GLint y, GLint z, GLint width, GLint height); +GL_API void GL_APIENTRY glDrawTexxOES (GLfixed x, GLfixed y, GLfixed z, GLfixed width, GLfixed height); +GL_API void GL_APIENTRY glDrawTexsvOES (const GLshort *coords); +GL_API void GL_APIENTRY glDrawTexivOES (const GLint *coords); +GL_API void GL_APIENTRY glDrawTexxvOES (const GLfixed *coords); +GL_API void GL_APIENTRY glDrawTexfOES (GLfloat x, GLfloat y, GLfloat z, GLfloat width, GLfloat height); +GL_API void GL_APIENTRY glDrawTexfvOES (const GLfloat *coords); +#endif +#endif /* GL_OES_draw_texture */ + +#ifndef GL_OES_element_index_uint +#define GL_OES_element_index_uint 1 +#define GL_UNSIGNED_INT 0x1405 +#endif /* GL_OES_element_index_uint */ + +#ifndef GL_OES_extended_matrix_palette +#define GL_OES_extended_matrix_palette 1 +#endif /* GL_OES_extended_matrix_palette */ + +#ifndef GL_OES_fbo_render_mipmap +#define GL_OES_fbo_render_mipmap 1 +#endif /* GL_OES_fbo_render_mipmap */ + +#ifndef GL_OES_fixed_point +#define GL_OES_fixed_point 1 +#define GL_FIXED_OES 0x140C +typedef void (GL_APIENTRYP PFNGLALPHAFUNCXOESPROC) (GLenum func, GLfixed ref); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORXOESPROC) (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHXOESPROC) (GLfixed depth); +typedef void (GL_APIENTRYP PFNGLCLIPPLANEXOESPROC) (GLenum plane, const GLfixed *equation); +typedef void (GL_APIENTRYP PFNGLCOLOR4XOESPROC) (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEXOESPROC) (GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLFOGXOESPROC) (GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLFOGXVOESPROC) (GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLFRUSTUMXOESPROC) (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLGETCLIPPLANEXOESPROC) (GLenum plane, GLfixed *equation); +typedef void (GL_APIENTRYP PFNGLGETFIXEDVOESPROC) (GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETTEXENVXVOESPROC) (GLenum target, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERXVOESPROC) (GLenum target, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELXOESPROC) (GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELXVOESPROC) (GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLLIGHTXOESPROC) (GLenum light, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLLIGHTXVOESPROC) (GLenum light, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHXOESPROC) (GLfixed width); +typedef void (GL_APIENTRYP PFNGLLOADMATRIXXOESPROC) (const GLfixed *m); +typedef void (GL_APIENTRYP PFNGLMATERIALXOESPROC) 
(GLenum face, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLMATERIALXVOESPROC) (GLenum face, GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLMULTMATRIXXOESPROC) (const GLfixed *m); +typedef void (GL_APIENTRYP PFNGLMULTITEXCOORD4XOESPROC) (GLenum texture, GLfixed s, GLfixed t, GLfixed r, GLfixed q); +typedef void (GL_APIENTRYP PFNGLNORMAL3XOESPROC) (GLfixed nx, GLfixed ny, GLfixed nz); +typedef void (GL_APIENTRYP PFNGLORTHOXOESPROC) (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLPOINTPARAMETERXVOESPROC) (GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLPOINTSIZEXOESPROC) (GLfixed size); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETXOESPROC) (GLfixed factor, GLfixed units); +typedef void (GL_APIENTRYP PFNGLROTATEXOESPROC) (GLfixed angle, GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLSCALEXOESPROC) (GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLTEXENVXOESPROC) (GLenum target, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLTEXENVXVOESPROC) (GLenum target, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERXOESPROC) (GLenum target, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERXVOESPROC) (GLenum target, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLTRANSLATEXOESPROC) (GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLGETLIGHTXVOESPROC) (GLenum light, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETMATERIALXVOESPROC) (GLenum face, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLPOINTPARAMETERXOESPROC) (GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGEXOESPROC) (GLclampx value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLGETTEXGENXVOESPROC) (GLenum coord, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLTEXGENXOESPROC) (GLenum coord, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLTEXGENXVOESPROC) (GLenum coord, GLenum pname, const GLfixed *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glAlphaFuncxOES (GLenum func, GLfixed ref); +GL_API void GL_APIENTRY glClearColorxOES (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GL_API void GL_APIENTRY glClearDepthxOES (GLfixed depth); +GL_API void GL_APIENTRY glClipPlanexOES (GLenum plane, const GLfixed *equation); +GL_API void GL_APIENTRY glColor4xOES (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GL_API void GL_APIENTRY glDepthRangexOES (GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glFogxOES (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glFogxvOES (GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glFrustumxOES (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glGetClipPlanexOES (GLenum plane, GLfixed *equation); +GL_API void GL_APIENTRY glGetFixedvOES (GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetTexEnvxvOES (GLenum target, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetTexParameterxvOES (GLenum target, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glLightModelxOES (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glLightModelxvOES (GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glLightxOES (GLenum light, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glLightxvOES (GLenum light, GLenum pname, const 
GLfixed *params); +GL_API void GL_APIENTRY glLineWidthxOES (GLfixed width); +GL_API void GL_APIENTRY glLoadMatrixxOES (const GLfixed *m); +GL_API void GL_APIENTRY glMaterialxOES (GLenum face, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glMaterialxvOES (GLenum face, GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glMultMatrixxOES (const GLfixed *m); +GL_API void GL_APIENTRY glMultiTexCoord4xOES (GLenum texture, GLfixed s, GLfixed t, GLfixed r, GLfixed q); +GL_API void GL_APIENTRY glNormal3xOES (GLfixed nx, GLfixed ny, GLfixed nz); +GL_API void GL_APIENTRY glOrthoxOES (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glPointParameterxvOES (GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glPointSizexOES (GLfixed size); +GL_API void GL_APIENTRY glPolygonOffsetxOES (GLfixed factor, GLfixed units); +GL_API void GL_APIENTRY glRotatexOES (GLfixed angle, GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glScalexOES (GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glTexEnvxOES (GLenum target, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexEnvxvOES (GLenum target, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glTexParameterxOES (GLenum target, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexParameterxvOES (GLenum target, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glTranslatexOES (GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glGetLightxvOES (GLenum light, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetMaterialxvOES (GLenum face, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glPointParameterxOES (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glSampleCoveragexOES (GLclampx value, GLboolean invert); +GL_API void GL_APIENTRY glGetTexGenxvOES (GLenum coord, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glTexGenxOES (GLenum coord, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexGenxvOES (GLenum coord, GLenum pname, const GLfixed *params); +#endif +#endif /* GL_OES_fixed_point */ + +#ifndef GL_OES_framebuffer_object +#define GL_OES_framebuffer_object 1 +#define GL_NONE_OES 0 +#define GL_FRAMEBUFFER_OES 0x8D40 +#define GL_RENDERBUFFER_OES 0x8D41 +#define GL_RGBA4_OES 0x8056 +#define GL_RGB5_A1_OES 0x8057 +#define GL_RGB565_OES 0x8D62 +#define GL_DEPTH_COMPONENT16_OES 0x81A5 +#define GL_RENDERBUFFER_WIDTH_OES 0x8D42 +#define GL_RENDERBUFFER_HEIGHT_OES 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT_OES 0x8D44 +#define GL_RENDERBUFFER_RED_SIZE_OES 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE_OES 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE_OES 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE_OES 0x8D53 +#define GL_RENDERBUFFER_DEPTH_SIZE_OES 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE_OES 0x8D55 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_OES 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_OES 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_OES 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_OES 0x8CD3 +#define GL_COLOR_ATTACHMENT0_OES 0x8CE0 +#define GL_DEPTH_ATTACHMENT_OES 0x8D00 +#define GL_STENCIL_ATTACHMENT_OES 0x8D20 +#define GL_FRAMEBUFFER_COMPLETE_OES 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT_OES 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_OES 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_OES 0x8CD9 +#define GL_FRAMEBUFFER_INCOMPLETE_FORMATS_OES 0x8CDA +#define GL_FRAMEBUFFER_UNSUPPORTED_OES 0x8CDD +#define 
GL_FRAMEBUFFER_BINDING_OES 0x8CA6 +#define GL_RENDERBUFFER_BINDING_OES 0x8CA7 +#define GL_MAX_RENDERBUFFER_SIZE_OES 0x84E8 +#define GL_INVALID_FRAMEBUFFER_OPERATION_OES 0x0506 +typedef GLboolean (GL_APIENTRYP PFNGLISRENDERBUFFEROESPROC) (GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLBINDRENDERBUFFEROESPROC) (GLenum target, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLDELETERENDERBUFFERSOESPROC) (GLsizei n, const GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLGENRENDERBUFFERSOESPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEOESPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVOESPROC) (GLenum target, GLenum pname, GLint *params); +typedef GLboolean (GL_APIENTRYP PFNGLISFRAMEBUFFEROESPROC) (GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLBINDFRAMEBUFFEROESPROC) (GLenum target, GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLDELETEFRAMEBUFFERSOESPROC) (GLsizei n, const GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLGENFRAMEBUFFERSOESPROC) (GLsizei n, GLuint *framebuffers); +typedef GLenum (GL_APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSOESPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFEROESPROC) (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DOESPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVOESPROC) (GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGENERATEMIPMAPOESPROC) (GLenum target); +#ifdef GL_GLEXT_PROTOTYPES +GL_API GLboolean GL_APIENTRY glIsRenderbufferOES (GLuint renderbuffer); +GL_API void GL_APIENTRY glBindRenderbufferOES (GLenum target, GLuint renderbuffer); +GL_API void GL_APIENTRY glDeleteRenderbuffersOES (GLsizei n, const GLuint *renderbuffers); +GL_API void GL_APIENTRY glGenRenderbuffersOES (GLsizei n, GLuint *renderbuffers); +GL_API void GL_APIENTRY glRenderbufferStorageOES (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glGetRenderbufferParameterivOES (GLenum target, GLenum pname, GLint *params); +GL_API GLboolean GL_APIENTRY glIsFramebufferOES (GLuint framebuffer); +GL_API void GL_APIENTRY glBindFramebufferOES (GLenum target, GLuint framebuffer); +GL_API void GL_APIENTRY glDeleteFramebuffersOES (GLsizei n, const GLuint *framebuffers); +GL_API void GL_APIENTRY glGenFramebuffersOES (GLsizei n, GLuint *framebuffers); +GL_API GLenum GL_APIENTRY glCheckFramebufferStatusOES (GLenum target); +GL_API void GL_APIENTRY glFramebufferRenderbufferOES (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GL_API void GL_APIENTRY glFramebufferTexture2DOES (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GL_API void GL_APIENTRY glGetFramebufferAttachmentParameterivOES (GLenum target, GLenum attachment, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGenerateMipmapOES (GLenum target); +#endif +#endif /* GL_OES_framebuffer_object */ + +#ifndef GL_OES_mapbuffer +#define GL_OES_mapbuffer 1 +#define GL_WRITE_ONLY_OES 0x88B9 +#define GL_BUFFER_ACCESS_OES 0x88BB +#define GL_BUFFER_MAPPED_OES 0x88BC +#define GL_BUFFER_MAP_POINTER_OES 0x88BD +typedef void *(GL_APIENTRYP PFNGLMAPBUFFEROESPROC) (GLenum target, GLenum access); +typedef 
GLboolean (GL_APIENTRYP PFNGLUNMAPBUFFEROESPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVOESPROC) (GLenum target, GLenum pname, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void *GL_APIENTRY glMapBufferOES (GLenum target, GLenum access); +GL_API GLboolean GL_APIENTRY glUnmapBufferOES (GLenum target); +GL_API void GL_APIENTRY glGetBufferPointervOES (GLenum target, GLenum pname, void **params); +#endif +#endif /* GL_OES_mapbuffer */ + +#ifndef GL_OES_matrix_get +#define GL_OES_matrix_get 1 +#define GL_MODELVIEW_MATRIX_FLOAT_AS_INT_BITS_OES 0x898D +#define GL_PROJECTION_MATRIX_FLOAT_AS_INT_BITS_OES 0x898E +#define GL_TEXTURE_MATRIX_FLOAT_AS_INT_BITS_OES 0x898F +#endif /* GL_OES_matrix_get */ + +#ifndef GL_OES_matrix_palette +#define GL_OES_matrix_palette 1 +#define GL_MAX_VERTEX_UNITS_OES 0x86A4 +#define GL_MAX_PALETTE_MATRICES_OES 0x8842 +#define GL_MATRIX_PALETTE_OES 0x8840 +#define GL_MATRIX_INDEX_ARRAY_OES 0x8844 +#define GL_WEIGHT_ARRAY_OES 0x86AD +#define GL_CURRENT_PALETTE_MATRIX_OES 0x8843 +#define GL_MATRIX_INDEX_ARRAY_SIZE_OES 0x8846 +#define GL_MATRIX_INDEX_ARRAY_TYPE_OES 0x8847 +#define GL_MATRIX_INDEX_ARRAY_STRIDE_OES 0x8848 +#define GL_MATRIX_INDEX_ARRAY_POINTER_OES 0x8849 +#define GL_MATRIX_INDEX_ARRAY_BUFFER_BINDING_OES 0x8B9E +#define GL_WEIGHT_ARRAY_SIZE_OES 0x86AB +#define GL_WEIGHT_ARRAY_TYPE_OES 0x86A9 +#define GL_WEIGHT_ARRAY_STRIDE_OES 0x86AA +#define GL_WEIGHT_ARRAY_POINTER_OES 0x86AC +#define GL_WEIGHT_ARRAY_BUFFER_BINDING_OES 0x889E +typedef void (GL_APIENTRYP PFNGLCURRENTPALETTEMATRIXOESPROC) (GLuint matrixpaletteindex); +typedef void (GL_APIENTRYP PFNGLLOADPALETTEFROMMODELVIEWMATRIXOESPROC) (void); +typedef void (GL_APIENTRYP PFNGLMATRIXINDEXPOINTEROESPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLWEIGHTPOINTEROESPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glCurrentPaletteMatrixOES (GLuint matrixpaletteindex); +GL_API void GL_APIENTRY glLoadPaletteFromModelViewMatrixOES (void); +GL_API void GL_APIENTRY glMatrixIndexPointerOES (GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glWeightPointerOES (GLint size, GLenum type, GLsizei stride, const void *pointer); +#endif +#endif /* GL_OES_matrix_palette */ + +#ifndef GL_OES_packed_depth_stencil +#define GL_OES_packed_depth_stencil 1 +#define GL_DEPTH_STENCIL_OES 0x84F9 +#define GL_UNSIGNED_INT_24_8_OES 0x84FA +#define GL_DEPTH24_STENCIL8_OES 0x88F0 +#endif /* GL_OES_packed_depth_stencil */ + +#ifndef GL_OES_query_matrix +#define GL_OES_query_matrix 1 +typedef GLbitfield (GL_APIENTRYP PFNGLQUERYMATRIXXOESPROC) (GLfixed *mantissa, GLint *exponent); +#ifdef GL_GLEXT_PROTOTYPES +GL_API GLbitfield GL_APIENTRY glQueryMatrixxOES (GLfixed *mantissa, GLint *exponent); +#endif +#endif /* GL_OES_query_matrix */ + +#ifndef GL_OES_required_internalformat +#define GL_OES_required_internalformat 1 +#define GL_ALPHA8_OES 0x803C +#define GL_LUMINANCE4_ALPHA4_OES 0x8043 +#define GL_LUMINANCE8_ALPHA8_OES 0x8045 +#define GL_LUMINANCE8_OES 0x8040 +#define GL_RGB8_OES 0x8051 +#define GL_RGBA8_OES 0x8058 +#define GL_RGB10_EXT 0x8052 +#define GL_RGB10_A2_EXT 0x8059 +#endif /* GL_OES_required_internalformat */ + +#ifndef GL_OES_rgb8_rgba8 +#define GL_OES_rgb8_rgba8 1 +#endif /* GL_OES_rgb8_rgba8 */ + +#ifndef GL_OES_single_precision +#define GL_OES_single_precision 1 +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHFOESPROC) (GLclampf depth); 
+typedef void (GL_APIENTRYP PFNGLCLIPPLANEFOESPROC) (GLenum plane, const GLfloat *equation); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEFOESPROC) (GLclampf n, GLclampf f); +typedef void (GL_APIENTRYP PFNGLFRUSTUMFOESPROC) (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLGETCLIPPLANEFOESPROC) (GLenum plane, GLfloat *equation); +typedef void (GL_APIENTRYP PFNGLORTHOFOESPROC) (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glClearDepthfOES (GLclampf depth); +GL_API void GL_APIENTRY glClipPlanefOES (GLenum plane, const GLfloat *equation); +GL_API void GL_APIENTRY glDepthRangefOES (GLclampf n, GLclampf f); +GL_API void GL_APIENTRY glFrustumfOES (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +GL_API void GL_APIENTRY glGetClipPlanefOES (GLenum plane, GLfloat *equation); +GL_API void GL_APIENTRY glOrthofOES (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +#endif +#endif /* GL_OES_single_precision */ + +#ifndef GL_OES_stencil1 +#define GL_OES_stencil1 1 +#define GL_STENCIL_INDEX1_OES 0x8D46 +#endif /* GL_OES_stencil1 */ + +#ifndef GL_OES_stencil4 +#define GL_OES_stencil4 1 +#define GL_STENCIL_INDEX4_OES 0x8D47 +#endif /* GL_OES_stencil4 */ + +#ifndef GL_OES_stencil8 +#define GL_OES_stencil8 1 +#define GL_STENCIL_INDEX8_OES 0x8D48 +#endif /* GL_OES_stencil8 */ + +#ifndef GL_OES_stencil_wrap +#define GL_OES_stencil_wrap 1 +#define GL_INCR_WRAP_OES 0x8507 +#define GL_DECR_WRAP_OES 0x8508 +#endif /* GL_OES_stencil_wrap */ + +#ifndef GL_OES_surfaceless_context +#define GL_OES_surfaceless_context 1 +#define GL_FRAMEBUFFER_UNDEFINED_OES 0x8219 +#endif /* GL_OES_surfaceless_context */ + +#ifndef GL_OES_texture_cube_map +#define GL_OES_texture_cube_map 1 +#define GL_NORMAL_MAP_OES 0x8511 +#define GL_REFLECTION_MAP_OES 0x8512 +#define GL_TEXTURE_CUBE_MAP_OES 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP_OES 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X_OES 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X_OES 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y_OES 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_OES 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z_OES 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_OES 0x851A +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE_OES 0x851C +#define GL_TEXTURE_GEN_MODE_OES 0x2500 +#define GL_TEXTURE_GEN_STR_OES 0x8D60 +typedef void (GL_APIENTRYP PFNGLTEXGENFOESPROC) (GLenum coord, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXGENFVOESPROC) (GLenum coord, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXGENIOESPROC) (GLenum coord, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXGENIVOESPROC) (GLenum coord, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXGENFVOESPROC) (GLenum coord, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXGENIVOESPROC) (GLenum coord, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glTexGenfOES (GLenum coord, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glTexGenfvOES (GLenum coord, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glTexGeniOES (GLenum coord, GLenum pname, GLint param); +GL_API void GL_APIENTRY glTexGenivOES (GLenum coord, GLenum pname, const GLint *params); +GL_API void GL_APIENTRY glGetTexGenfvOES (GLenum coord, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetTexGenivOES (GLenum coord, GLenum pname, GLint 
*params); +#endif +#endif /* GL_OES_texture_cube_map */ + +#ifndef GL_OES_texture_env_crossbar +#define GL_OES_texture_env_crossbar 1 +#endif /* GL_OES_texture_env_crossbar */ + +#ifndef GL_OES_texture_mirrored_repeat +#define GL_OES_texture_mirrored_repeat 1 +#define GL_MIRRORED_REPEAT_OES 0x8370 +#endif /* GL_OES_texture_mirrored_repeat */ + +#ifndef GL_OES_texture_npot +#define GL_OES_texture_npot 1 +#endif /* GL_OES_texture_npot */ + +#ifndef GL_OES_vertex_array_object +#define GL_OES_vertex_array_object 1 +#define GL_VERTEX_ARRAY_BINDING_OES 0x85B5 +typedef void (GL_APIENTRYP PFNGLBINDVERTEXARRAYOESPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLDELETEVERTEXARRAYSOESPROC) (GLsizei n, const GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLGENVERTEXARRAYSOESPROC) (GLsizei n, GLuint *arrays); +typedef GLboolean (GL_APIENTRYP PFNGLISVERTEXARRAYOESPROC) (GLuint array); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glBindVertexArrayOES (GLuint array); +GL_API void GL_APIENTRY glDeleteVertexArraysOES (GLsizei n, const GLuint *arrays); +GL_API void GL_APIENTRY glGenVertexArraysOES (GLsizei n, GLuint *arrays); +GL_API GLboolean GL_APIENTRY glIsVertexArrayOES (GLuint array); +#endif +#endif /* GL_OES_vertex_array_object */ + +#ifndef GL_AMD_compressed_3DC_texture +#define GL_AMD_compressed_3DC_texture 1 +#define GL_3DC_X_AMD 0x87F9 +#define GL_3DC_XY_AMD 0x87FA +#endif /* GL_AMD_compressed_3DC_texture */ + +#ifndef GL_AMD_compressed_ATC_texture +#define GL_AMD_compressed_ATC_texture 1 +#define GL_ATC_RGB_AMD 0x8C92 +#define GL_ATC_RGBA_EXPLICIT_ALPHA_AMD 0x8C93 +#define GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD 0x87EE +#endif /* GL_AMD_compressed_ATC_texture */ + +#ifndef GL_APPLE_copy_texture_levels +#define GL_APPLE_copy_texture_levels 1 +typedef void (GL_APIENTRYP PFNGLCOPYTEXTURELEVELSAPPLEPROC) (GLuint destinationTexture, GLuint sourceTexture, GLint sourceBaseLevel, GLsizei sourceLevelCount); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glCopyTextureLevelsAPPLE (GLuint destinationTexture, GLuint sourceTexture, GLint sourceBaseLevel, GLsizei sourceLevelCount); +#endif +#endif /* GL_APPLE_copy_texture_levels */ + +#ifndef GL_APPLE_framebuffer_multisample +#define GL_APPLE_framebuffer_multisample 1 +#define GL_RENDERBUFFER_SAMPLES_APPLE 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_APPLE 0x8D56 +#define GL_MAX_SAMPLES_APPLE 0x8D57 +#define GL_READ_FRAMEBUFFER_APPLE 0x8CA8 +#define GL_DRAW_FRAMEBUFFER_APPLE 0x8CA9 +#define GL_DRAW_FRAMEBUFFER_BINDING_APPLE 0x8CA6 +#define GL_READ_FRAMEBUFFER_BINDING_APPLE 0x8CAA +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEAPPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLRESOLVEMULTISAMPLEFRAMEBUFFERAPPLEPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glRenderbufferStorageMultisampleAPPLE (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glResolveMultisampleFramebufferAPPLE (void); +#endif +#endif /* GL_APPLE_framebuffer_multisample */ + +#ifndef GL_APPLE_sync +#define GL_APPLE_sync 1 +typedef struct __GLsync *GLsync; +typedef khronos_uint64_t GLuint64; +typedef khronos_int64_t GLint64; +#define GL_SYNC_OBJECT_APPLE 0x8A53 +#define GL_MAX_SERVER_WAIT_TIMEOUT_APPLE 0x9111 +#define GL_OBJECT_TYPE_APPLE 0x9112 +#define GL_SYNC_CONDITION_APPLE 0x9113 +#define GL_SYNC_STATUS_APPLE 0x9114 +#define GL_SYNC_FLAGS_APPLE 0x9115 +#define GL_SYNC_FENCE_APPLE 0x9116 
+#define GL_SYNC_GPU_COMMANDS_COMPLETE_APPLE 0x9117 +#define GL_UNSIGNALED_APPLE 0x9118 +#define GL_SIGNALED_APPLE 0x9119 +#define GL_ALREADY_SIGNALED_APPLE 0x911A +#define GL_TIMEOUT_EXPIRED_APPLE 0x911B +#define GL_CONDITION_SATISFIED_APPLE 0x911C +#define GL_WAIT_FAILED_APPLE 0x911D +#define GL_SYNC_FLUSH_COMMANDS_BIT_APPLE 0x00000001 +#define GL_TIMEOUT_IGNORED_APPLE 0xFFFFFFFFFFFFFFFFull +typedef GLsync (GL_APIENTRYP PFNGLFENCESYNCAPPLEPROC) (GLenum condition, GLbitfield flags); +typedef GLboolean (GL_APIENTRYP PFNGLISSYNCAPPLEPROC) (GLsync sync); +typedef void (GL_APIENTRYP PFNGLDELETESYNCAPPLEPROC) (GLsync sync); +typedef GLenum (GL_APIENTRYP PFNGLCLIENTWAITSYNCAPPLEPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLWAITSYNCAPPLEPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VAPPLEPROC) (GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGETSYNCIVAPPLEPROC) (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +#ifdef GL_GLEXT_PROTOTYPES +GL_API GLsync GL_APIENTRY glFenceSyncAPPLE (GLenum condition, GLbitfield flags); +GL_API GLboolean GL_APIENTRY glIsSyncAPPLE (GLsync sync); +GL_API void GL_APIENTRY glDeleteSyncAPPLE (GLsync sync); +GL_API GLenum GL_APIENTRY glClientWaitSyncAPPLE (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_API void GL_APIENTRY glWaitSyncAPPLE (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_API void GL_APIENTRY glGetInteger64vAPPLE (GLenum pname, GLint64 *params); +GL_API void GL_APIENTRY glGetSyncivAPPLE (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +#endif +#endif /* GL_APPLE_sync */ + +#ifndef GL_APPLE_texture_2D_limited_npot +#define GL_APPLE_texture_2D_limited_npot 1 +#endif /* GL_APPLE_texture_2D_limited_npot */ + +#ifndef GL_APPLE_texture_format_BGRA8888 +#define GL_APPLE_texture_format_BGRA8888 1 +#define GL_BGRA_EXT 0x80E1 +#define GL_BGRA8_EXT 0x93A1 +#endif /* GL_APPLE_texture_format_BGRA8888 */ + +#ifndef GL_APPLE_texture_max_level +#define GL_APPLE_texture_max_level 1 +#define GL_TEXTURE_MAX_LEVEL_APPLE 0x813D +#endif /* GL_APPLE_texture_max_level */ + +#ifndef GL_ARM_rgba8 +#define GL_ARM_rgba8 1 +#endif /* GL_ARM_rgba8 */ + +#ifndef GL_EXT_blend_minmax +#define GL_EXT_blend_minmax 1 +#define GL_MIN_EXT 0x8007 +#define GL_MAX_EXT 0x8008 +#endif /* GL_EXT_blend_minmax */ + +#ifndef GL_EXT_debug_marker +#define GL_EXT_debug_marker 1 +typedef char GLchar; +typedef void (GL_APIENTRYP PFNGLINSERTEVENTMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (GL_APIENTRYP PFNGLPUSHGROUPMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (GL_APIENTRYP PFNGLPOPGROUPMARKEREXTPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glInsertEventMarkerEXT (GLsizei length, const GLchar *marker); +GL_API void GL_APIENTRY glPushGroupMarkerEXT (GLsizei length, const GLchar *marker); +GL_API void GL_APIENTRY glPopGroupMarkerEXT (void); +#endif +#endif /* GL_EXT_debug_marker */ + +#ifndef GL_EXT_discard_framebuffer +#define GL_EXT_discard_framebuffer 1 +#define GL_COLOR_EXT 0x1800 +#define GL_DEPTH_EXT 0x1801 +#define GL_STENCIL_EXT 0x1802 +typedef void (GL_APIENTRYP PFNGLDISCARDFRAMEBUFFEREXTPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glDiscardFramebufferEXT (GLenum target, GLsizei numAttachments, const GLenum *attachments); +#endif +#endif /* GL_EXT_discard_framebuffer 
*/ + +#ifndef GL_EXT_map_buffer_range +#define GL_EXT_map_buffer_range 1 +#define GL_MAP_READ_BIT_EXT 0x0001 +#define GL_MAP_WRITE_BIT_EXT 0x0002 +#define GL_MAP_INVALIDATE_RANGE_BIT_EXT 0x0004 +#define GL_MAP_INVALIDATE_BUFFER_BIT_EXT 0x0008 +#define GL_MAP_FLUSH_EXPLICIT_BIT_EXT 0x0010 +#define GL_MAP_UNSYNCHRONIZED_BIT_EXT 0x0020 +typedef void *(GL_APIENTRYP PFNGLMAPBUFFERRANGEEXTPROC) (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (GL_APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEEXTPROC) (GLenum target, GLintptr offset, GLsizeiptr length); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void *GL_APIENTRY glMapBufferRangeEXT (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GL_API void GL_APIENTRY glFlushMappedBufferRangeEXT (GLenum target, GLintptr offset, GLsizeiptr length); +#endif +#endif /* GL_EXT_map_buffer_range */ + +#ifndef GL_EXT_multi_draw_arrays +#define GL_EXT_multi_draw_arrays 1 +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSEXTPROC) (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSEXTPROC) (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glMultiDrawArraysEXT (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +GL_API void GL_APIENTRY glMultiDrawElementsEXT (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount); +#endif +#endif /* GL_EXT_multi_draw_arrays */ + +#ifndef GL_EXT_multisampled_render_to_texture +#define GL_EXT_multisampled_render_to_texture 1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_SAMPLES_EXT 0x8D6C +#define GL_RENDERBUFFER_SAMPLES_EXT 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_EXT 0x8D56 +#define GL_MAX_SAMPLES_EXT 0x8D57 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEEXTPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glRenderbufferStorageMultisampleEXT (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glFramebufferTexture2DMultisampleEXT (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#endif +#endif /* GL_EXT_multisampled_render_to_texture */ + +#ifndef GL_EXT_read_format_bgra +#define GL_EXT_read_format_bgra 1 +#define GL_UNSIGNED_SHORT_4_4_4_4_REV_EXT 0x8365 +#define GL_UNSIGNED_SHORT_1_5_5_5_REV_EXT 0x8366 +#endif /* GL_EXT_read_format_bgra */ + +#ifndef GL_EXT_robustness +#define GL_EXT_robustness 1 +#define GL_GUILTY_CONTEXT_RESET_EXT 0x8253 +#define GL_INNOCENT_CONTEXT_RESET_EXT 0x8254 +#define GL_UNKNOWN_CONTEXT_RESET_EXT 0x8255 +#define GL_CONTEXT_ROBUST_ACCESS_EXT 0x90F3 +#define GL_RESET_NOTIFICATION_STRATEGY_EXT 0x8256 +#define GL_LOSE_CONTEXT_ON_RESET_EXT 0x8252 +#define GL_NO_RESET_NOTIFICATION_EXT 0x8261 +typedef GLenum (GL_APIENTRYP PFNGLGETGRAPHICSRESETSTATUSEXTPROC) (void); +typedef void (GL_APIENTRYP PFNGLREADNPIXELSEXTPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMFVEXTPROC) (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); 
+typedef void (GL_APIENTRYP PFNGLGETNUNIFORMIVEXTPROC) (GLuint program, GLint location, GLsizei bufSize, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_API GLenum GL_APIENTRY glGetGraphicsResetStatusEXT (void); +GL_API void GL_APIENTRY glReadnPixelsEXT (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GL_API void GL_APIENTRY glGetnUniformfvEXT (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GL_API void GL_APIENTRY glGetnUniformivEXT (GLuint program, GLint location, GLsizei bufSize, GLint *params); +#endif +#endif /* GL_EXT_robustness */ + +#ifndef GL_EXT_sRGB +#define GL_EXT_sRGB 1 +#define GL_SRGB_EXT 0x8C40 +#define GL_SRGB_ALPHA_EXT 0x8C42 +#define GL_SRGB8_ALPHA8_EXT 0x8C43 +#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING_EXT 0x8210 +#endif /* GL_EXT_sRGB */ + +#ifndef GL_EXT_texture_compression_dxt1 +#define GL_EXT_texture_compression_dxt1 1 +#define GL_COMPRESSED_RGB_S3TC_DXT1_EXT 0x83F0 +#define GL_COMPRESSED_RGBA_S3TC_DXT1_EXT 0x83F1 +#endif /* GL_EXT_texture_compression_dxt1 */ + +#ifndef GL_EXT_texture_filter_anisotropic +#define GL_EXT_texture_filter_anisotropic 1 +#define GL_TEXTURE_MAX_ANISOTROPY_EXT 0x84FE +#define GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT 0x84FF +#endif /* GL_EXT_texture_filter_anisotropic */ + +#ifndef GL_EXT_texture_format_BGRA8888 +#define GL_EXT_texture_format_BGRA8888 1 +#endif /* GL_EXT_texture_format_BGRA8888 */ + +#ifndef GL_EXT_texture_lod_bias +#define GL_EXT_texture_lod_bias 1 +#define GL_MAX_TEXTURE_LOD_BIAS_EXT 0x84FD +#define GL_TEXTURE_FILTER_CONTROL_EXT 0x8500 +#define GL_TEXTURE_LOD_BIAS_EXT 0x8501 +#endif /* GL_EXT_texture_lod_bias */ + +#ifndef GL_EXT_texture_storage +#define GL_EXT_texture_storage 1 +#define GL_TEXTURE_IMMUTABLE_FORMAT_EXT 0x912F +#define GL_ALPHA8_EXT 0x803C +#define GL_LUMINANCE8_EXT 0x8040 +#define GL_LUMINANCE8_ALPHA8_EXT 0x8045 +#define GL_RGBA32F_EXT 0x8814 +#define GL_RGB32F_EXT 0x8815 +#define GL_ALPHA32F_EXT 0x8816 +#define GL_LUMINANCE32F_EXT 0x8818 +#define GL_LUMINANCE_ALPHA32F_EXT 0x8819 +#define GL_RGBA16F_EXT 0x881A +#define GL_RGB16F_EXT 0x881B +#define GL_ALPHA16F_EXT 0x881C +#define GL_LUMINANCE16F_EXT 0x881E +#define GL_LUMINANCE_ALPHA16F_EXT 0x881F +#define GL_R8_EXT 0x8229 +#define GL_RG8_EXT 0x822B +#define GL_R32F_EXT 0x822E +#define GL_RG32F_EXT 0x8230 +#define GL_R16F_EXT 0x822D +#define GL_RG16F_EXT 0x822F +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE1DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE1DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE2DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE3DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glTexStorage1DEXT (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GL_API void GL_APIENTRY glTexStorage2DEXT (GLenum target, GLsizei levels, GLenum 
internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glTexStorage3DEXT (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GL_API void GL_APIENTRY glTextureStorage1DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GL_API void GL_APIENTRY glTextureStorage2DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glTextureStorage3DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +#endif +#endif /* GL_EXT_texture_storage */ + +#ifndef GL_IMG_multisampled_render_to_texture +#define GL_IMG_multisampled_render_to_texture 1 +#define GL_RENDERBUFFER_SAMPLES_IMG 0x9133 +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_IMG 0x9134 +#define GL_MAX_SAMPLES_IMG 0x9135 +#define GL_TEXTURE_SAMPLES_IMG 0x9136 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEIMGPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEIMGPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glRenderbufferStorageMultisampleIMG (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glFramebufferTexture2DMultisampleIMG (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#endif +#endif /* GL_IMG_multisampled_render_to_texture */ + +#ifndef GL_IMG_read_format +#define GL_IMG_read_format 1 +#define GL_BGRA_IMG 0x80E1 +#define GL_UNSIGNED_SHORT_4_4_4_4_REV_IMG 0x8365 +#endif /* GL_IMG_read_format */ + +#ifndef GL_IMG_texture_compression_pvrtc +#define GL_IMG_texture_compression_pvrtc 1 +#define GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG 0x8C00 +#define GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG 0x8C01 +#define GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG 0x8C02 +#define GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG 0x8C03 +#endif /* GL_IMG_texture_compression_pvrtc */ + +#ifndef GL_IMG_texture_env_enhanced_fixed_function +#define GL_IMG_texture_env_enhanced_fixed_function 1 +#define GL_MODULATE_COLOR_IMG 0x8C04 +#define GL_RECIP_ADD_SIGNED_ALPHA_IMG 0x8C05 +#define GL_TEXTURE_ALPHA_MODULATE_IMG 0x8C06 +#define GL_FACTOR_ALPHA_MODULATE_IMG 0x8C07 +#define GL_FRAGMENT_ALPHA_MODULATE_IMG 0x8C08 +#define GL_ADD_BLEND_IMG 0x8C09 +#define GL_DOT3_RGBA_IMG 0x86AF +#endif /* GL_IMG_texture_env_enhanced_fixed_function */ + +#ifndef GL_IMG_user_clip_plane +#define GL_IMG_user_clip_plane 1 +#define GL_CLIP_PLANE0_IMG 0x3000 +#define GL_CLIP_PLANE1_IMG 0x3001 +#define GL_CLIP_PLANE2_IMG 0x3002 +#define GL_CLIP_PLANE3_IMG 0x3003 +#define GL_CLIP_PLANE4_IMG 0x3004 +#define GL_CLIP_PLANE5_IMG 0x3005 +#define GL_MAX_CLIP_PLANES_IMG 0x0D32 +typedef void (GL_APIENTRYP PFNGLCLIPPLANEFIMGPROC) (GLenum p, const GLfloat *eqn); +typedef void (GL_APIENTRYP PFNGLCLIPPLANEXIMGPROC) (GLenum p, const GLfixed *eqn); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glClipPlanefIMG (GLenum p, const GLfloat *eqn); +GL_API void GL_APIENTRY glClipPlanexIMG (GLenum p, const GLfixed *eqn); +#endif +#endif /* GL_IMG_user_clip_plane */ + +#ifndef GL_NV_fence +#define GL_NV_fence 1 +#define GL_ALL_COMPLETED_NV 0x84F2 +#define GL_FENCE_STATUS_NV 0x84F3 +#define GL_FENCE_CONDITION_NV 0x84F4 +typedef void (GL_APIENTRYP 
PFNGLDELETEFENCESNVPROC) (GLsizei n, const GLuint *fences); +typedef void (GL_APIENTRYP PFNGLGENFENCESNVPROC) (GLsizei n, GLuint *fences); +typedef GLboolean (GL_APIENTRYP PFNGLISFENCENVPROC) (GLuint fence); +typedef GLboolean (GL_APIENTRYP PFNGLTESTFENCENVPROC) (GLuint fence); +typedef void (GL_APIENTRYP PFNGLGETFENCEIVNVPROC) (GLuint fence, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLFINISHFENCENVPROC) (GLuint fence); +typedef void (GL_APIENTRYP PFNGLSETFENCENVPROC) (GLuint fence, GLenum condition); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glDeleteFencesNV (GLsizei n, const GLuint *fences); +GL_API void GL_APIENTRY glGenFencesNV (GLsizei n, GLuint *fences); +GL_API GLboolean GL_APIENTRY glIsFenceNV (GLuint fence); +GL_API GLboolean GL_APIENTRY glTestFenceNV (GLuint fence); +GL_API void GL_APIENTRY glGetFenceivNV (GLuint fence, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glFinishFenceNV (GLuint fence); +GL_API void GL_APIENTRY glSetFenceNV (GLuint fence, GLenum condition); +#endif +#endif /* GL_NV_fence */ + +#ifndef GL_QCOM_driver_control +#define GL_QCOM_driver_control 1 +typedef void (GL_APIENTRYP PFNGLGETDRIVERCONTROLSQCOMPROC) (GLint *num, GLsizei size, GLuint *driverControls); +typedef void (GL_APIENTRYP PFNGLGETDRIVERCONTROLSTRINGQCOMPROC) (GLuint driverControl, GLsizei bufSize, GLsizei *length, GLchar *driverControlString); +typedef void (GL_APIENTRYP PFNGLENABLEDRIVERCONTROLQCOMPROC) (GLuint driverControl); +typedef void (GL_APIENTRYP PFNGLDISABLEDRIVERCONTROLQCOMPROC) (GLuint driverControl); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glGetDriverControlsQCOM (GLint *num, GLsizei size, GLuint *driverControls); +GL_API void GL_APIENTRY glGetDriverControlStringQCOM (GLuint driverControl, GLsizei bufSize, GLsizei *length, GLchar *driverControlString); +GL_API void GL_APIENTRY glEnableDriverControlQCOM (GLuint driverControl); +GL_API void GL_APIENTRY glDisableDriverControlQCOM (GLuint driverControl); +#endif +#endif /* GL_QCOM_driver_control */ + +#ifndef GL_QCOM_extended_get +#define GL_QCOM_extended_get 1 +#define GL_TEXTURE_WIDTH_QCOM 0x8BD2 +#define GL_TEXTURE_HEIGHT_QCOM 0x8BD3 +#define GL_TEXTURE_DEPTH_QCOM 0x8BD4 +#define GL_TEXTURE_INTERNAL_FORMAT_QCOM 0x8BD5 +#define GL_TEXTURE_FORMAT_QCOM 0x8BD6 +#define GL_TEXTURE_TYPE_QCOM 0x8BD7 +#define GL_TEXTURE_IMAGE_VALID_QCOM 0x8BD8 +#define GL_TEXTURE_NUM_LEVELS_QCOM 0x8BD9 +#define GL_TEXTURE_TARGET_QCOM 0x8BDA +#define GL_TEXTURE_OBJECT_VALID_QCOM 0x8BDB +#define GL_STATE_RESTORE 0x8BDC +typedef void (GL_APIENTRYP PFNGLEXTGETTEXTURESQCOMPROC) (GLuint *textures, GLint maxTextures, GLint *numTextures); +typedef void (GL_APIENTRYP PFNGLEXTGETBUFFERSQCOMPROC) (GLuint *buffers, GLint maxBuffers, GLint *numBuffers); +typedef void (GL_APIENTRYP PFNGLEXTGETRENDERBUFFERSQCOMPROC) (GLuint *renderbuffers, GLint maxRenderbuffers, GLint *numRenderbuffers); +typedef void (GL_APIENTRYP PFNGLEXTGETFRAMEBUFFERSQCOMPROC) (GLuint *framebuffers, GLint maxFramebuffers, GLint *numFramebuffers); +typedef void (GL_APIENTRYP PFNGLEXTGETTEXLEVELPARAMETERIVQCOMPROC) (GLuint texture, GLenum face, GLint level, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLEXTTEXOBJECTSTATEOVERRIDEIQCOMPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLEXTGETTEXSUBIMAGEQCOMPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, void *texels); +typedef void (GL_APIENTRYP 
PFNGLEXTGETBUFFERPOINTERVQCOMPROC) (GLenum target, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glExtGetTexturesQCOM (GLuint *textures, GLint maxTextures, GLint *numTextures); +GL_API void GL_APIENTRY glExtGetBuffersQCOM (GLuint *buffers, GLint maxBuffers, GLint *numBuffers); +GL_API void GL_APIENTRY glExtGetRenderbuffersQCOM (GLuint *renderbuffers, GLint maxRenderbuffers, GLint *numRenderbuffers); +GL_API void GL_APIENTRY glExtGetFramebuffersQCOM (GLuint *framebuffers, GLint maxFramebuffers, GLint *numFramebuffers); +GL_API void GL_APIENTRY glExtGetTexLevelParameterivQCOM (GLuint texture, GLenum face, GLint level, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glExtTexObjectStateOverrideiQCOM (GLenum target, GLenum pname, GLint param); +GL_API void GL_APIENTRY glExtGetTexSubImageQCOM (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, void *texels); +GL_API void GL_APIENTRY glExtGetBufferPointervQCOM (GLenum target, void **params); +#endif +#endif /* GL_QCOM_extended_get */ + +#ifndef GL_QCOM_extended_get2 +#define GL_QCOM_extended_get2 1 +typedef void (GL_APIENTRYP PFNGLEXTGETSHADERSQCOMPROC) (GLuint *shaders, GLint maxShaders, GLint *numShaders); +typedef void (GL_APIENTRYP PFNGLEXTGETPROGRAMSQCOMPROC) (GLuint *programs, GLint maxPrograms, GLint *numPrograms); +typedef GLboolean (GL_APIENTRYP PFNGLEXTISPROGRAMBINARYQCOMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLEXTGETPROGRAMBINARYSOURCEQCOMPROC) (GLuint program, GLenum shadertype, GLchar *source, GLint *length); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glExtGetShadersQCOM (GLuint *shaders, GLint maxShaders, GLint *numShaders); +GL_API void GL_APIENTRY glExtGetProgramsQCOM (GLuint *programs, GLint maxPrograms, GLint *numPrograms); +GL_API GLboolean GL_APIENTRY glExtIsProgramBinaryQCOM (GLuint program); +GL_API void GL_APIENTRY glExtGetProgramBinarySourceQCOM (GLuint program, GLenum shadertype, GLchar *source, GLint *length); +#endif +#endif /* GL_QCOM_extended_get2 */ + +#ifndef GL_QCOM_perfmon_global_mode +#define GL_QCOM_perfmon_global_mode 1 +#define GL_PERFMON_GLOBAL_MODE_QCOM 0x8FA0 +#endif /* GL_QCOM_perfmon_global_mode */ + +#ifndef GL_QCOM_tiled_rendering +#define GL_QCOM_tiled_rendering 1 +#define GL_COLOR_BUFFER_BIT0_QCOM 0x00000001 +#define GL_COLOR_BUFFER_BIT1_QCOM 0x00000002 +#define GL_COLOR_BUFFER_BIT2_QCOM 0x00000004 +#define GL_COLOR_BUFFER_BIT3_QCOM 0x00000008 +#define GL_COLOR_BUFFER_BIT4_QCOM 0x00000010 +#define GL_COLOR_BUFFER_BIT5_QCOM 0x00000020 +#define GL_COLOR_BUFFER_BIT6_QCOM 0x00000040 +#define GL_COLOR_BUFFER_BIT7_QCOM 0x00000080 +#define GL_DEPTH_BUFFER_BIT0_QCOM 0x00000100 +#define GL_DEPTH_BUFFER_BIT1_QCOM 0x00000200 +#define GL_DEPTH_BUFFER_BIT2_QCOM 0x00000400 +#define GL_DEPTH_BUFFER_BIT3_QCOM 0x00000800 +#define GL_DEPTH_BUFFER_BIT4_QCOM 0x00001000 +#define GL_DEPTH_BUFFER_BIT5_QCOM 0x00002000 +#define GL_DEPTH_BUFFER_BIT6_QCOM 0x00004000 +#define GL_DEPTH_BUFFER_BIT7_QCOM 0x00008000 +#define GL_STENCIL_BUFFER_BIT0_QCOM 0x00010000 +#define GL_STENCIL_BUFFER_BIT1_QCOM 0x00020000 +#define GL_STENCIL_BUFFER_BIT2_QCOM 0x00040000 +#define GL_STENCIL_BUFFER_BIT3_QCOM 0x00080000 +#define GL_STENCIL_BUFFER_BIT4_QCOM 0x00100000 +#define GL_STENCIL_BUFFER_BIT5_QCOM 0x00200000 +#define GL_STENCIL_BUFFER_BIT6_QCOM 0x00400000 +#define GL_STENCIL_BUFFER_BIT7_QCOM 0x00800000 +#define GL_MULTISAMPLE_BUFFER_BIT0_QCOM 0x01000000 +#define GL_MULTISAMPLE_BUFFER_BIT1_QCOM 
0x02000000 +#define GL_MULTISAMPLE_BUFFER_BIT2_QCOM 0x04000000 +#define GL_MULTISAMPLE_BUFFER_BIT3_QCOM 0x08000000 +#define GL_MULTISAMPLE_BUFFER_BIT4_QCOM 0x10000000 +#define GL_MULTISAMPLE_BUFFER_BIT5_QCOM 0x20000000 +#define GL_MULTISAMPLE_BUFFER_BIT6_QCOM 0x40000000 +#define GL_MULTISAMPLE_BUFFER_BIT7_QCOM 0x80000000 +typedef void (GL_APIENTRYP PFNGLSTARTTILINGQCOMPROC) (GLuint x, GLuint y, GLuint width, GLuint height, GLbitfield preserveMask); +typedef void (GL_APIENTRYP PFNGLENDTILINGQCOMPROC) (GLbitfield preserveMask); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glStartTilingQCOM (GLuint x, GLuint y, GLuint width, GLuint height, GLbitfield preserveMask); +GL_API void GL_APIENTRY glEndTilingQCOM (GLbitfield preserveMask); +#endif +#endif /* GL_QCOM_tiled_rendering */ + +#ifndef GL_QCOM_writeonly_rendering +#define GL_QCOM_writeonly_rendering 1 +#define GL_WRITEONLY_RENDERING_QCOM 0x8823 +#endif /* GL_QCOM_writeonly_rendering */ + +/* ANGLE GLES1 extensions */ +#include "glext_angle.h" + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/angle/include/GLES/glext_angle.h b/vendor/angle/include/GLES/glext_angle.h new file mode 100644 index 00000000..668f128b --- /dev/null +++ b/vendor/angle/include/GLES/glext_angle.h @@ -0,0 +1,23 @@ +// +// Copyright 2018 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// glext_angle.h: ANGLE modifications to the glext.h header file. +// Currently we don't include this file directly, we patch glext.h +// to include it implicitly so it is visible throughout our code. + +#ifndef INCLUDE_GLES_GLEXT_ANGLE_H_ +#define INCLUDE_GLES_GLEXT_ANGLE_H_ + +// clang-format off + +#ifndef GL_ANGLE_explicit_context_gles1 +#define GL_ANGLE_explicit_context_gles1 +typedef void *GLeglContext; +#include "glext_explicit_context_autogen.inc" +#endif /* GL_ANGLE_explicit_context_gles1 */ + +// clang-format on + +#endif // INCLUDE_GLES_GLEXT_ANGLE_H_ diff --git a/vendor/angle/include/GLES/glext_explicit_context_autogen.inc b/vendor/angle/include/GLES/glext_explicit_context_autogen.inc new file mode 100644 index 00000000..5528447f --- /dev/null +++ b/vendor/angle/include/GLES/glext_explicit_context_autogen.inc @@ -0,0 +1,258 @@ +// GENERATED FILE - DO NOT EDIT. +// Generated by generate_entry_points.py using data from gl.xml and gl_angle_ext.xml. +// +// Copyright 2020 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+// +// glext_explicit_context_autogen.inc: +// Function declarations for the EGL_ANGLE_explicit_context extension + +typedef void (GL_APIENTRYP PFNGLALPHAFUNCCONTEXTANGLEPROC)(GLeglContext ctx, GLenum func, GLfloat ref); +typedef void (GL_APIENTRYP PFNGLALPHAFUNCXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum func, GLfixed ref); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORXCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHXCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed depth); +typedef void (GL_APIENTRYP PFNGLCLIENTACTIVETEXTURECONTEXTANGLEPROC)(GLeglContext ctx, GLenum texture); +typedef void (GL_APIENTRYP PFNGLCLIPPLANEFCONTEXTANGLEPROC)(GLeglContext ctx, GLenum p, const GLfloat *eqn); +typedef void (GL_APIENTRYP PFNGLCLIPPLANEXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum plane, const GLfixed *equation); +typedef void (GL_APIENTRYP PFNGLCOLOR4FCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLCOLOR4UBCONTEXTANGLEPROC)(GLeglContext ctx, GLubyte red, GLubyte green, GLubyte blue, GLubyte alpha); +typedef void (GL_APIENTRYP PFNGLCOLOR4XCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (GL_APIENTRYP PFNGLCOLORPOINTERCONTEXTANGLEPROC)(GLeglContext ctx, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEXCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLDISABLECLIENTSTATECONTEXTANGLEPROC)(GLeglContext ctx, GLenum array); +typedef void (GL_APIENTRYP PFNGLENABLECLIENTSTATECONTEXTANGLEPROC)(GLeglContext ctx, GLenum array); +typedef void (GL_APIENTRYP PFNGLFOGFCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLFOGFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLFOGXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLFOGXVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLFRUSTUMFCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLFRUSTUMXCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLGETCLIPPLANEFCONTEXTANGLEPROC)(GLeglContext ctx, GLenum plane, GLfloat *equation); +typedef void (GL_APIENTRYP PFNGLGETCLIPPLANEXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum plane, GLfixed *equation); +typedef void (GL_APIENTRYP PFNGLGETFIXEDVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETLIGHTFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum light, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETLIGHTXVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum light, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETMATERIALFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum face, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETMATERIALXVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum face, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETTEXENVFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXENVIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint 
*params); +typedef void (GL_APIENTRYP PFNGLGETTEXENVXVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERXVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELFCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELXVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLLIGHTFCONTEXTANGLEPROC)(GLeglContext ctx, GLenum light, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLLIGHTFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum light, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLLIGHTXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum light, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLLIGHTXVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum light, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHXCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed width); +typedef void (GL_APIENTRYP PFNGLLOADIDENTITYCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLLOADMATRIXFCONTEXTANGLEPROC)(GLeglContext ctx, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLLOADMATRIXXCONTEXTANGLEPROC)(GLeglContext ctx, const GLfixed *m); +typedef void (GL_APIENTRYP PFNGLLOGICOPCONTEXTANGLEPROC)(GLeglContext ctx, GLenum opcode); +typedef void (GL_APIENTRYP PFNGLMATERIALFCONTEXTANGLEPROC)(GLeglContext ctx, GLenum face, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLMATERIALFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum face, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLMATERIALXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum face, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLMATERIALXVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum face, GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLMATRIXMODECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode); +typedef void (GL_APIENTRYP PFNGLMULTMATRIXFCONTEXTANGLEPROC)(GLeglContext ctx, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMULTMATRIXXCONTEXTANGLEPROC)(GLeglContext ctx, const GLfixed *m); +typedef void (GL_APIENTRYP PFNGLMULTITEXCOORD4FCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q); +typedef void (GL_APIENTRYP PFNGLMULTITEXCOORD4XCONTEXTANGLEPROC)(GLeglContext ctx, GLenum texture, GLfixed s, GLfixed t, GLfixed r, GLfixed q); +typedef void (GL_APIENTRYP PFNGLNORMAL3FCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat nx, GLfloat ny, GLfloat nz); +typedef void (GL_APIENTRYP PFNGLNORMAL3XCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed nx, GLfixed ny, GLfixed nz); +typedef void (GL_APIENTRYP PFNGLNORMALPOINTERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLORTHOFCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLORTHOXCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLPOINTPARAMETERFCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLfloat param); +typedef void 
(GL_APIENTRYP PFNGLPOINTPARAMETERFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLPOINTPARAMETERXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLPOINTPARAMETERXVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLPOINTSIZECONTEXTANGLEPROC)(GLeglContext ctx, GLfloat size); +typedef void (GL_APIENTRYP PFNGLPOINTSIZEXCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed size); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETXCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed factor, GLfixed units); +typedef void (GL_APIENTRYP PFNGLPOPMATRIXCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLPUSHMATRIXCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLROTATEFCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLROTATEXCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed angle, GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGEXCONTEXTANGLEPROC)(GLeglContext ctx, GLclampx value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLSCALEFCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLSCALEXCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLSHADEMODELCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode); +typedef void (GL_APIENTRYP PFNGLTEXCOORDPOINTERCONTEXTANGLEPROC)(GLeglContext ctx, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLTEXENVFCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXENVFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXENVICONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXENVIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXENVXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLTEXENVXVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERXVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLTRANSLATEFCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLTRANSLATEXCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLVERTEXPOINTERCONTEXTANGLEPROC)(GLeglContext ctx, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLBINDFRAMEBUFFEROESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLBINDRENDERBUFFEROESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint renderbuffer); +typedef GLenum (GL_APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target); +typedef void (GL_APIENTRYP PFNGLCURRENTPALETTEMATRIXOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint matrixpaletteindex); +typedef void (GL_APIENTRYP 
PFNGLDELETEFRAMEBUFFERSOESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLDELETERENDERBUFFERSOESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLDRAWTEXFOESCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat x, GLfloat y, GLfloat z, GLfloat width, GLfloat height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXFVOESCONTEXTANGLEPROC)(GLeglContext ctx, const GLfloat *coords); +typedef void (GL_APIENTRYP PFNGLDRAWTEXIOESCONTEXTANGLEPROC)(GLeglContext ctx, GLint x, GLint y, GLint z, GLint width, GLint height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXIVOESCONTEXTANGLEPROC)(GLeglContext ctx, const GLint *coords); +typedef void (GL_APIENTRYP PFNGLDRAWTEXSOESCONTEXTANGLEPROC)(GLeglContext ctx, GLshort x, GLshort y, GLshort z, GLshort width, GLshort height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXSVOESCONTEXTANGLEPROC)(GLeglContext ctx, const GLshort *coords); +typedef void (GL_APIENTRYP PFNGLDRAWTEXXOESCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed x, GLfixed y, GLfixed z, GLfixed width, GLfixed height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXXVOESCONTEXTANGLEPROC)(GLeglContext ctx, const GLfixed *coords); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFEROESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (GL_APIENTRYP PFNGLGENFRAMEBUFFERSOESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLGENRENDERBUFFERSOESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLGENERATEMIPMAPOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXGENFVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum coord, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXGENIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum coord, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXGENXVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum coord, GLenum pname, GLfixed *params); +typedef GLboolean (GL_APIENTRYP PFNGLISFRAMEBUFFEROESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint framebuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISRENDERBUFFEROESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLLOADPALETTEFROMMODELVIEWMATRIXOESCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLMATRIXINDEXPOINTEROESCONTEXTANGLEPROC)(GLeglContext ctx, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLPOINTSIZEPOINTEROESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum type, GLsizei stride, const void *pointer); +typedef GLbitfield (GL_APIENTRYP PFNGLQUERYMATRIXXOESCONTEXTANGLEPROC)(GLeglContext ctx, GLfixed *mantissa, GLint *exponent); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP 
PFNGLTEXGENFOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum coord, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXGENFVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum coord, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXGENIOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum coord, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXGENIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum coord, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXGENXOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum coord, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLTEXGENXVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum coord, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLWEIGHTPOINTEROESCONTEXTANGLEPROC)(GLeglContext ctx, GLint size, GLenum type, GLsizei stride, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glAlphaFuncContextANGLE(GLeglContext ctx, GLenum func, GLfloat ref); +GL_API void GL_APIENTRY glAlphaFuncxContextANGLE(GLeglContext ctx, GLenum func, GLfixed ref); +GL_API void GL_APIENTRY glClearColorxContextANGLE(GLeglContext ctx, GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GL_API void GL_APIENTRY glClearDepthxContextANGLE(GLeglContext ctx, GLfixed depth); +GL_API void GL_APIENTRY glClientActiveTextureContextANGLE(GLeglContext ctx, GLenum texture); +GL_API void GL_APIENTRY glClipPlanefContextANGLE(GLeglContext ctx, GLenum p, const GLfloat *eqn); +GL_API void GL_APIENTRY glClipPlanexContextANGLE(GLeglContext ctx, GLenum plane, const GLfixed *equation); +GL_API void GL_APIENTRY glColor4fContextANGLE(GLeglContext ctx, GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_API void GL_APIENTRY glColor4ubContextANGLE(GLeglContext ctx, GLubyte red, GLubyte green, GLubyte blue, GLubyte alpha); +GL_API void GL_APIENTRY glColor4xContextANGLE(GLeglContext ctx, GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GL_API void GL_APIENTRY glColorPointerContextANGLE(GLeglContext ctx, GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glDepthRangexContextANGLE(GLeglContext ctx, GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glDisableClientStateContextANGLE(GLeglContext ctx, GLenum array); +GL_API void GL_APIENTRY glEnableClientStateContextANGLE(GLeglContext ctx, GLenum array); +GL_API void GL_APIENTRY glFogfContextANGLE(GLeglContext ctx, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glFogfvContextANGLE(GLeglContext ctx, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glFogxContextANGLE(GLeglContext ctx, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glFogxvContextANGLE(GLeglContext ctx, GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glFrustumfContextANGLE(GLeglContext ctx, GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +GL_API void GL_APIENTRY glFrustumxContextANGLE(GLeglContext ctx, GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glGetClipPlanefContextANGLE(GLeglContext ctx, GLenum plane, GLfloat *equation); +GL_API void GL_APIENTRY glGetClipPlanexContextANGLE(GLeglContext ctx, GLenum plane, GLfixed *equation); +GL_API void GL_APIENTRY glGetFixedvContextANGLE(GLeglContext ctx, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetLightfvContextANGLE(GLeglContext ctx, GLenum light, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetLightxvContextANGLE(GLeglContext ctx, GLenum light, GLenum 
pname, GLfixed *params); +GL_API void GL_APIENTRY glGetMaterialfvContextANGLE(GLeglContext ctx, GLenum face, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetMaterialxvContextANGLE(GLeglContext ctx, GLenum face, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetTexEnvfvContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetTexEnvivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGetTexEnvxvContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetTexParameterxvContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glLightModelfContextANGLE(GLeglContext ctx, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glLightModelfvContextANGLE(GLeglContext ctx, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glLightModelxContextANGLE(GLeglContext ctx, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glLightModelxvContextANGLE(GLeglContext ctx, GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glLightfContextANGLE(GLeglContext ctx, GLenum light, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glLightfvContextANGLE(GLeglContext ctx, GLenum light, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glLightxContextANGLE(GLeglContext ctx, GLenum light, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glLightxvContextANGLE(GLeglContext ctx, GLenum light, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glLineWidthxContextANGLE(GLeglContext ctx, GLfixed width); +GL_API void GL_APIENTRY glLoadIdentityContextANGLE(GLeglContext ctx); +GL_API void GL_APIENTRY glLoadMatrixfContextANGLE(GLeglContext ctx, const GLfloat *m); +GL_API void GL_APIENTRY glLoadMatrixxContextANGLE(GLeglContext ctx, const GLfixed *m); +GL_API void GL_APIENTRY glLogicOpContextANGLE(GLeglContext ctx, GLenum opcode); +GL_API void GL_APIENTRY glMaterialfContextANGLE(GLeglContext ctx, GLenum face, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glMaterialfvContextANGLE(GLeglContext ctx, GLenum face, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glMaterialxContextANGLE(GLeglContext ctx, GLenum face, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glMaterialxvContextANGLE(GLeglContext ctx, GLenum face, GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glMatrixModeContextANGLE(GLeglContext ctx, GLenum mode); +GL_API void GL_APIENTRY glMultMatrixfContextANGLE(GLeglContext ctx, const GLfloat *m); +GL_API void GL_APIENTRY glMultMatrixxContextANGLE(GLeglContext ctx, const GLfixed *m); +GL_API void GL_APIENTRY glMultiTexCoord4fContextANGLE(GLeglContext ctx, GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q); +GL_API void GL_APIENTRY glMultiTexCoord4xContextANGLE(GLeglContext ctx, GLenum texture, GLfixed s, GLfixed t, GLfixed r, GLfixed q); +GL_API void GL_APIENTRY glNormal3fContextANGLE(GLeglContext ctx, GLfloat nx, GLfloat ny, GLfloat nz); +GL_API void GL_APIENTRY glNormal3xContextANGLE(GLeglContext ctx, GLfixed nx, GLfixed ny, GLfixed nz); +GL_API void GL_APIENTRY glNormalPointerContextANGLE(GLeglContext ctx, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glOrthofContextANGLE(GLeglContext ctx, GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +GL_API void GL_APIENTRY glOrthoxContextANGLE(GLeglContext ctx, GLfixed l, GLfixed r, GLfixed b, GLfixed t, 
GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glPointParameterfContextANGLE(GLeglContext ctx, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glPointParameterfvContextANGLE(GLeglContext ctx, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glPointParameterxContextANGLE(GLeglContext ctx, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glPointParameterxvContextANGLE(GLeglContext ctx, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glPointSizeContextANGLE(GLeglContext ctx, GLfloat size); +GL_API void GL_APIENTRY glPointSizexContextANGLE(GLeglContext ctx, GLfixed size); +GL_API void GL_APIENTRY glPolygonOffsetxContextANGLE(GLeglContext ctx, GLfixed factor, GLfixed units); +GL_API void GL_APIENTRY glPopMatrixContextANGLE(GLeglContext ctx); +GL_API void GL_APIENTRY glPushMatrixContextANGLE(GLeglContext ctx); +GL_API void GL_APIENTRY glRotatefContextANGLE(GLeglContext ctx, GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +GL_API void GL_APIENTRY glRotatexContextANGLE(GLeglContext ctx, GLfixed angle, GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glSampleCoveragexContextANGLE(GLeglContext ctx, GLclampx value, GLboolean invert); +GL_API void GL_APIENTRY glScalefContextANGLE(GLeglContext ctx, GLfloat x, GLfloat y, GLfloat z); +GL_API void GL_APIENTRY glScalexContextANGLE(GLeglContext ctx, GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glShadeModelContextANGLE(GLeglContext ctx, GLenum mode); +GL_API void GL_APIENTRY glTexCoordPointerContextANGLE(GLeglContext ctx, GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glTexEnvfContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glTexEnvfvContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glTexEnviContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint param); +GL_API void GL_APIENTRY glTexEnvivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, const GLint *params); +GL_API void GL_APIENTRY glTexEnvxContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexEnvxvContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glTexParameterxContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexParameterxvContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glTranslatefContextANGLE(GLeglContext ctx, GLfloat x, GLfloat y, GLfloat z); +GL_API void GL_APIENTRY glTranslatexContextANGLE(GLeglContext ctx, GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glVertexPointerContextANGLE(GLeglContext ctx, GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glBindFramebufferOESContextANGLE(GLeglContext ctx, GLenum target, GLuint framebuffer); +GL_API void GL_APIENTRY glBindRenderbufferOESContextANGLE(GLeglContext ctx, GLenum target, GLuint renderbuffer); +GL_API GLenum GL_APIENTRY glCheckFramebufferStatusOESContextANGLE(GLeglContext ctx, GLenum target); +GL_API void GL_APIENTRY glCurrentPaletteMatrixOESContextANGLE(GLeglContext ctx, GLuint matrixpaletteindex); +GL_API void GL_APIENTRY glDeleteFramebuffersOESContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *framebuffers); +GL_API void GL_APIENTRY glDeleteRenderbuffersOESContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *renderbuffers); +GL_API 
void GL_APIENTRY glDrawTexfOESContextANGLE(GLeglContext ctx, GLfloat x, GLfloat y, GLfloat z, GLfloat width, GLfloat height); +GL_API void GL_APIENTRY glDrawTexfvOESContextANGLE(GLeglContext ctx, const GLfloat *coords); +GL_API void GL_APIENTRY glDrawTexiOESContextANGLE(GLeglContext ctx, GLint x, GLint y, GLint z, GLint width, GLint height); +GL_API void GL_APIENTRY glDrawTexivOESContextANGLE(GLeglContext ctx, const GLint *coords); +GL_API void GL_APIENTRY glDrawTexsOESContextANGLE(GLeglContext ctx, GLshort x, GLshort y, GLshort z, GLshort width, GLshort height); +GL_API void GL_APIENTRY glDrawTexsvOESContextANGLE(GLeglContext ctx, const GLshort *coords); +GL_API void GL_APIENTRY glDrawTexxOESContextANGLE(GLeglContext ctx, GLfixed x, GLfixed y, GLfixed z, GLfixed width, GLfixed height); +GL_API void GL_APIENTRY glDrawTexxvOESContextANGLE(GLeglContext ctx, const GLfixed *coords); +GL_API void GL_APIENTRY glFramebufferRenderbufferOESContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GL_API void GL_APIENTRY glFramebufferTexture2DOESContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GL_API void GL_APIENTRY glGenFramebuffersOESContextANGLE(GLeglContext ctx, GLsizei n, GLuint *framebuffers); +GL_API void GL_APIENTRY glGenRenderbuffersOESContextANGLE(GLeglContext ctx, GLsizei n, GLuint *renderbuffers); +GL_API void GL_APIENTRY glGenerateMipmapOESContextANGLE(GLeglContext ctx, GLenum target); +GL_API void GL_APIENTRY glGetFramebufferAttachmentParameterivOESContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGetRenderbufferParameterivOESContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGetTexGenfvOESContextANGLE(GLeglContext ctx, GLenum coord, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetTexGenivOESContextANGLE(GLeglContext ctx, GLenum coord, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGetTexGenxvOESContextANGLE(GLeglContext ctx, GLenum coord, GLenum pname, GLfixed *params); +GL_API GLboolean GL_APIENTRY glIsFramebufferOESContextANGLE(GLeglContext ctx, GLuint framebuffer); +GL_API GLboolean GL_APIENTRY glIsRenderbufferOESContextANGLE(GLeglContext ctx, GLuint renderbuffer); +GL_API void GL_APIENTRY glLoadPaletteFromModelViewMatrixOESContextANGLE(GLeglContext ctx); +GL_API void GL_APIENTRY glMatrixIndexPointerOESContextANGLE(GLeglContext ctx, GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glPointSizePointerOESContextANGLE(GLeglContext ctx, GLenum type, GLsizei stride, const void *pointer); +GL_API GLbitfield GL_APIENTRY glQueryMatrixxOESContextANGLE(GLeglContext ctx, GLfixed *mantissa, GLint *exponent); +GL_API void GL_APIENTRY glRenderbufferStorageOESContextANGLE(GLeglContext ctx, GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glTexGenfOESContextANGLE(GLeglContext ctx, GLenum coord, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glTexGenfvOESContextANGLE(GLeglContext ctx, GLenum coord, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glTexGeniOESContextANGLE(GLeglContext ctx, GLenum coord, GLenum pname, GLint param); +GL_API void GL_APIENTRY glTexGenivOESContextANGLE(GLeglContext ctx, GLenum coord, GLenum pname, const GLint *params); +GL_API void GL_APIENTRY glTexGenxOESContextANGLE(GLeglContext ctx, GLenum coord, 
GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexGenxvOESContextANGLE(GLeglContext ctx, GLenum coord, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glWeightPointerOESContextANGLE(GLeglContext ctx, GLint size, GLenum type, GLsizei stride, const void *pointer); +#endif diff --git a/vendor/angle/include/GLES/glplatform.h b/vendor/angle/include/GLES/glplatform.h new file mode 100644 index 00000000..16060a9a --- /dev/null +++ b/vendor/angle/include/GLES/glplatform.h @@ -0,0 +1,38 @@ +#ifndef __glplatform_h_ +#define __glplatform_h_ + +/* +** Copyright (c) 2017 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* Platform-specific types and definitions for OpenGL ES 1.X gl.h + * + * Adopters may modify khrplatform.h and this file to suit their platform. + * Please contribute modifications back to Khronos as pull requests on the + * public github repository: + * https://github.com/KhronosGroup/OpenGL-Registry + */ + +#include <KHR/khrplatform.h> + +#ifndef GL_API +#define GL_API KHRONOS_APICALL +#endif + +#ifndef GL_APIENTRY +#define GL_APIENTRY KHRONOS_APIENTRY +#endif + +#endif /* __glplatform_h_ */ diff --git a/vendor/angle/include/GLES2/.clang-format b/vendor/angle/include/GLES2/.clang-format new file mode 100644 index 00000000..06147100 --- /dev/null +++ b/vendor/angle/include/GLES2/.clang-format @@ -0,0 +1,3 @@ + + +DisableFormat: true diff --git a/vendor/angle/include/GLES2/gl2.h b/vendor/angle/include/GLES2/gl2.h new file mode 100644 index 00000000..3c5e6d3d --- /dev/null +++ b/vendor/angle/include/GLES2/gl2.h @@ -0,0 +1,656 @@ +#ifndef __gles2_gl2_h_ +#define __gles2_gl2_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright 2013-2020 The Khronos Group Inc. +** SPDX-License-Identifier: MIT +** +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry.
The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#include <GLES2/gl2platform.h> + +#ifndef GL_APIENTRYP +#define GL_APIENTRYP GL_APIENTRY* +#endif + +#ifndef GL_GLES_PROTOTYPES +#define GL_GLES_PROTOTYPES 1 +#endif + +/* Generated on date 20210107 */ + +/* Generated C header for: + * API: gles2 + * Profile: common + * Versions considered: 2\.[0-9] + * Versions emitted: .* + * Default extensions included: None + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_ES_VERSION_2_0 +#define GL_ES_VERSION_2_0 1 +#include <KHR/khrplatform.h> +typedef khronos_int8_t GLbyte; +typedef khronos_float_t GLclampf; +typedef khronos_int32_t GLfixed; +typedef khronos_int16_t GLshort; +typedef khronos_uint16_t GLushort; +typedef void GLvoid; +typedef struct __GLsync *GLsync; +typedef khronos_int64_t GLint64; +typedef khronos_uint64_t GLuint64; +typedef unsigned int GLenum; +typedef unsigned int GLuint; +typedef char GLchar; +typedef khronos_float_t GLfloat; +typedef khronos_ssize_t GLsizeiptr; +typedef khronos_intptr_t GLintptr; +typedef unsigned int GLbitfield; +typedef int GLint; +typedef unsigned char GLboolean; +typedef int GLsizei; +typedef khronos_uint8_t GLubyte; +#define GL_DEPTH_BUFFER_BIT 0x00000100 +#define GL_STENCIL_BUFFER_BIT 0x00000400 +#define GL_COLOR_BUFFER_BIT 0x00004000 +#define GL_FALSE 0 +#define GL_TRUE 1 +#define GL_POINTS 0x0000 +#define GL_LINES 0x0001 +#define GL_LINE_LOOP 0x0002 +#define GL_LINE_STRIP 0x0003 +#define GL_TRIANGLES 0x0004 +#define GL_TRIANGLE_STRIP 0x0005 +#define GL_TRIANGLE_FAN 0x0006 +#define GL_ZERO 0 +#define GL_ONE 1 +#define GL_SRC_COLOR 0x0300 +#define GL_ONE_MINUS_SRC_COLOR 0x0301 +#define GL_SRC_ALPHA 0x0302 +#define GL_ONE_MINUS_SRC_ALPHA 0x0303 +#define GL_DST_ALPHA 0x0304 +#define GL_ONE_MINUS_DST_ALPHA 0x0305 +#define GL_DST_COLOR 0x0306 +#define GL_ONE_MINUS_DST_COLOR 0x0307 +#define GL_SRC_ALPHA_SATURATE 0x0308 +#define GL_FUNC_ADD 0x8006 +#define GL_BLEND_EQUATION 0x8009 +#define GL_BLEND_EQUATION_RGB 0x8009 +#define GL_BLEND_EQUATION_ALPHA 0x883D +#define GL_FUNC_SUBTRACT 0x800A +#define GL_FUNC_REVERSE_SUBTRACT 0x800B +#define GL_BLEND_DST_RGB 0x80C8 +#define GL_BLEND_SRC_RGB 0x80C9 +#define GL_BLEND_DST_ALPHA 0x80CA +#define GL_BLEND_SRC_ALPHA 0x80CB +#define GL_CONSTANT_COLOR 0x8001 +#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002 +#define GL_CONSTANT_ALPHA 0x8003 +#define GL_ONE_MINUS_CONSTANT_ALPHA 0x8004 +#define GL_BLEND_COLOR 0x8005 +#define GL_ARRAY_BUFFER 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define GL_ARRAY_BUFFER_BINDING 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895 +#define GL_STREAM_DRAW 0x88E0 +#define GL_STATIC_DRAW 0x88E4 +#define GL_DYNAMIC_DRAW 0x88E8 +#define GL_BUFFER_SIZE 0x8764 +#define GL_BUFFER_USAGE 0x8765 +#define GL_CURRENT_VERTEX_ATTRIB 0x8626 +#define GL_FRONT 0x0404 +#define GL_BACK 0x0405 +#define GL_FRONT_AND_BACK 0x0408 +#define GL_TEXTURE_2D 0x0DE1 +#define GL_CULL_FACE 0x0B44 +#define GL_BLEND 0x0BE2 +#define GL_DITHER 0x0BD0 +#define GL_STENCIL_TEST 0x0B90 +#define GL_DEPTH_TEST 0x0B71 +#define GL_SCISSOR_TEST 0x0C11 +#define GL_POLYGON_OFFSET_FILL 0x8037 +#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E +#define GL_SAMPLE_COVERAGE 0x80A0 +#define GL_NO_ERROR 0 +#define GL_INVALID_ENUM 0x0500 +#define GL_INVALID_VALUE 0x0501 +#define GL_INVALID_OPERATION 0x0502 +#define GL_OUT_OF_MEMORY 0x0505 +#define GL_CW 0x0900 +#define GL_CCW 0x0901 +#define GL_LINE_WIDTH 0x0B21
+#define GL_ALIASED_POINT_SIZE_RANGE 0x846D +#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E +#define GL_CULL_FACE_MODE 0x0B45 +#define GL_FRONT_FACE 0x0B46 +#define GL_DEPTH_RANGE 0x0B70 +#define GL_DEPTH_WRITEMASK 0x0B72 +#define GL_DEPTH_CLEAR_VALUE 0x0B73 +#define GL_DEPTH_FUNC 0x0B74 +#define GL_STENCIL_CLEAR_VALUE 0x0B91 +#define GL_STENCIL_FUNC 0x0B92 +#define GL_STENCIL_FAIL 0x0B94 +#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95 +#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96 +#define GL_STENCIL_REF 0x0B97 +#define GL_STENCIL_VALUE_MASK 0x0B93 +#define GL_STENCIL_WRITEMASK 0x0B98 +#define GL_STENCIL_BACK_FUNC 0x8800 +#define GL_STENCIL_BACK_FAIL 0x8801 +#define GL_STENCIL_BACK_PASS_DEPTH_FAIL 0x8802 +#define GL_STENCIL_BACK_PASS_DEPTH_PASS 0x8803 +#define GL_STENCIL_BACK_REF 0x8CA3 +#define GL_STENCIL_BACK_VALUE_MASK 0x8CA4 +#define GL_STENCIL_BACK_WRITEMASK 0x8CA5 +#define GL_VIEWPORT 0x0BA2 +#define GL_SCISSOR_BOX 0x0C10 +#define GL_COLOR_CLEAR_VALUE 0x0C22 +#define GL_COLOR_WRITEMASK 0x0C23 +#define GL_UNPACK_ALIGNMENT 0x0CF5 +#define GL_PACK_ALIGNMENT 0x0D05 +#define GL_MAX_TEXTURE_SIZE 0x0D33 +#define GL_MAX_VIEWPORT_DIMS 0x0D3A +#define GL_SUBPIXEL_BITS 0x0D50 +#define GL_RED_BITS 0x0D52 +#define GL_GREEN_BITS 0x0D53 +#define GL_BLUE_BITS 0x0D54 +#define GL_ALPHA_BITS 0x0D55 +#define GL_DEPTH_BITS 0x0D56 +#define GL_STENCIL_BITS 0x0D57 +#define GL_POLYGON_OFFSET_UNITS 0x2A00 +#define GL_POLYGON_OFFSET_FACTOR 0x8038 +#define GL_TEXTURE_BINDING_2D 0x8069 +#define GL_SAMPLE_BUFFERS 0x80A8 +#define GL_SAMPLES 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT 0x80AB +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3 +#define GL_DONT_CARE 0x1100 +#define GL_FASTEST 0x1101 +#define GL_NICEST 0x1102 +#define GL_GENERATE_MIPMAP_HINT 0x8192 +#define GL_BYTE 0x1400 +#define GL_UNSIGNED_BYTE 0x1401 +#define GL_SHORT 0x1402 +#define GL_UNSIGNED_SHORT 0x1403 +#define GL_INT 0x1404 +#define GL_UNSIGNED_INT 0x1405 +#define GL_FLOAT 0x1406 +#define GL_FIXED 0x140C +#define GL_DEPTH_COMPONENT 0x1902 +#define GL_ALPHA 0x1906 +#define GL_RGB 0x1907 +#define GL_RGBA 0x1908 +#define GL_LUMINANCE 0x1909 +#define GL_LUMINANCE_ALPHA 0x190A +#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034 +#define GL_UNSIGNED_SHORT_5_6_5 0x8363 +#define GL_FRAGMENT_SHADER 0x8B30 +#define GL_VERTEX_SHADER 0x8B31 +#define GL_MAX_VERTEX_ATTRIBS 0x8869 +#define GL_MAX_VERTEX_UNIFORM_VECTORS 0x8DFB +#define GL_MAX_VARYING_VECTORS 0x8DFC +#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D +#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS 0x8B4C +#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872 +#define GL_MAX_FRAGMENT_UNIFORM_VECTORS 0x8DFD +#define GL_SHADER_TYPE 0x8B4F +#define GL_DELETE_STATUS 0x8B80 +#define GL_LINK_STATUS 0x8B82 +#define GL_VALIDATE_STATUS 0x8B83 +#define GL_ATTACHED_SHADERS 0x8B85 +#define GL_ACTIVE_UNIFORMS 0x8B86 +#define GL_ACTIVE_UNIFORM_MAX_LENGTH 0x8B87 +#define GL_ACTIVE_ATTRIBUTES 0x8B89 +#define GL_ACTIVE_ATTRIBUTE_MAX_LENGTH 0x8B8A +#define GL_SHADING_LANGUAGE_VERSION 0x8B8C +#define GL_CURRENT_PROGRAM 0x8B8D +#define GL_NEVER 0x0200 +#define GL_LESS 0x0201 +#define GL_EQUAL 0x0202 +#define GL_LEQUAL 0x0203 +#define GL_GREATER 0x0204 +#define GL_NOTEQUAL 0x0205 +#define GL_GEQUAL 0x0206 +#define GL_ALWAYS 0x0207 +#define GL_KEEP 0x1E00 +#define GL_REPLACE 0x1E01 +#define GL_INCR 0x1E02 +#define GL_DECR 0x1E03 +#define GL_INVERT 0x150A +#define GL_INCR_WRAP 0x8507 +#define GL_DECR_WRAP 0x8508 +#define 
GL_VENDOR 0x1F00 +#define GL_RENDERER 0x1F01 +#define GL_VERSION 0x1F02 +#define GL_EXTENSIONS 0x1F03 +#define GL_NEAREST 0x2600 +#define GL_LINEAR 0x2601 +#define GL_NEAREST_MIPMAP_NEAREST 0x2700 +#define GL_LINEAR_MIPMAP_NEAREST 0x2701 +#define GL_NEAREST_MIPMAP_LINEAR 0x2702 +#define GL_LINEAR_MIPMAP_LINEAR 0x2703 +#define GL_TEXTURE_MAG_FILTER 0x2800 +#define GL_TEXTURE_MIN_FILTER 0x2801 +#define GL_TEXTURE_WRAP_S 0x2802 +#define GL_TEXTURE_WRAP_T 0x2803 +#define GL_TEXTURE 0x1702 +#define GL_TEXTURE_CUBE_MAP 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 +#define GL_TEXTURE3 0x84C3 +#define GL_TEXTURE4 0x84C4 +#define GL_TEXTURE5 0x84C5 +#define GL_TEXTURE6 0x84C6 +#define GL_TEXTURE7 0x84C7 +#define GL_TEXTURE8 0x84C8 +#define GL_TEXTURE9 0x84C9 +#define GL_TEXTURE10 0x84CA +#define GL_TEXTURE11 0x84CB +#define GL_TEXTURE12 0x84CC +#define GL_TEXTURE13 0x84CD +#define GL_TEXTURE14 0x84CE +#define GL_TEXTURE15 0x84CF +#define GL_TEXTURE16 0x84D0 +#define GL_TEXTURE17 0x84D1 +#define GL_TEXTURE18 0x84D2 +#define GL_TEXTURE19 0x84D3 +#define GL_TEXTURE20 0x84D4 +#define GL_TEXTURE21 0x84D5 +#define GL_TEXTURE22 0x84D6 +#define GL_TEXTURE23 0x84D7 +#define GL_TEXTURE24 0x84D8 +#define GL_TEXTURE25 0x84D9 +#define GL_TEXTURE26 0x84DA +#define GL_TEXTURE27 0x84DB +#define GL_TEXTURE28 0x84DC +#define GL_TEXTURE29 0x84DD +#define GL_TEXTURE30 0x84DE +#define GL_TEXTURE31 0x84DF +#define GL_ACTIVE_TEXTURE 0x84E0 +#define GL_REPEAT 0x2901 +#define GL_CLAMP_TO_EDGE 0x812F +#define GL_MIRRORED_REPEAT 0x8370 +#define GL_FLOAT_VEC2 0x8B50 +#define GL_FLOAT_VEC3 0x8B51 +#define GL_FLOAT_VEC4 0x8B52 +#define GL_INT_VEC2 0x8B53 +#define GL_INT_VEC3 0x8B54 +#define GL_INT_VEC4 0x8B55 +#define GL_BOOL 0x8B56 +#define GL_BOOL_VEC2 0x8B57 +#define GL_BOOL_VEC3 0x8B58 +#define GL_BOOL_VEC4 0x8B59 +#define GL_FLOAT_MAT2 0x8B5A +#define GL_FLOAT_MAT3 0x8B5B +#define GL_FLOAT_MAT4 0x8B5C +#define GL_SAMPLER_2D 0x8B5E +#define GL_SAMPLER_CUBE 0x8B60 +#define GL_VERTEX_ATTRIB_ARRAY_ENABLED 0x8622 +#define GL_VERTEX_ATTRIB_ARRAY_SIZE 0x8623 +#define GL_VERTEX_ATTRIB_ARRAY_STRIDE 0x8624 +#define GL_VERTEX_ATTRIB_ARRAY_TYPE 0x8625 +#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED 0x886A +#define GL_VERTEX_ATTRIB_ARRAY_POINTER 0x8645 +#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING 0x889F +#define GL_IMPLEMENTATION_COLOR_READ_TYPE 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT 0x8B9B +#define GL_COMPILE_STATUS 0x8B81 +#define GL_INFO_LOG_LENGTH 0x8B84 +#define GL_SHADER_SOURCE_LENGTH 0x8B88 +#define GL_SHADER_COMPILER 0x8DFA +#define GL_SHADER_BINARY_FORMATS 0x8DF8 +#define GL_NUM_SHADER_BINARY_FORMATS 0x8DF9 +#define GL_LOW_FLOAT 0x8DF0 +#define GL_MEDIUM_FLOAT 0x8DF1 +#define GL_HIGH_FLOAT 0x8DF2 +#define GL_LOW_INT 0x8DF3 +#define GL_MEDIUM_INT 0x8DF4 +#define GL_HIGH_INT 0x8DF5 +#define GL_FRAMEBUFFER 0x8D40 +#define GL_RENDERBUFFER 0x8D41 +#define GL_RGBA4 0x8056 +#define GL_RGB5_A1 0x8057 +#define GL_RGB565 0x8D62 +#define GL_DEPTH_COMPONENT16 0x81A5 +#define GL_STENCIL_INDEX8 0x8D48 +#define GL_RENDERBUFFER_WIDTH 0x8D42 +#define GL_RENDERBUFFER_HEIGHT 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT 
0x8D44 +#define GL_RENDERBUFFER_RED_SIZE 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE 0x8D53 +#define GL_RENDERBUFFER_DEPTH_SIZE 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE 0x8D55 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE 0x8CD3 +#define GL_COLOR_ATTACHMENT0 0x8CE0 +#define GL_DEPTH_ATTACHMENT 0x8D00 +#define GL_STENCIL_ATTACHMENT 0x8D20 +#define GL_NONE 0 +#define GL_FRAMEBUFFER_COMPLETE 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS 0x8CD9 +#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD +#define GL_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_RENDERBUFFER_BINDING 0x8CA7 +#define GL_MAX_RENDERBUFFER_SIZE 0x84E8 +#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506 +typedef void (GL_APIENTRYP PFNGLACTIVETEXTUREPROC) (GLenum texture); +typedef void (GL_APIENTRYP PFNGLATTACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLBINDATTRIBLOCATIONPROC) (GLuint program, GLuint index, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLBINDFRAMEBUFFERPROC) (GLenum target, GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLBINDRENDERBUFFERPROC) (GLenum target, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLBINDTEXTUREPROC) (GLenum target, GLuint texture); +typedef void (GL_APIENTRYP PFNGLBLENDCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEPROC) (GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCPROC) (GLenum sfactor, GLenum dfactor); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEPROC) (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +typedef void (GL_APIENTRYP PFNGLBUFFERDATAPROC) (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +typedef void (GL_APIENTRYP PFNGLBUFFERSUBDATAPROC) (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +typedef GLenum (GL_APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLCLEARPROC) (GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHFPROC) (GLfloat d); +typedef void (GL_APIENTRYP PFNGLCLEARSTENCILPROC) (GLint s); +typedef void (GL_APIENTRYP PFNGLCOLORMASKPROC) (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +typedef void (GL_APIENTRYP PFNGLCOMPILESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOPYTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (GL_APIENTRYP 
PFNGLCOPYTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef GLuint (GL_APIENTRYP PFNGLCREATEPROGRAMPROC) (void); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROC) (GLenum type); +typedef void (GL_APIENTRYP PFNGLCULLFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLDELETEBUFFERSPROC) (GLsizei n, const GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLDELETEFRAMEBUFFERSPROC) (GLsizei n, const GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLDELETERENDERBUFFERSPROC) (GLsizei n, const GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLDELETESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLDELETETEXTURESPROC) (GLsizei n, const GLuint *textures); +typedef void (GL_APIENTRYP PFNGLDEPTHFUNCPROC) (GLenum func); +typedef void (GL_APIENTRYP PFNGLDEPTHMASKPROC) (GLboolean flag); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEFPROC) (GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLDETACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLDISABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLDISABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSPROC) (GLenum mode, GLint first, GLsizei count); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLENABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLENABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLFINISHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFLUSHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFERPROC) (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (GL_APIENTRYP PFNGLFRONTFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLGENBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLGENERATEMIPMAPPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGENFRAMEBUFFERSPROC) (GLsizei n, GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLGENRENDERBUFFERSPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLGENTEXTURESPROC) (GLsizei n, GLuint *textures); +typedef void (GL_APIENTRYP PFNGLGETACTIVEATTRIBPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETATTACHEDSHADERSPROC) (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +typedef GLint (GL_APIENTRYP PFNGLGETATTRIBLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANVPROC) (GLenum pname, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef GLenum (GL_APIENTRYP PFNGLGETERRORPROC) (void); +typedef void (GL_APIENTRYP PFNGLGETFLOATVPROC) (GLenum pname, GLfloat *data); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC) (GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERVPROC) (GLenum pname, 
GLint *data); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMIVPROC) (GLuint program, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINFOLOGPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERIVPROC) (GLuint shader, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERINFOLOGPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETSHADERPRECISIONFORMATPROC) (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +typedef void (GL_APIENTRYP PFNGLGETSHADERSOURCEPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGPROC) (GLenum name); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMFVPROC) (GLuint program, GLint location, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMIVPROC) (GLuint program, GLint location, GLint *params); +typedef GLint (GL_APIENTRYP PFNGLGETUNIFORMLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBFVPROC) (GLuint index, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVPROC) (GLuint index, GLenum pname, void **pointer); +typedef void (GL_APIENTRYP PFNGLHINTPROC) (GLenum target, GLenum mode); +typedef GLboolean (GL_APIENTRYP PFNGLISBUFFERPROC) (GLuint buffer); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDPROC) (GLenum cap); +typedef GLboolean (GL_APIENTRYP PFNGLISFRAMEBUFFERPROC) (GLuint framebuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPROC) (GLuint program); +typedef GLboolean (GL_APIENTRYP PFNGLISRENDERBUFFERPROC) (GLuint renderbuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISSHADERPROC) (GLuint shader); +typedef GLboolean (GL_APIENTRYP PFNGLISTEXTUREPROC) (GLuint texture); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHPROC) (GLfloat width); +typedef void (GL_APIENTRYP PFNGLLINKPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLPIXELSTOREIPROC) (GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETPROC) (GLfloat factor, GLfloat units); +typedef void (GL_APIENTRYP PFNGLREADPIXELSPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +typedef void (GL_APIENTRYP PFNGLRELEASESHADERCOMPILERPROC) (void); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGEPROC) (GLfloat value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLSCISSORPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSHADERBINARYPROC) (GLsizei count, const GLuint *shaders, GLenum binaryFormat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLSHADERSOURCEPROC) (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCSEPARATEPROC) (GLenum 
face, GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKPROC) (GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKSEPARATEPROC) (GLenum face, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILOPPROC) (GLenum fail, GLenum zfail, GLenum zpass); +typedef void (GL_APIENTRYP PFNGLSTENCILOPSEPARATEPROC) (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFPROC) (GLenum target, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FPROC) (GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IPROC) (GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FPROC) (GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IPROC) (GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IPROC) (GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IPROC) (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FPROC) (GLuint index, GLfloat x); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FPROC) (GLuint index, GLfloat x, GLfloat y); 
+typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBPOINTERPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLVIEWPORTPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glActiveTexture (GLenum texture); +GL_APICALL void GL_APIENTRY glAttachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glBindAttribLocation (GLuint program, GLuint index, const GLchar *name); +GL_APICALL void GL_APIENTRY glBindBuffer (GLenum target, GLuint buffer); +GL_APICALL void GL_APIENTRY glBindFramebuffer (GLenum target, GLuint framebuffer); +GL_APICALL void GL_APIENTRY glBindRenderbuffer (GLenum target, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glBindTexture (GLenum target, GLuint texture); +GL_APICALL void GL_APIENTRY glBlendColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glBlendEquation (GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparate (GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFunc (GLenum sfactor, GLenum dfactor); +GL_APICALL void GL_APIENTRY glBlendFuncSeparate (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +GL_APICALL void GL_APIENTRY glBufferData (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +GL_APICALL void GL_APIENTRY glBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +GL_APICALL GLenum GL_APIENTRY glCheckFramebufferStatus (GLenum target); +GL_APICALL void GL_APIENTRY glClear (GLbitfield mask); +GL_APICALL void GL_APIENTRY glClearColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glClearDepthf (GLfloat d); +GL_APICALL void GL_APIENTRY glClearStencil (GLint s); +GL_APICALL void GL_APIENTRY glColorMask (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +GL_APICALL void GL_APIENTRY glCompileShader (GLuint shader); +GL_APICALL void GL_APIENTRY glCompressedTexImage2D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCopyTexImage2D (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GL_APICALL void GL_APIENTRY glCopyTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL GLuint GL_APIENTRY glCreateProgram (void); +GL_APICALL GLuint GL_APIENTRY glCreateShader (GLenum type); +GL_APICALL void GL_APIENTRY glCullFace (GLenum mode); +GL_APICALL void GL_APIENTRY glDeleteBuffers (GLsizei n, const GLuint *buffers); +GL_APICALL void GL_APIENTRY glDeleteFramebuffers (GLsizei n, const GLuint 
*framebuffers); +GL_APICALL void GL_APIENTRY glDeleteProgram (GLuint program); +GL_APICALL void GL_APIENTRY glDeleteRenderbuffers (GLsizei n, const GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glDeleteShader (GLuint shader); +GL_APICALL void GL_APIENTRY glDeleteTextures (GLsizei n, const GLuint *textures); +GL_APICALL void GL_APIENTRY glDepthFunc (GLenum func); +GL_APICALL void GL_APIENTRY glDepthMask (GLboolean flag); +GL_APICALL void GL_APIENTRY glDepthRangef (GLfloat n, GLfloat f); +GL_APICALL void GL_APIENTRY glDetachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glDisable (GLenum cap); +GL_APICALL void GL_APIENTRY glDisableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glDrawArrays (GLenum mode, GLint first, GLsizei count); +GL_APICALL void GL_APIENTRY glDrawElements (GLenum mode, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glEnable (GLenum cap); +GL_APICALL void GL_APIENTRY glEnableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glFinish (void); +GL_APICALL void GL_APIENTRY glFlush (void); +GL_APICALL void GL_APIENTRY glFramebufferRenderbuffer (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glFramebufferTexture2D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GL_APICALL void GL_APIENTRY glFrontFace (GLenum mode); +GL_APICALL void GL_APIENTRY glGenBuffers (GLsizei n, GLuint *buffers); +GL_APICALL void GL_APIENTRY glGenerateMipmap (GLenum target); +GL_APICALL void GL_APIENTRY glGenFramebuffers (GLsizei n, GLuint *framebuffers); +GL_APICALL void GL_APIENTRY glGenRenderbuffers (GLsizei n, GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glGenTextures (GLsizei n, GLuint *textures); +GL_APICALL void GL_APIENTRY glGetActiveAttrib (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetActiveUniform (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetAttachedShaders (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +GL_APICALL GLint GL_APIENTRY glGetAttribLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetBooleanv (GLenum pname, GLboolean *data); +GL_APICALL void GL_APIENTRY glGetBufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL GLenum GL_APIENTRY glGetError (void); +GL_APICALL void GL_APIENTRY glGetFloatv (GLenum pname, GLfloat *data); +GL_APICALL void GL_APIENTRY glGetFramebufferAttachmentParameteriv (GLenum target, GLenum attachment, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetIntegerv (GLenum pname, GLint *data); +GL_APICALL void GL_APIENTRY glGetProgramiv (GLuint program, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetProgramInfoLog (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetRenderbufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderiv (GLuint shader, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderInfoLog (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetShaderPrecisionFormat (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +GL_APICALL void GL_APIENTRY glGetShaderSource (GLuint shader, GLsizei 
bufSize, GLsizei *length, GLchar *source); +GL_APICALL const GLubyte *GL_APIENTRY glGetString (GLenum name); +GL_APICALL void GL_APIENTRY glGetTexParameterfv (GLenum target, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetTexParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetUniformfv (GLuint program, GLint location, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetUniformiv (GLuint program, GLint location, GLint *params); +GL_APICALL GLint GL_APIENTRY glGetUniformLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetVertexAttribfv (GLuint index, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribiv (GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribPointerv (GLuint index, GLenum pname, void **pointer); +GL_APICALL void GL_APIENTRY glHint (GLenum target, GLenum mode); +GL_APICALL GLboolean GL_APIENTRY glIsBuffer (GLuint buffer); +GL_APICALL GLboolean GL_APIENTRY glIsEnabled (GLenum cap); +GL_APICALL GLboolean GL_APIENTRY glIsFramebuffer (GLuint framebuffer); +GL_APICALL GLboolean GL_APIENTRY glIsProgram (GLuint program); +GL_APICALL GLboolean GL_APIENTRY glIsRenderbuffer (GLuint renderbuffer); +GL_APICALL GLboolean GL_APIENTRY glIsShader (GLuint shader); +GL_APICALL GLboolean GL_APIENTRY glIsTexture (GLuint texture); +GL_APICALL void GL_APIENTRY glLineWidth (GLfloat width); +GL_APICALL void GL_APIENTRY glLinkProgram (GLuint program); +GL_APICALL void GL_APIENTRY glPixelStorei (GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glPolygonOffset (GLfloat factor, GLfloat units); +GL_APICALL void GL_APIENTRY glReadPixels (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +GL_APICALL void GL_APIENTRY glReleaseShaderCompiler (void); +GL_APICALL void GL_APIENTRY glRenderbufferStorage (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glSampleCoverage (GLfloat value, GLboolean invert); +GL_APICALL void GL_APIENTRY glScissor (GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glShaderBinary (GLsizei count, const GLuint *shaders, GLenum binaryFormat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glShaderSource (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +GL_APICALL void GL_APIENTRY glStencilFunc (GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilFuncSeparate (GLenum face, GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMask (GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMaskSeparate (GLenum face, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilOp (GLenum fail, GLenum zfail, GLenum zpass); +GL_APICALL void GL_APIENTRY glStencilOpSeparate (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +GL_APICALL void GL_APIENTRY glTexImage2D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexParameterf (GLenum target, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glTexParameterfv (GLenum target, GLenum pname, const GLfloat *params); +GL_APICALL void GL_APIENTRY glTexParameteri (GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glTexParameteriv (GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexSubImage2D (GLenum 
target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glUniform1f (GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glUniform1fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform1i (GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glUniform1iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform2f (GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glUniform2fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform2i (GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glUniform2iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform3f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glUniform3fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform3i (GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glUniform3iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform4f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glUniform4fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform4i (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glUniform4iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUseProgram (GLuint program); +GL_APICALL void GL_APIENTRY glValidateProgram (GLuint program); +GL_APICALL void GL_APIENTRY glVertexAttrib1f (GLuint index, GLfloat x); +GL_APICALL void GL_APIENTRY glVertexAttrib1fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib2f (GLuint index, GLfloat x, GLfloat y); +GL_APICALL void GL_APIENTRY glVertexAttrib2fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib3f (GLuint index, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glVertexAttrib3fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib4f (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GL_APICALL void GL_APIENTRY glVertexAttrib4fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttribPointer (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +GL_APICALL void GL_APIENTRY glViewport (GLint x, GLint y, GLsizei width, GLsizei height); +#endif +#endif /* GL_ES_VERSION_2_0 */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/angle/include/GLES2/gl2ext.h b/vendor/angle/include/GLES2/gl2ext.h new file mode 100644 index 00000000..948cd835 --- /dev/null +++ b/vendor/angle/include/GLES2/gl2ext.h @@ -0,0 +1,3949 @@ +#ifndef __gles2_gl2ext_h_ +#define __gles2_gl2ext_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright 2013-2020 The Khronos Group Inc. 
+** SPDX-License-Identifier: MIT +** +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#ifndef GL_APIENTRYP +#define GL_APIENTRYP GL_APIENTRY* +#endif + +/* Generated on date 20210107 */ + +/* Generated C header for: + * API: gles2 + * Profile: common + * Versions considered: 2\.[0-9] + * Versions emitted: _nomatch_^ + * Default extensions included: gles2 + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_KHR_blend_equation_advanced +#define GL_KHR_blend_equation_advanced 1 +#define GL_MULTIPLY_KHR 0x9294 +#define GL_SCREEN_KHR 0x9295 +#define GL_OVERLAY_KHR 0x9296 +#define GL_DARKEN_KHR 0x9297 +#define GL_LIGHTEN_KHR 0x9298 +#define GL_COLORDODGE_KHR 0x9299 +#define GL_COLORBURN_KHR 0x929A +#define GL_HARDLIGHT_KHR 0x929B +#define GL_SOFTLIGHT_KHR 0x929C +#define GL_DIFFERENCE_KHR 0x929E +#define GL_EXCLUSION_KHR 0x92A0 +#define GL_HSL_HUE_KHR 0x92AD +#define GL_HSL_SATURATION_KHR 0x92AE +#define GL_HSL_COLOR_KHR 0x92AF +#define GL_HSL_LUMINOSITY_KHR 0x92B0 +typedef void (GL_APIENTRYP PFNGLBLENDBARRIERKHRPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBlendBarrierKHR (void); +#endif +#endif /* GL_KHR_blend_equation_advanced */ + +#ifndef GL_KHR_blend_equation_advanced_coherent +#define GL_KHR_blend_equation_advanced_coherent 1 +#define GL_BLEND_ADVANCED_COHERENT_KHR 0x9285 +#endif /* GL_KHR_blend_equation_advanced_coherent */ + +#ifndef GL_KHR_context_flush_control +#define GL_KHR_context_flush_control 1 +#define GL_CONTEXT_RELEASE_BEHAVIOR_KHR 0x82FB +#define GL_CONTEXT_RELEASE_BEHAVIOR_FLUSH_KHR 0x82FC +#endif /* GL_KHR_context_flush_control */ + +#ifndef GL_KHR_debug +#define GL_KHR_debug 1 +typedef void (GL_APIENTRY *GLDEBUGPROCKHR)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam); +#define GL_SAMPLER 0x82E6 +#define GL_DEBUG_OUTPUT_SYNCHRONOUS_KHR 0x8242 +#define GL_DEBUG_NEXT_LOGGED_MESSAGE_LENGTH_KHR 0x8243 +#define GL_DEBUG_CALLBACK_FUNCTION_KHR 0x8244 +#define GL_DEBUG_CALLBACK_USER_PARAM_KHR 0x8245 +#define GL_DEBUG_SOURCE_API_KHR 0x8246 +#define GL_DEBUG_SOURCE_WINDOW_SYSTEM_KHR 0x8247 +#define GL_DEBUG_SOURCE_SHADER_COMPILER_KHR 0x8248 +#define GL_DEBUG_SOURCE_THIRD_PARTY_KHR 0x8249 +#define GL_DEBUG_SOURCE_APPLICATION_KHR 0x824A +#define GL_DEBUG_SOURCE_OTHER_KHR 0x824B +#define GL_DEBUG_TYPE_ERROR_KHR 0x824C +#define GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_KHR 0x824D +#define GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_KHR 0x824E +#define GL_DEBUG_TYPE_PORTABILITY_KHR 0x824F +#define GL_DEBUG_TYPE_PERFORMANCE_KHR 0x8250 +#define GL_DEBUG_TYPE_OTHER_KHR 0x8251 +#define GL_DEBUG_TYPE_MARKER_KHR 0x8268 +#define GL_DEBUG_TYPE_PUSH_GROUP_KHR 0x8269 +#define GL_DEBUG_TYPE_POP_GROUP_KHR 0x826A +#define GL_DEBUG_SEVERITY_NOTIFICATION_KHR 0x826B +#define GL_MAX_DEBUG_GROUP_STACK_DEPTH_KHR 0x826C +#define GL_DEBUG_GROUP_STACK_DEPTH_KHR 0x826D +#define GL_BUFFER_KHR 0x82E0 +#define GL_SHADER_KHR 0x82E1 +#define GL_PROGRAM_KHR 0x82E2 +#define GL_VERTEX_ARRAY_KHR 0x8074 +#define GL_QUERY_KHR 0x82E3 +#define GL_PROGRAM_PIPELINE_KHR 0x82E4 +#define GL_SAMPLER_KHR 0x82E6 +#define GL_MAX_LABEL_LENGTH_KHR 0x82E8 +#define GL_MAX_DEBUG_MESSAGE_LENGTH_KHR 0x9143 +#define GL_MAX_DEBUG_LOGGED_MESSAGES_KHR 0x9144 +#define GL_DEBUG_LOGGED_MESSAGES_KHR 0x9145 +#define 
GL_DEBUG_SEVERITY_HIGH_KHR 0x9146 +#define GL_DEBUG_SEVERITY_MEDIUM_KHR 0x9147 +#define GL_DEBUG_SEVERITY_LOW_KHR 0x9148 +#define GL_DEBUG_OUTPUT_KHR 0x92E0 +#define GL_CONTEXT_FLAG_DEBUG_BIT_KHR 0x00000002 +#define GL_STACK_OVERFLOW_KHR 0x0503 +#define GL_STACK_UNDERFLOW_KHR 0x0504 +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGECONTROLKHRPROC) (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGEINSERTKHRPROC) (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGECALLBACKKHRPROC) (GLDEBUGPROCKHR callback, const void *userParam); +typedef GLuint (GL_APIENTRYP PFNGLGETDEBUGMESSAGELOGKHRPROC) (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +typedef void (GL_APIENTRYP PFNGLPUSHDEBUGGROUPKHRPROC) (GLenum source, GLuint id, GLsizei length, const GLchar *message); +typedef void (GL_APIENTRYP PFNGLPOPDEBUGGROUPKHRPROC) (void); +typedef void (GL_APIENTRYP PFNGLOBJECTLABELKHRPROC) (GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETOBJECTLABELKHRPROC) (GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (GL_APIENTRYP PFNGLOBJECTPTRLABELKHRPROC) (const void *ptr, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETOBJECTPTRLABELKHRPROC) (const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETPOINTERVKHRPROC) (GLenum pname, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDebugMessageControlKHR (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +GL_APICALL void GL_APIENTRY glDebugMessageInsertKHR (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +GL_APICALL void GL_APIENTRY glDebugMessageCallbackKHR (GLDEBUGPROCKHR callback, const void *userParam); +GL_APICALL GLuint GL_APIENTRY glGetDebugMessageLogKHR (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +GL_APICALL void GL_APIENTRY glPushDebugGroupKHR (GLenum source, GLuint id, GLsizei length, const GLchar *message); +GL_APICALL void GL_APIENTRY glPopDebugGroupKHR (void); +GL_APICALL void GL_APIENTRY glObjectLabelKHR (GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glGetObjectLabelKHR (GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +GL_APICALL void GL_APIENTRY glObjectPtrLabelKHR (const void *ptr, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glGetObjectPtrLabelKHR (const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +GL_APICALL void GL_APIENTRY glGetPointervKHR (GLenum pname, void **params); +#endif +#endif /* GL_KHR_debug */ + +#ifndef GL_KHR_no_error +#define GL_KHR_no_error 1 +#define GL_CONTEXT_FLAG_NO_ERROR_BIT_KHR 0x00000008 +#endif /* GL_KHR_no_error */ + +#ifndef GL_KHR_parallel_shader_compile +#define GL_KHR_parallel_shader_compile 1 +#define GL_MAX_SHADER_COMPILER_THREADS_KHR 0x91B0 +#define GL_COMPLETION_STATUS_KHR 0x91B1 +typedef void (GL_APIENTRYP PFNGLMAXSHADERCOMPILERTHREADSKHRPROC) (GLuint count); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glMaxShaderCompilerThreadsKHR (GLuint 
count); +#endif +#endif /* GL_KHR_parallel_shader_compile */ + +#ifndef GL_KHR_robust_buffer_access_behavior +#define GL_KHR_robust_buffer_access_behavior 1 +#endif /* GL_KHR_robust_buffer_access_behavior */ + +#ifndef GL_KHR_robustness +#define GL_KHR_robustness 1 +#define GL_CONTEXT_ROBUST_ACCESS_KHR 0x90F3 +#define GL_LOSE_CONTEXT_ON_RESET_KHR 0x8252 +#define GL_GUILTY_CONTEXT_RESET_KHR 0x8253 +#define GL_INNOCENT_CONTEXT_RESET_KHR 0x8254 +#define GL_UNKNOWN_CONTEXT_RESET_KHR 0x8255 +#define GL_RESET_NOTIFICATION_STRATEGY_KHR 0x8256 +#define GL_NO_RESET_NOTIFICATION_KHR 0x8261 +#define GL_CONTEXT_LOST_KHR 0x0507 +typedef GLenum (GL_APIENTRYP PFNGLGETGRAPHICSRESETSTATUSKHRPROC) (void); +typedef void (GL_APIENTRYP PFNGLREADNPIXELSKHRPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMFVKHRPROC) (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMIVKHRPROC) (GLuint program, GLint location, GLsizei bufSize, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMUIVKHRPROC) (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLenum GL_APIENTRY glGetGraphicsResetStatusKHR (void); +GL_APICALL void GL_APIENTRY glReadnPixelsKHR (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GL_APICALL void GL_APIENTRY glGetnUniformfvKHR (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetnUniformivKHR (GLuint program, GLint location, GLsizei bufSize, GLint *params); +GL_APICALL void GL_APIENTRY glGetnUniformuivKHR (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +#endif +#endif /* GL_KHR_robustness */ + +#ifndef GL_KHR_shader_subgroup +#define GL_KHR_shader_subgroup 1 +#define GL_SUBGROUP_SIZE_KHR 0x9532 +#define GL_SUBGROUP_SUPPORTED_STAGES_KHR 0x9533 +#define GL_SUBGROUP_SUPPORTED_FEATURES_KHR 0x9534 +#define GL_SUBGROUP_QUAD_ALL_STAGES_KHR 0x9535 +#define GL_SUBGROUP_FEATURE_BASIC_BIT_KHR 0x00000001 +#define GL_SUBGROUP_FEATURE_VOTE_BIT_KHR 0x00000002 +#define GL_SUBGROUP_FEATURE_ARITHMETIC_BIT_KHR 0x00000004 +#define GL_SUBGROUP_FEATURE_BALLOT_BIT_KHR 0x00000008 +#define GL_SUBGROUP_FEATURE_SHUFFLE_BIT_KHR 0x00000010 +#define GL_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT_KHR 0x00000020 +#define GL_SUBGROUP_FEATURE_CLUSTERED_BIT_KHR 0x00000040 +#define GL_SUBGROUP_FEATURE_QUAD_BIT_KHR 0x00000080 +#endif /* GL_KHR_shader_subgroup */ + +#ifndef GL_KHR_texture_compression_astc_hdr +#define GL_KHR_texture_compression_astc_hdr 1 +#define GL_COMPRESSED_RGBA_ASTC_4x4_KHR 0x93B0 +#define GL_COMPRESSED_RGBA_ASTC_5x4_KHR 0x93B1 +#define GL_COMPRESSED_RGBA_ASTC_5x5_KHR 0x93B2 +#define GL_COMPRESSED_RGBA_ASTC_6x5_KHR 0x93B3 +#define GL_COMPRESSED_RGBA_ASTC_6x6_KHR 0x93B4 +#define GL_COMPRESSED_RGBA_ASTC_8x5_KHR 0x93B5 +#define GL_COMPRESSED_RGBA_ASTC_8x6_KHR 0x93B6 +#define GL_COMPRESSED_RGBA_ASTC_8x8_KHR 0x93B7 +#define GL_COMPRESSED_RGBA_ASTC_10x5_KHR 0x93B8 +#define GL_COMPRESSED_RGBA_ASTC_10x6_KHR 0x93B9 +#define GL_COMPRESSED_RGBA_ASTC_10x8_KHR 0x93BA +#define GL_COMPRESSED_RGBA_ASTC_10x10_KHR 0x93BB +#define GL_COMPRESSED_RGBA_ASTC_12x10_KHR 0x93BC +#define GL_COMPRESSED_RGBA_ASTC_12x12_KHR 0x93BD +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR 0x93D0 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR 0x93D1 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR 0x93D2 +#define 
GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR 0x93D3 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR 0x93D4 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR 0x93D5 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR 0x93D6 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR 0x93D7 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR 0x93D8 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR 0x93D9 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR 0x93DA +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR 0x93DB +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR 0x93DC +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR 0x93DD +#endif /* GL_KHR_texture_compression_astc_hdr */ + +#ifndef GL_KHR_texture_compression_astc_ldr +#define GL_KHR_texture_compression_astc_ldr 1 +#endif /* GL_KHR_texture_compression_astc_ldr */ + +#ifndef GL_KHR_texture_compression_astc_sliced_3d +#define GL_KHR_texture_compression_astc_sliced_3d 1 +#endif /* GL_KHR_texture_compression_astc_sliced_3d */ + +#ifndef GL_OES_EGL_image +#define GL_OES_EGL_image 1 +typedef void *GLeglImageOES; +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETTEXTURE2DOESPROC) (GLenum target, GLeglImageOES image); +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETRENDERBUFFERSTORAGEOESPROC) (GLenum target, GLeglImageOES image); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glEGLImageTargetTexture2DOES (GLenum target, GLeglImageOES image); +GL_APICALL void GL_APIENTRY glEGLImageTargetRenderbufferStorageOES (GLenum target, GLeglImageOES image); +#endif +#endif /* GL_OES_EGL_image */ + +#ifndef GL_OES_EGL_image_external +#define GL_OES_EGL_image_external 1 +#define GL_TEXTURE_EXTERNAL_OES 0x8D65 +#define GL_TEXTURE_BINDING_EXTERNAL_OES 0x8D67 +#define GL_REQUIRED_TEXTURE_IMAGE_UNITS_OES 0x8D68 +#define GL_SAMPLER_EXTERNAL_OES 0x8D66 +#endif /* GL_OES_EGL_image_external */ + +#ifndef GL_OES_EGL_image_external_essl3 +#define GL_OES_EGL_image_external_essl3 1 +#endif /* GL_OES_EGL_image_external_essl3 */ + +#ifndef GL_OES_compressed_ETC1_RGB8_sub_texture +#define GL_OES_compressed_ETC1_RGB8_sub_texture 1 +#endif /* GL_OES_compressed_ETC1_RGB8_sub_texture */ + +#ifndef GL_OES_compressed_ETC1_RGB8_texture +#define GL_OES_compressed_ETC1_RGB8_texture 1 +#define GL_ETC1_RGB8_OES 0x8D64 +#endif /* GL_OES_compressed_ETC1_RGB8_texture */ + +#ifndef GL_OES_compressed_paletted_texture +#define GL_OES_compressed_paletted_texture 1 +#define GL_PALETTE4_RGB8_OES 0x8B90 +#define GL_PALETTE4_RGBA8_OES 0x8B91 +#define GL_PALETTE4_R5_G6_B5_OES 0x8B92 +#define GL_PALETTE4_RGBA4_OES 0x8B93 +#define GL_PALETTE4_RGB5_A1_OES 0x8B94 +#define GL_PALETTE8_RGB8_OES 0x8B95 +#define GL_PALETTE8_RGBA8_OES 0x8B96 +#define GL_PALETTE8_R5_G6_B5_OES 0x8B97 +#define GL_PALETTE8_RGBA4_OES 0x8B98 +#define GL_PALETTE8_RGB5_A1_OES 0x8B99 +#endif /* GL_OES_compressed_paletted_texture */ + +#ifndef GL_OES_copy_image +#define GL_OES_copy_image 1 +typedef void (GL_APIENTRYP PFNGLCOPYIMAGESUBDATAOESPROC) (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCopyImageSubDataOES (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +#endif +#endif /* GL_OES_copy_image */ + +#ifndef GL_OES_depth24 
+#define GL_OES_depth24 1 +#define GL_DEPTH_COMPONENT24_OES 0x81A6 +#endif /* GL_OES_depth24 */ + +#ifndef GL_OES_depth32 +#define GL_OES_depth32 1 +#define GL_DEPTH_COMPONENT32_OES 0x81A7 +#endif /* GL_OES_depth32 */ + +#ifndef GL_OES_depth_texture +#define GL_OES_depth_texture 1 +#endif /* GL_OES_depth_texture */ + +#ifndef GL_OES_draw_buffers_indexed +#define GL_OES_draw_buffers_indexed 1 +#define GL_MIN 0x8007 +#define GL_MAX 0x8008 +typedef void (GL_APIENTRYP PFNGLENABLEIOESPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLDISABLEIOESPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONIOESPROC) (GLuint buf, GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEIOESPROC) (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCIOESPROC) (GLuint buf, GLenum src, GLenum dst); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEIOESPROC) (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +typedef void (GL_APIENTRYP PFNGLCOLORMASKIOESPROC) (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDIOESPROC) (GLenum target, GLuint index); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glEnableiOES (GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glDisableiOES (GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glBlendEquationiOES (GLuint buf, GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparateiOES (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFunciOES (GLuint buf, GLenum src, GLenum dst); +GL_APICALL void GL_APIENTRY glBlendFuncSeparateiOES (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +GL_APICALL void GL_APIENTRY glColorMaskiOES (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +GL_APICALL GLboolean GL_APIENTRY glIsEnablediOES (GLenum target, GLuint index); +#endif +#endif /* GL_OES_draw_buffers_indexed */ + +#ifndef GL_OES_draw_elements_base_vertex +#define GL_OES_draw_elements_base_vertex 1 +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSBASEVERTEXOESPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSBASEVERTEXOESPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXOESPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSBASEVERTEXEXTPROC) (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount, const GLint *basevertex); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawElementsBaseVertexOES (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawRangeElementsBaseVertexOES (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseVertexOES (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +GL_APICALL void GL_APIENTRY glMultiDrawElementsBaseVertexEXT (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount, const GLint *basevertex); +#endif +#endif /* 
GL_OES_draw_elements_base_vertex */ + +#ifndef GL_OES_element_index_uint +#define GL_OES_element_index_uint 1 +#endif /* GL_OES_element_index_uint */ + +#ifndef GL_OES_fbo_render_mipmap +#define GL_OES_fbo_render_mipmap 1 +#endif /* GL_OES_fbo_render_mipmap */ + +#ifndef GL_OES_fragment_precision_high +#define GL_OES_fragment_precision_high 1 +#endif /* GL_OES_fragment_precision_high */ + +#ifndef GL_OES_geometry_point_size +#define GL_OES_geometry_point_size 1 +#endif /* GL_OES_geometry_point_size */ + +#ifndef GL_OES_geometry_shader +#define GL_OES_geometry_shader 1 +#define GL_GEOMETRY_SHADER_OES 0x8DD9 +#define GL_GEOMETRY_SHADER_BIT_OES 0x00000004 +#define GL_GEOMETRY_LINKED_VERTICES_OUT_OES 0x8916 +#define GL_GEOMETRY_LINKED_INPUT_TYPE_OES 0x8917 +#define GL_GEOMETRY_LINKED_OUTPUT_TYPE_OES 0x8918 +#define GL_GEOMETRY_SHADER_INVOCATIONS_OES 0x887F +#define GL_LAYER_PROVOKING_VERTEX_OES 0x825E +#define GL_LINES_ADJACENCY_OES 0x000A +#define GL_LINE_STRIP_ADJACENCY_OES 0x000B +#define GL_TRIANGLES_ADJACENCY_OES 0x000C +#define GL_TRIANGLE_STRIP_ADJACENCY_OES 0x000D +#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS_OES 0x8DDF +#define GL_MAX_GEOMETRY_UNIFORM_BLOCKS_OES 0x8A2C +#define GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS_OES 0x8A32 +#define GL_MAX_GEOMETRY_INPUT_COMPONENTS_OES 0x9123 +#define GL_MAX_GEOMETRY_OUTPUT_COMPONENTS_OES 0x9124 +#define GL_MAX_GEOMETRY_OUTPUT_VERTICES_OES 0x8DE0 +#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS_OES 0x8DE1 +#define GL_MAX_GEOMETRY_SHADER_INVOCATIONS_OES 0x8E5A +#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_OES 0x8C29 +#define GL_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS_OES 0x92CF +#define GL_MAX_GEOMETRY_ATOMIC_COUNTERS_OES 0x92D5 +#define GL_MAX_GEOMETRY_IMAGE_UNIFORMS_OES 0x90CD +#define GL_MAX_GEOMETRY_SHADER_STORAGE_BLOCKS_OES 0x90D7 +#define GL_FIRST_VERTEX_CONVENTION_OES 0x8E4D +#define GL_LAST_VERTEX_CONVENTION_OES 0x8E4E +#define GL_UNDEFINED_VERTEX_OES 0x8260 +#define GL_PRIMITIVES_GENERATED_OES 0x8C87 +#define GL_FRAMEBUFFER_DEFAULT_LAYERS_OES 0x9312 +#define GL_MAX_FRAMEBUFFER_LAYERS_OES 0x9317 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_OES 0x8DA8 +#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED_OES 0x8DA7 +#define GL_REFERENCED_BY_GEOMETRY_SHADER_OES 0x9309 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTUREOESPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferTextureOES (GLenum target, GLenum attachment, GLuint texture, GLint level); +#endif +#endif /* GL_OES_geometry_shader */ + +#ifndef GL_OES_get_program_binary +#define GL_OES_get_program_binary 1 +#define GL_PROGRAM_BINARY_LENGTH_OES 0x8741 +#define GL_NUM_PROGRAM_BINARY_FORMATS_OES 0x87FE +#define GL_PROGRAM_BINARY_FORMATS_OES 0x87FF +typedef void (GL_APIENTRYP PFNGLGETPROGRAMBINARYOESPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +typedef void (GL_APIENTRYP PFNGLPROGRAMBINARYOESPROC) (GLuint program, GLenum binaryFormat, const void *binary, GLint length); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetProgramBinaryOES (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +GL_APICALL void GL_APIENTRY glProgramBinaryOES (GLuint program, GLenum binaryFormat, const void *binary, GLint length); +#endif +#endif /* GL_OES_get_program_binary */ + +#ifndef GL_OES_gpu_shader5 +#define GL_OES_gpu_shader5 1 +#endif /* GL_OES_gpu_shader5 */ + +#ifndef GL_OES_mapbuffer +#define GL_OES_mapbuffer 1 +#define 
GL_WRITE_ONLY_OES 0x88B9 +#define GL_BUFFER_ACCESS_OES 0x88BB +#define GL_BUFFER_MAPPED_OES 0x88BC +#define GL_BUFFER_MAP_POINTER_OES 0x88BD +typedef void *(GL_APIENTRYP PFNGLMAPBUFFEROESPROC) (GLenum target, GLenum access); +typedef GLboolean (GL_APIENTRYP PFNGLUNMAPBUFFEROESPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVOESPROC) (GLenum target, GLenum pname, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void *GL_APIENTRY glMapBufferOES (GLenum target, GLenum access); +GL_APICALL GLboolean GL_APIENTRY glUnmapBufferOES (GLenum target); +GL_APICALL void GL_APIENTRY glGetBufferPointervOES (GLenum target, GLenum pname, void **params); +#endif +#endif /* GL_OES_mapbuffer */ + +#ifndef GL_OES_packed_depth_stencil +#define GL_OES_packed_depth_stencil 1 +#define GL_DEPTH_STENCIL_OES 0x84F9 +#define GL_UNSIGNED_INT_24_8_OES 0x84FA +#define GL_DEPTH24_STENCIL8_OES 0x88F0 +#endif /* GL_OES_packed_depth_stencil */ + +#ifndef GL_OES_primitive_bounding_box +#define GL_OES_primitive_bounding_box 1 +#define GL_PRIMITIVE_BOUNDING_BOX_OES 0x92BE +typedef void (GL_APIENTRYP PFNGLPRIMITIVEBOUNDINGBOXOESPROC) (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glPrimitiveBoundingBoxOES (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#endif +#endif /* GL_OES_primitive_bounding_box */ + +#ifndef GL_OES_required_internalformat +#define GL_OES_required_internalformat 1 +#define GL_ALPHA8_OES 0x803C +#define GL_DEPTH_COMPONENT16_OES 0x81A5 +#define GL_LUMINANCE4_ALPHA4_OES 0x8043 +#define GL_LUMINANCE8_ALPHA8_OES 0x8045 +#define GL_LUMINANCE8_OES 0x8040 +#define GL_RGBA4_OES 0x8056 +#define GL_RGB5_A1_OES 0x8057 +#define GL_RGB565_OES 0x8D62 +#define GL_RGB8_OES 0x8051 +#define GL_RGBA8_OES 0x8058 +#define GL_RGB10_EXT 0x8052 +#define GL_RGB10_A2_EXT 0x8059 +#endif /* GL_OES_required_internalformat */ + +#ifndef GL_OES_rgb8_rgba8 +#define GL_OES_rgb8_rgba8 1 +#endif /* GL_OES_rgb8_rgba8 */ + +#ifndef GL_OES_sample_shading +#define GL_OES_sample_shading 1 +#define GL_SAMPLE_SHADING_OES 0x8C36 +#define GL_MIN_SAMPLE_SHADING_VALUE_OES 0x8C37 +typedef void (GL_APIENTRYP PFNGLMINSAMPLESHADINGOESPROC) (GLfloat value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glMinSampleShadingOES (GLfloat value); +#endif +#endif /* GL_OES_sample_shading */ + +#ifndef GL_OES_sample_variables +#define GL_OES_sample_variables 1 +#endif /* GL_OES_sample_variables */ + +#ifndef GL_OES_shader_image_atomic +#define GL_OES_shader_image_atomic 1 +#endif /* GL_OES_shader_image_atomic */ + +#ifndef GL_OES_shader_io_blocks +#define GL_OES_shader_io_blocks 1 +#endif /* GL_OES_shader_io_blocks */ + +#ifndef GL_OES_shader_multisample_interpolation +#define GL_OES_shader_multisample_interpolation 1 +#define GL_MIN_FRAGMENT_INTERPOLATION_OFFSET_OES 0x8E5B +#define GL_MAX_FRAGMENT_INTERPOLATION_OFFSET_OES 0x8E5C +#define GL_FRAGMENT_INTERPOLATION_OFFSET_BITS_OES 0x8E5D +#endif /* GL_OES_shader_multisample_interpolation */ + +#ifndef GL_OES_standard_derivatives +#define GL_OES_standard_derivatives 1 +#define GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES 0x8B8B +#endif /* GL_OES_standard_derivatives */ + +#ifndef GL_OES_stencil1 +#define GL_OES_stencil1 1 +#define GL_STENCIL_INDEX1_OES 0x8D46 +#endif /* GL_OES_stencil1 */ + +#ifndef GL_OES_stencil4 +#define GL_OES_stencil4 1 +#define GL_STENCIL_INDEX4_OES 0x8D47 +#endif /* 
GL_OES_stencil4 */ + +#ifndef GL_OES_surfaceless_context +#define GL_OES_surfaceless_context 1 +#define GL_FRAMEBUFFER_UNDEFINED_OES 0x8219 +#endif /* GL_OES_surfaceless_context */ + +#ifndef GL_OES_tessellation_point_size +#define GL_OES_tessellation_point_size 1 +#endif /* GL_OES_tessellation_point_size */ + +#ifndef GL_OES_tessellation_shader +#define GL_OES_tessellation_shader 1 +#define GL_PATCHES_OES 0x000E +#define GL_PATCH_VERTICES_OES 0x8E72 +#define GL_TESS_CONTROL_OUTPUT_VERTICES_OES 0x8E75 +#define GL_TESS_GEN_MODE_OES 0x8E76 +#define GL_TESS_GEN_SPACING_OES 0x8E77 +#define GL_TESS_GEN_VERTEX_ORDER_OES 0x8E78 +#define GL_TESS_GEN_POINT_MODE_OES 0x8E79 +#define GL_ISOLINES_OES 0x8E7A +#define GL_QUADS_OES 0x0007 +#define GL_FRACTIONAL_ODD_OES 0x8E7B +#define GL_FRACTIONAL_EVEN_OES 0x8E7C +#define GL_MAX_PATCH_VERTICES_OES 0x8E7D +#define GL_MAX_TESS_GEN_LEVEL_OES 0x8E7E +#define GL_MAX_TESS_CONTROL_UNIFORM_COMPONENTS_OES 0x8E7F +#define GL_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS_OES 0x8E80 +#define GL_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS_OES 0x8E81 +#define GL_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS_OES 0x8E82 +#define GL_MAX_TESS_CONTROL_OUTPUT_COMPONENTS_OES 0x8E83 +#define GL_MAX_TESS_PATCH_COMPONENTS_OES 0x8E84 +#define GL_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS_OES 0x8E85 +#define GL_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS_OES 0x8E86 +#define GL_MAX_TESS_CONTROL_UNIFORM_BLOCKS_OES 0x8E89 +#define GL_MAX_TESS_EVALUATION_UNIFORM_BLOCKS_OES 0x8E8A +#define GL_MAX_TESS_CONTROL_INPUT_COMPONENTS_OES 0x886C +#define GL_MAX_TESS_EVALUATION_INPUT_COMPONENTS_OES 0x886D +#define GL_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS_OES 0x8E1E +#define GL_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS_OES 0x8E1F +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS_OES 0x92CD +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS_OES 0x92CE +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTERS_OES 0x92D3 +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTERS_OES 0x92D4 +#define GL_MAX_TESS_CONTROL_IMAGE_UNIFORMS_OES 0x90CB +#define GL_MAX_TESS_EVALUATION_IMAGE_UNIFORMS_OES 0x90CC +#define GL_MAX_TESS_CONTROL_SHADER_STORAGE_BLOCKS_OES 0x90D8 +#define GL_MAX_TESS_EVALUATION_SHADER_STORAGE_BLOCKS_OES 0x90D9 +#define GL_PRIMITIVE_RESTART_FOR_PATCHES_SUPPORTED_OES 0x8221 +#define GL_IS_PER_PATCH_OES 0x92E7 +#define GL_REFERENCED_BY_TESS_CONTROL_SHADER_OES 0x9307 +#define GL_REFERENCED_BY_TESS_EVALUATION_SHADER_OES 0x9308 +#define GL_TESS_CONTROL_SHADER_OES 0x8E88 +#define GL_TESS_EVALUATION_SHADER_OES 0x8E87 +#define GL_TESS_CONTROL_SHADER_BIT_OES 0x00000008 +#define GL_TESS_EVALUATION_SHADER_BIT_OES 0x00000010 +typedef void (GL_APIENTRYP PFNGLPATCHPARAMETERIOESPROC) (GLenum pname, GLint value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glPatchParameteriOES (GLenum pname, GLint value); +#endif +#endif /* GL_OES_tessellation_shader */ + +#ifndef GL_OES_texture_3D +#define GL_OES_texture_3D 1 +#define GL_TEXTURE_WRAP_R_OES 0x8072 +#define GL_TEXTURE_3D_OES 0x806F +#define GL_TEXTURE_BINDING_3D_OES 0x806A +#define GL_MAX_3D_TEXTURE_SIZE_OES 0x8073 +#define GL_SAMPLER_3D_OES 0x8B5F +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_OES 0x8CD4 +typedef void (GL_APIENTRYP PFNGLTEXIMAGE3DOESPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE3DOESPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei 
height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE3DOESPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DOESPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DOESPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE3DOESPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexImage3DOES (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexSubImage3DOES (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glCopyTexSubImage3DOES (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glCompressedTexImage3DOES (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage3DOES (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glFramebufferTexture3DOES (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +#endif +#endif /* GL_OES_texture_3D */ + +#ifndef GL_OES_texture_border_clamp +#define GL_OES_texture_border_clamp 1 +#define GL_TEXTURE_BORDER_COLOR_OES 0x1004 +#define GL_CLAMP_TO_BORDER_OES 0x812D +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIIVOESPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIUIVOESPROC) (GLenum target, GLenum pname, const GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIIVOESPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIUIVOESPROC) (GLenum target, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIIVOESPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIUIVOESPROC) (GLuint sampler, GLenum pname, const GLuint *param); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIIVOESPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVOESPROC) (GLuint sampler, GLenum pname, GLuint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexParameterIivOES (GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexParameterIuivOES (GLenum target, GLenum pname, const GLuint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIivOES (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIuivOES 
(GLenum target, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glSamplerParameterIivOES (GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterIuivOES (GLuint sampler, GLenum pname, const GLuint *param); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIivOES (GLuint sampler, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIuivOES (GLuint sampler, GLenum pname, GLuint *params); +#endif +#endif /* GL_OES_texture_border_clamp */ + +#ifndef GL_OES_texture_buffer +#define GL_OES_texture_buffer 1 +#define GL_TEXTURE_BUFFER_OES 0x8C2A +#define GL_TEXTURE_BUFFER_BINDING_OES 0x8C2A +#define GL_MAX_TEXTURE_BUFFER_SIZE_OES 0x8C2B +#define GL_TEXTURE_BINDING_BUFFER_OES 0x8C2C +#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING_OES 0x8C2D +#define GL_TEXTURE_BUFFER_OFFSET_ALIGNMENT_OES 0x919F +#define GL_SAMPLER_BUFFER_OES 0x8DC2 +#define GL_INT_SAMPLER_BUFFER_OES 0x8DD0 +#define GL_UNSIGNED_INT_SAMPLER_BUFFER_OES 0x8DD8 +#define GL_IMAGE_BUFFER_OES 0x9051 +#define GL_INT_IMAGE_BUFFER_OES 0x905C +#define GL_UNSIGNED_INT_IMAGE_BUFFER_OES 0x9067 +#define GL_TEXTURE_BUFFER_OFFSET_OES 0x919D +#define GL_TEXTURE_BUFFER_SIZE_OES 0x919E +typedef void (GL_APIENTRYP PFNGLTEXBUFFEROESPROC) (GLenum target, GLenum internalformat, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTEXBUFFERRANGEOESPROC) (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexBufferOES (GLenum target, GLenum internalformat, GLuint buffer); +GL_APICALL void GL_APIENTRY glTexBufferRangeOES (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +#endif +#endif /* GL_OES_texture_buffer */ + +#ifndef GL_OES_texture_compression_astc +#define GL_OES_texture_compression_astc 1 +#define GL_COMPRESSED_RGBA_ASTC_3x3x3_OES 0x93C0 +#define GL_COMPRESSED_RGBA_ASTC_4x3x3_OES 0x93C1 +#define GL_COMPRESSED_RGBA_ASTC_4x4x3_OES 0x93C2 +#define GL_COMPRESSED_RGBA_ASTC_4x4x4_OES 0x93C3 +#define GL_COMPRESSED_RGBA_ASTC_5x4x4_OES 0x93C4 +#define GL_COMPRESSED_RGBA_ASTC_5x5x4_OES 0x93C5 +#define GL_COMPRESSED_RGBA_ASTC_5x5x5_OES 0x93C6 +#define GL_COMPRESSED_RGBA_ASTC_6x5x5_OES 0x93C7 +#define GL_COMPRESSED_RGBA_ASTC_6x6x5_OES 0x93C8 +#define GL_COMPRESSED_RGBA_ASTC_6x6x6_OES 0x93C9 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_3x3x3_OES 0x93E0 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x3x3_OES 0x93E1 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4x3_OES 0x93E2 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4x4_OES 0x93E3 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4x4_OES 0x93E4 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5x4_OES 0x93E5 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5x5_OES 0x93E6 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5x5_OES 0x93E7 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6x5_OES 0x93E8 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6x6_OES 0x93E9 +#endif /* GL_OES_texture_compression_astc */ + +#ifndef GL_OES_texture_cube_map_array +#define GL_OES_texture_cube_map_array 1 +#define GL_TEXTURE_CUBE_MAP_ARRAY_OES 0x9009 +#define GL_TEXTURE_BINDING_CUBE_MAP_ARRAY_OES 0x900A +#define GL_SAMPLER_CUBE_MAP_ARRAY_OES 0x900C +#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW_OES 0x900D +#define GL_INT_SAMPLER_CUBE_MAP_ARRAY_OES 0x900E +#define GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY_OES 0x900F +#define GL_IMAGE_CUBE_MAP_ARRAY_OES 0x9054 +#define GL_INT_IMAGE_CUBE_MAP_ARRAY_OES 0x905F +#define GL_UNSIGNED_INT_IMAGE_CUBE_MAP_ARRAY_OES 0x906A +#endif /* 
GL_OES_texture_cube_map_array */ + +#ifndef GL_OES_texture_float +#define GL_OES_texture_float 1 +#endif /* GL_OES_texture_float */ + +#ifndef GL_OES_texture_float_linear +#define GL_OES_texture_float_linear 1 +#endif /* GL_OES_texture_float_linear */ + +#ifndef GL_OES_texture_half_float +#define GL_OES_texture_half_float 1 +#define GL_HALF_FLOAT_OES 0x8D61 +#endif /* GL_OES_texture_half_float */ + +#ifndef GL_OES_texture_half_float_linear +#define GL_OES_texture_half_float_linear 1 +#endif /* GL_OES_texture_half_float_linear */ + +#ifndef GL_OES_texture_npot +#define GL_OES_texture_npot 1 +#endif /* GL_OES_texture_npot */ + +#ifndef GL_OES_texture_stencil8 +#define GL_OES_texture_stencil8 1 +#define GL_STENCIL_INDEX_OES 0x1901 +#define GL_STENCIL_INDEX8_OES 0x8D48 +#endif /* GL_OES_texture_stencil8 */ + +#ifndef GL_OES_texture_storage_multisample_2d_array +#define GL_OES_texture_storage_multisample_2d_array 1 +#define GL_TEXTURE_2D_MULTISAMPLE_ARRAY_OES 0x9102 +#define GL_TEXTURE_BINDING_2D_MULTISAMPLE_ARRAY_OES 0x9105 +#define GL_SAMPLER_2D_MULTISAMPLE_ARRAY_OES 0x910B +#define GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY_OES 0x910C +#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY_OES 0x910D +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DMULTISAMPLEOESPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexStorage3DMultisampleOES (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +#endif +#endif /* GL_OES_texture_storage_multisample_2d_array */ + +#ifndef GL_OES_texture_view +#define GL_OES_texture_view 1 +#define GL_TEXTURE_VIEW_MIN_LEVEL_OES 0x82DB +#define GL_TEXTURE_VIEW_NUM_LEVELS_OES 0x82DC +#define GL_TEXTURE_VIEW_MIN_LAYER_OES 0x82DD +#define GL_TEXTURE_VIEW_NUM_LAYERS_OES 0x82DE +#define GL_TEXTURE_IMMUTABLE_LEVELS 0x82DF +typedef void (GL_APIENTRYP PFNGLTEXTUREVIEWOESPROC) (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTextureViewOES (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +#endif +#endif /* GL_OES_texture_view */ + +#ifndef GL_OES_vertex_array_object +#define GL_OES_vertex_array_object 1 +#define GL_VERTEX_ARRAY_BINDING_OES 0x85B5 +typedef void (GL_APIENTRYP PFNGLBINDVERTEXARRAYOESPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLDELETEVERTEXARRAYSOESPROC) (GLsizei n, const GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLGENVERTEXARRAYSOESPROC) (GLsizei n, GLuint *arrays); +typedef GLboolean (GL_APIENTRYP PFNGLISVERTEXARRAYOESPROC) (GLuint array); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBindVertexArrayOES (GLuint array); +GL_APICALL void GL_APIENTRY glDeleteVertexArraysOES (GLsizei n, const GLuint *arrays); +GL_APICALL void GL_APIENTRY glGenVertexArraysOES (GLsizei n, GLuint *arrays); +GL_APICALL GLboolean GL_APIENTRY glIsVertexArrayOES (GLuint array); +#endif +#endif /* GL_OES_vertex_array_object */ + +#ifndef GL_OES_vertex_half_float +#define GL_OES_vertex_half_float 1 +#endif /* GL_OES_vertex_half_float */ + +#ifndef GL_OES_vertex_type_10_10_10_2 +#define GL_OES_vertex_type_10_10_10_2 1 +#define GL_UNSIGNED_INT_10_10_10_2_OES 0x8DF6 +#define GL_INT_10_10_10_2_OES 
0x8DF7 +#endif /* GL_OES_vertex_type_10_10_10_2 */ + +#ifndef GL_OES_viewport_array +#define GL_OES_viewport_array 1 +#define GL_MAX_VIEWPORTS_OES 0x825B +#define GL_VIEWPORT_SUBPIXEL_BITS_OES 0x825C +#define GL_VIEWPORT_BOUNDS_RANGE_OES 0x825D +#define GL_VIEWPORT_INDEX_PROVOKING_VERTEX_OES 0x825F +typedef void (GL_APIENTRYP PFNGLVIEWPORTARRAYVOESPROC) (GLuint first, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVIEWPORTINDEXEDFOESPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +typedef void (GL_APIENTRYP PFNGLVIEWPORTINDEXEDFVOESPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLSCISSORARRAYVOESPROC) (GLuint first, GLsizei count, const GLint *v); +typedef void (GL_APIENTRYP PFNGLSCISSORINDEXEDOESPROC) (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSCISSORINDEXEDVOESPROC) (GLuint index, const GLint *v); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEARRAYFVOESPROC) (GLuint first, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEINDEXEDFOESPROC) (GLuint index, GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLGETFLOATI_VOESPROC) (GLenum target, GLuint index, GLfloat *data); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glViewportArrayvOES (GLuint first, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glViewportIndexedfOES (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +GL_APICALL void GL_APIENTRY glViewportIndexedfvOES (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glScissorArrayvOES (GLuint first, GLsizei count, const GLint *v); +GL_APICALL void GL_APIENTRY glScissorIndexedOES (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glScissorIndexedvOES (GLuint index, const GLint *v); +GL_APICALL void GL_APIENTRY glDepthRangeArrayfvOES (GLuint first, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glDepthRangeIndexedfOES (GLuint index, GLfloat n, GLfloat f); +GL_APICALL void GL_APIENTRY glGetFloati_vOES (GLenum target, GLuint index, GLfloat *data); +#endif +#endif /* GL_OES_viewport_array */ + +#ifndef GL_AMD_compressed_3DC_texture +#define GL_AMD_compressed_3DC_texture 1 +#define GL_3DC_X_AMD 0x87F9 +#define GL_3DC_XY_AMD 0x87FA +#endif /* GL_AMD_compressed_3DC_texture */ + +#ifndef GL_AMD_compressed_ATC_texture +#define GL_AMD_compressed_ATC_texture 1 +#define GL_ATC_RGB_AMD 0x8C92 +#define GL_ATC_RGBA_EXPLICIT_ALPHA_AMD 0x8C93 +#define GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD 0x87EE +#endif /* GL_AMD_compressed_ATC_texture */ + +#ifndef GL_AMD_framebuffer_multisample_advanced +#define GL_AMD_framebuffer_multisample_advanced 1 +#define GL_RENDERBUFFER_STORAGE_SAMPLES_AMD 0x91B2 +#define GL_MAX_COLOR_FRAMEBUFFER_SAMPLES_AMD 0x91B3 +#define GL_MAX_COLOR_FRAMEBUFFER_STORAGE_SAMPLES_AMD 0x91B4 +#define GL_MAX_DEPTH_STENCIL_FRAMEBUFFER_SAMPLES_AMD 0x91B5 +#define GL_NUM_SUPPORTED_MULTISAMPLE_MODES_AMD 0x91B6 +#define GL_SUPPORTED_MULTISAMPLE_MODES_AMD 0x91B7 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEADVANCEDAMDPROC) (GLenum target, GLsizei samples, GLsizei storageSamples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLNAMEDRENDERBUFFERSTORAGEMULTISAMPLEADVANCEDAMDPROC) (GLuint renderbuffer, GLsizei samples, GLsizei storageSamples, GLenum internalformat, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY 
glRenderbufferStorageMultisampleAdvancedAMD (GLenum target, GLsizei samples, GLsizei storageSamples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glNamedRenderbufferStorageMultisampleAdvancedAMD (GLuint renderbuffer, GLsizei samples, GLsizei storageSamples, GLenum internalformat, GLsizei width, GLsizei height); +#endif +#endif /* GL_AMD_framebuffer_multisample_advanced */ + +#ifndef GL_AMD_performance_monitor +#define GL_AMD_performance_monitor 1 +#define GL_COUNTER_TYPE_AMD 0x8BC0 +#define GL_COUNTER_RANGE_AMD 0x8BC1 +#define GL_UNSIGNED_INT64_AMD 0x8BC2 +#define GL_PERCENTAGE_AMD 0x8BC3 +#define GL_PERFMON_RESULT_AVAILABLE_AMD 0x8BC4 +#define GL_PERFMON_RESULT_SIZE_AMD 0x8BC5 +#define GL_PERFMON_RESULT_AMD 0x8BC6 +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORGROUPSAMDPROC) (GLint *numGroups, GLsizei groupsSize, GLuint *groups); +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORCOUNTERSAMDPROC) (GLuint group, GLint *numCounters, GLint *maxActiveCounters, GLsizei counterSize, GLuint *counters); +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORGROUPSTRINGAMDPROC) (GLuint group, GLsizei bufSize, GLsizei *length, GLchar *groupString); +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORCOUNTERSTRINGAMDPROC) (GLuint group, GLuint counter, GLsizei bufSize, GLsizei *length, GLchar *counterString); +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORCOUNTERINFOAMDPROC) (GLuint group, GLuint counter, GLenum pname, void *data); +typedef void (GL_APIENTRYP PFNGLGENPERFMONITORSAMDPROC) (GLsizei n, GLuint *monitors); +typedef void (GL_APIENTRYP PFNGLDELETEPERFMONITORSAMDPROC) (GLsizei n, GLuint *monitors); +typedef void (GL_APIENTRYP PFNGLSELECTPERFMONITORCOUNTERSAMDPROC) (GLuint monitor, GLboolean enable, GLuint group, GLint numCounters, GLuint *counterList); +typedef void (GL_APIENTRYP PFNGLBEGINPERFMONITORAMDPROC) (GLuint monitor); +typedef void (GL_APIENTRYP PFNGLENDPERFMONITORAMDPROC) (GLuint monitor); +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORCOUNTERDATAAMDPROC) (GLuint monitor, GLenum pname, GLsizei dataSize, GLuint *data, GLint *bytesWritten); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetPerfMonitorGroupsAMD (GLint *numGroups, GLsizei groupsSize, GLuint *groups); +GL_APICALL void GL_APIENTRY glGetPerfMonitorCountersAMD (GLuint group, GLint *numCounters, GLint *maxActiveCounters, GLsizei counterSize, GLuint *counters); +GL_APICALL void GL_APIENTRY glGetPerfMonitorGroupStringAMD (GLuint group, GLsizei bufSize, GLsizei *length, GLchar *groupString); +GL_APICALL void GL_APIENTRY glGetPerfMonitorCounterStringAMD (GLuint group, GLuint counter, GLsizei bufSize, GLsizei *length, GLchar *counterString); +GL_APICALL void GL_APIENTRY glGetPerfMonitorCounterInfoAMD (GLuint group, GLuint counter, GLenum pname, void *data); +GL_APICALL void GL_APIENTRY glGenPerfMonitorsAMD (GLsizei n, GLuint *monitors); +GL_APICALL void GL_APIENTRY glDeletePerfMonitorsAMD (GLsizei n, GLuint *monitors); +GL_APICALL void GL_APIENTRY glSelectPerfMonitorCountersAMD (GLuint monitor, GLboolean enable, GLuint group, GLint numCounters, GLuint *counterList); +GL_APICALL void GL_APIENTRY glBeginPerfMonitorAMD (GLuint monitor); +GL_APICALL void GL_APIENTRY glEndPerfMonitorAMD (GLuint monitor); +GL_APICALL void GL_APIENTRY glGetPerfMonitorCounterDataAMD (GLuint monitor, GLenum pname, GLsizei dataSize, GLuint *data, GLint *bytesWritten); +#endif +#endif /* GL_AMD_performance_monitor */ + +#ifndef GL_AMD_program_binary_Z400 +#define GL_AMD_program_binary_Z400 1 +#define GL_Z400_BINARY_AMD 
0x8740 +#endif /* GL_AMD_program_binary_Z400 */ + +#ifndef GL_ANDROID_extension_pack_es31a +#define GL_ANDROID_extension_pack_es31a 1 +#endif /* GL_ANDROID_extension_pack_es31a */ + +#ifndef GL_ANGLE_depth_texture +#define GL_ANGLE_depth_texture 1 +#endif /* GL_ANGLE_depth_texture */ + +#ifndef GL_ANGLE_framebuffer_blit +#define GL_ANGLE_framebuffer_blit 1 +#define GL_READ_FRAMEBUFFER_ANGLE 0x8CA8 +#define GL_DRAW_FRAMEBUFFER_ANGLE 0x8CA9 +#define GL_DRAW_FRAMEBUFFER_BINDING_ANGLE 0x8CA6 +#define GL_READ_FRAMEBUFFER_BINDING_ANGLE 0x8CAA +typedef void (GL_APIENTRYP PFNGLBLITFRAMEBUFFERANGLEPROC) (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBlitFramebufferANGLE (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#endif +#endif /* GL_ANGLE_framebuffer_blit */ + +#ifndef GL_ANGLE_framebuffer_multisample +#define GL_ANGLE_framebuffer_multisample 1 +#define GL_RENDERBUFFER_SAMPLES_ANGLE 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_ANGLE 0x8D56 +#define GL_MAX_SAMPLES_ANGLE 0x8D57 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEANGLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleANGLE (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +#endif +#endif /* GL_ANGLE_framebuffer_multisample */ + +#ifndef GL_ANGLE_instanced_arrays +#define GL_ANGLE_instanced_arrays 1 +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR_ANGLE 0x88FE +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDANGLEPROC) (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDANGLEPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISORANGLEPROC) (GLuint index, GLuint divisor); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawArraysInstancedANGLE (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedANGLE (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +GL_APICALL void GL_APIENTRY glVertexAttribDivisorANGLE (GLuint index, GLuint divisor); +#endif +#endif /* GL_ANGLE_instanced_arrays */ + +#ifndef GL_ANGLE_pack_reverse_row_order +#define GL_ANGLE_pack_reverse_row_order 1 +#define GL_PACK_REVERSE_ROW_ORDER_ANGLE 0x93A4 +#endif /* GL_ANGLE_pack_reverse_row_order */ + +#ifndef GL_ANGLE_program_binary +#define GL_ANGLE_program_binary 1 +#define GL_PROGRAM_BINARY_ANGLE 0x93A6 +#endif /* GL_ANGLE_program_binary */ + +#ifndef GL_ANGLE_texture_compression_dxt3 +#define GL_ANGLE_texture_compression_dxt3 1 +#define GL_COMPRESSED_RGBA_S3TC_DXT3_ANGLE 0x83F2 +#endif /* GL_ANGLE_texture_compression_dxt3 */ + +#ifndef GL_ANGLE_texture_compression_dxt5 +#define GL_ANGLE_texture_compression_dxt5 1 +#define GL_COMPRESSED_RGBA_S3TC_DXT5_ANGLE 0x83F3 +#endif /* GL_ANGLE_texture_compression_dxt5 */ + +#ifndef GL_ANGLE_texture_usage +#define GL_ANGLE_texture_usage 1 +#define GL_TEXTURE_USAGE_ANGLE 0x93A2 +#define GL_FRAMEBUFFER_ATTACHMENT_ANGLE 0x93A3 +#endif /* GL_ANGLE_texture_usage */ + +#ifndef GL_ANGLE_translated_shader_source +#define GL_ANGLE_translated_shader_source 1 +#define 
GL_TRANSLATED_SHADER_SOURCE_LENGTH_ANGLE 0x93A0 +typedef void (GL_APIENTRYP PFNGLGETTRANSLATEDSHADERSOURCEANGLEPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetTranslatedShaderSourceANGLE (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +#endif +#endif /* GL_ANGLE_translated_shader_source */ + +#ifndef GL_APPLE_clip_distance +#define GL_APPLE_clip_distance 1 +#define GL_MAX_CLIP_DISTANCES_APPLE 0x0D32 +#define GL_CLIP_DISTANCE0_APPLE 0x3000 +#define GL_CLIP_DISTANCE1_APPLE 0x3001 +#define GL_CLIP_DISTANCE2_APPLE 0x3002 +#define GL_CLIP_DISTANCE3_APPLE 0x3003 +#define GL_CLIP_DISTANCE4_APPLE 0x3004 +#define GL_CLIP_DISTANCE5_APPLE 0x3005 +#define GL_CLIP_DISTANCE6_APPLE 0x3006 +#define GL_CLIP_DISTANCE7_APPLE 0x3007 +#endif /* GL_APPLE_clip_distance */ + +#ifndef GL_APPLE_color_buffer_packed_float +#define GL_APPLE_color_buffer_packed_float 1 +#endif /* GL_APPLE_color_buffer_packed_float */ + +#ifndef GL_APPLE_copy_texture_levels +#define GL_APPLE_copy_texture_levels 1 +typedef void (GL_APIENTRYP PFNGLCOPYTEXTURELEVELSAPPLEPROC) (GLuint destinationTexture, GLuint sourceTexture, GLint sourceBaseLevel, GLsizei sourceLevelCount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCopyTextureLevelsAPPLE (GLuint destinationTexture, GLuint sourceTexture, GLint sourceBaseLevel, GLsizei sourceLevelCount); +#endif +#endif /* GL_APPLE_copy_texture_levels */ + +#ifndef GL_APPLE_framebuffer_multisample +#define GL_APPLE_framebuffer_multisample 1 +#define GL_RENDERBUFFER_SAMPLES_APPLE 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_APPLE 0x8D56 +#define GL_MAX_SAMPLES_APPLE 0x8D57 +#define GL_READ_FRAMEBUFFER_APPLE 0x8CA8 +#define GL_DRAW_FRAMEBUFFER_APPLE 0x8CA9 +#define GL_DRAW_FRAMEBUFFER_BINDING_APPLE 0x8CA6 +#define GL_READ_FRAMEBUFFER_BINDING_APPLE 0x8CAA +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEAPPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLRESOLVEMULTISAMPLEFRAMEBUFFERAPPLEPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleAPPLE (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glResolveMultisampleFramebufferAPPLE (void); +#endif +#endif /* GL_APPLE_framebuffer_multisample */ + +#ifndef GL_APPLE_rgb_422 +#define GL_APPLE_rgb_422 1 +#define GL_RGB_422_APPLE 0x8A1F +#define GL_UNSIGNED_SHORT_8_8_APPLE 0x85BA +#define GL_UNSIGNED_SHORT_8_8_REV_APPLE 0x85BB +#define GL_RGB_RAW_422_APPLE 0x8A51 +#endif /* GL_APPLE_rgb_422 */ + +#ifndef GL_APPLE_sync +#define GL_APPLE_sync 1 +#define GL_SYNC_OBJECT_APPLE 0x8A53 +#define GL_MAX_SERVER_WAIT_TIMEOUT_APPLE 0x9111 +#define GL_OBJECT_TYPE_APPLE 0x9112 +#define GL_SYNC_CONDITION_APPLE 0x9113 +#define GL_SYNC_STATUS_APPLE 0x9114 +#define GL_SYNC_FLAGS_APPLE 0x9115 +#define GL_SYNC_FENCE_APPLE 0x9116 +#define GL_SYNC_GPU_COMMANDS_COMPLETE_APPLE 0x9117 +#define GL_UNSIGNALED_APPLE 0x9118 +#define GL_SIGNALED_APPLE 0x9119 +#define GL_ALREADY_SIGNALED_APPLE 0x911A +#define GL_TIMEOUT_EXPIRED_APPLE 0x911B +#define GL_CONDITION_SATISFIED_APPLE 0x911C +#define GL_WAIT_FAILED_APPLE 0x911D +#define GL_SYNC_FLUSH_COMMANDS_BIT_APPLE 0x00000001 +#define GL_TIMEOUT_IGNORED_APPLE 0xFFFFFFFFFFFFFFFFull +typedef GLsync (GL_APIENTRYP PFNGLFENCESYNCAPPLEPROC) (GLenum condition, GLbitfield flags); +typedef GLboolean (GL_APIENTRYP 
PFNGLISSYNCAPPLEPROC) (GLsync sync); +typedef void (GL_APIENTRYP PFNGLDELETESYNCAPPLEPROC) (GLsync sync); +typedef GLenum (GL_APIENTRYP PFNGLCLIENTWAITSYNCAPPLEPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLWAITSYNCAPPLEPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VAPPLEPROC) (GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGETSYNCIVAPPLEPROC) (GLsync sync, GLenum pname, GLsizei count, GLsizei *length, GLint *values); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLsync GL_APIENTRY glFenceSyncAPPLE (GLenum condition, GLbitfield flags); +GL_APICALL GLboolean GL_APIENTRY glIsSyncAPPLE (GLsync sync); +GL_APICALL void GL_APIENTRY glDeleteSyncAPPLE (GLsync sync); +GL_APICALL GLenum GL_APIENTRY glClientWaitSyncAPPLE (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glWaitSyncAPPLE (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glGetInteger64vAPPLE (GLenum pname, GLint64 *params); +GL_APICALL void GL_APIENTRY glGetSyncivAPPLE (GLsync sync, GLenum pname, GLsizei count, GLsizei *length, GLint *values); +#endif +#endif /* GL_APPLE_sync */ + +#ifndef GL_APPLE_texture_format_BGRA8888 +#define GL_APPLE_texture_format_BGRA8888 1 +#define GL_BGRA_EXT 0x80E1 +#define GL_BGRA8_EXT 0x93A1 +#endif /* GL_APPLE_texture_format_BGRA8888 */ + +#ifndef GL_APPLE_texture_max_level +#define GL_APPLE_texture_max_level 1 +#define GL_TEXTURE_MAX_LEVEL_APPLE 0x813D +#endif /* GL_APPLE_texture_max_level */ + +#ifndef GL_APPLE_texture_packed_float +#define GL_APPLE_texture_packed_float 1 +#define GL_UNSIGNED_INT_10F_11F_11F_REV_APPLE 0x8C3B +#define GL_UNSIGNED_INT_5_9_9_9_REV_APPLE 0x8C3E +#define GL_R11F_G11F_B10F_APPLE 0x8C3A +#define GL_RGB9_E5_APPLE 0x8C3D +#endif /* GL_APPLE_texture_packed_float */ + +#ifndef GL_ARM_mali_program_binary +#define GL_ARM_mali_program_binary 1 +#define GL_MALI_PROGRAM_BINARY_ARM 0x8F61 +#endif /* GL_ARM_mali_program_binary */ + +#ifndef GL_ARM_mali_shader_binary +#define GL_ARM_mali_shader_binary 1 +#define GL_MALI_SHADER_BINARY_ARM 0x8F60 +#endif /* GL_ARM_mali_shader_binary */ + +#ifndef GL_ARM_rgba8 +#define GL_ARM_rgba8 1 +#endif /* GL_ARM_rgba8 */ + +#ifndef GL_ARM_shader_framebuffer_fetch +#define GL_ARM_shader_framebuffer_fetch 1 +#define GL_FETCH_PER_SAMPLE_ARM 0x8F65 +#define GL_FRAGMENT_SHADER_FRAMEBUFFER_FETCH_MRT_ARM 0x8F66 +#endif /* GL_ARM_shader_framebuffer_fetch */ + +#ifndef GL_ARM_shader_framebuffer_fetch_depth_stencil +#define GL_ARM_shader_framebuffer_fetch_depth_stencil 1 +#endif /* GL_ARM_shader_framebuffer_fetch_depth_stencil */ + +#ifndef GL_ARM_texture_unnormalized_coordinates +#define GL_ARM_texture_unnormalized_coordinates 1 +#define GL_TEXTURE_UNNORMALIZED_COORDINATES_ARM 0x8F6A +#endif /* GL_ARM_texture_unnormalized_coordinates */ + +#ifndef GL_DMP_program_binary +#define GL_DMP_program_binary 1 +#define GL_SMAPHS30_PROGRAM_BINARY_DMP 0x9251 +#define GL_SMAPHS_PROGRAM_BINARY_DMP 0x9252 +#define GL_DMP_PROGRAM_BINARY_DMP 0x9253 +#endif /* GL_DMP_program_binary */ + +#ifndef GL_DMP_shader_binary +#define GL_DMP_shader_binary 1 +#define GL_SHADER_BINARY_DMP 0x9250 +#endif /* GL_DMP_shader_binary */ + +#ifndef GL_EXT_EGL_image_array +#define GL_EXT_EGL_image_array 1 +#endif /* GL_EXT_EGL_image_array */ + +#ifndef GL_EXT_EGL_image_storage +#define GL_EXT_EGL_image_storage 1 +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETTEXSTORAGEEXTPROC) (GLenum target, GLeglImageOES image, const GLint* 
attrib_list); +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETTEXTURESTORAGEEXTPROC) (GLuint texture, GLeglImageOES image, const GLint* attrib_list); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glEGLImageTargetTexStorageEXT (GLenum target, GLeglImageOES image, const GLint* attrib_list); +GL_APICALL void GL_APIENTRY glEGLImageTargetTextureStorageEXT (GLuint texture, GLeglImageOES image, const GLint* attrib_list); +#endif +#endif /* GL_EXT_EGL_image_storage */ + +#ifndef GL_EXT_YUV_target +#define GL_EXT_YUV_target 1 +#define GL_SAMPLER_EXTERNAL_2D_Y2Y_EXT 0x8BE7 +#endif /* GL_EXT_YUV_target */ + +#ifndef GL_EXT_base_instance +#define GL_EXT_base_instance 1 +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDBASEINSTANCEEXTPROC) (GLenum mode, GLint first, GLsizei count, GLsizei instancecount, GLuint baseinstance); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEINSTANCEEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLuint baseinstance); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXBASEINSTANCEEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex, GLuint baseinstance); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawArraysInstancedBaseInstanceEXT (GLenum mode, GLint first, GLsizei count, GLsizei instancecount, GLuint baseinstance); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseInstanceEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLuint baseinstance); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseVertexBaseInstanceEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex, GLuint baseinstance); +#endif +#endif /* GL_EXT_base_instance */ + +#ifndef GL_EXT_blend_func_extended +#define GL_EXT_blend_func_extended 1 +#define GL_SRC1_COLOR_EXT 0x88F9 +#define GL_SRC1_ALPHA_EXT 0x8589 +#define GL_ONE_MINUS_SRC1_COLOR_EXT 0x88FA +#define GL_ONE_MINUS_SRC1_ALPHA_EXT 0x88FB +#define GL_SRC_ALPHA_SATURATE_EXT 0x0308 +#define GL_LOCATION_INDEX_EXT 0x930F +#define GL_MAX_DUAL_SOURCE_DRAW_BUFFERS_EXT 0x88FC +typedef void (GL_APIENTRYP PFNGLBINDFRAGDATALOCATIONINDEXEDEXTPROC) (GLuint program, GLuint colorNumber, GLuint index, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLBINDFRAGDATALOCATIONEXTPROC) (GLuint program, GLuint color, const GLchar *name); +typedef GLint (GL_APIENTRYP PFNGLGETPROGRAMRESOURCELOCATIONINDEXEXTPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef GLint (GL_APIENTRYP PFNGLGETFRAGDATAINDEXEXTPROC) (GLuint program, const GLchar *name); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBindFragDataLocationIndexedEXT (GLuint program, GLuint colorNumber, GLuint index, const GLchar *name); +GL_APICALL void GL_APIENTRY glBindFragDataLocationEXT (GLuint program, GLuint color, const GLchar *name); +GL_APICALL GLint GL_APIENTRY glGetProgramResourceLocationIndexEXT (GLuint program, GLenum programInterface, const GLchar *name); +GL_APICALL GLint GL_APIENTRY glGetFragDataIndexEXT (GLuint program, const GLchar *name); +#endif +#endif /* GL_EXT_blend_func_extended */ + +#ifndef GL_EXT_blend_minmax +#define GL_EXT_blend_minmax 1 +#define GL_MIN_EXT 0x8007 +#define GL_MAX_EXT 0x8008 +#endif /* GL_EXT_blend_minmax */ + +#ifndef GL_EXT_buffer_storage +#define GL_EXT_buffer_storage 1 +#define GL_MAP_READ_BIT 0x0001 +#define GL_MAP_WRITE_BIT 0x0002 +#define GL_MAP_PERSISTENT_BIT_EXT 0x0040 
+#define GL_MAP_COHERENT_BIT_EXT 0x0080 +#define GL_DYNAMIC_STORAGE_BIT_EXT 0x0100 +#define GL_CLIENT_STORAGE_BIT_EXT 0x0200 +#define GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT_EXT 0x00004000 +#define GL_BUFFER_IMMUTABLE_STORAGE_EXT 0x821F +#define GL_BUFFER_STORAGE_FLAGS_EXT 0x8220 +typedef void (GL_APIENTRYP PFNGLBUFFERSTORAGEEXTPROC) (GLenum target, GLsizeiptr size, const void *data, GLbitfield flags); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBufferStorageEXT (GLenum target, GLsizeiptr size, const void *data, GLbitfield flags); +#endif +#endif /* GL_EXT_buffer_storage */ + +#ifndef GL_EXT_clear_texture +#define GL_EXT_clear_texture 1 +typedef void (GL_APIENTRYP PFNGLCLEARTEXIMAGEEXTPROC) (GLuint texture, GLint level, GLenum format, GLenum type, const void *data); +typedef void (GL_APIENTRYP PFNGLCLEARTEXSUBIMAGEEXTPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *data); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glClearTexImageEXT (GLuint texture, GLint level, GLenum format, GLenum type, const void *data); +GL_APICALL void GL_APIENTRY glClearTexSubImageEXT (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *data); +#endif +#endif /* GL_EXT_clear_texture */ + +#ifndef GL_EXT_clip_control +#define GL_EXT_clip_control 1 +#define GL_LOWER_LEFT_EXT 0x8CA1 +#define GL_UPPER_LEFT_EXT 0x8CA2 +#define GL_NEGATIVE_ONE_TO_ONE_EXT 0x935E +#define GL_ZERO_TO_ONE_EXT 0x935F +#define GL_CLIP_ORIGIN_EXT 0x935C +#define GL_CLIP_DEPTH_MODE_EXT 0x935D +typedef void (GL_APIENTRYP PFNGLCLIPCONTROLEXTPROC) (GLenum origin, GLenum depth); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glClipControlEXT (GLenum origin, GLenum depth); +#endif +#endif /* GL_EXT_clip_control */ + +#ifndef GL_EXT_clip_cull_distance +#define GL_EXT_clip_cull_distance 1 +#define GL_MAX_CLIP_DISTANCES_EXT 0x0D32 +#define GL_MAX_CULL_DISTANCES_EXT 0x82F9 +#define GL_MAX_COMBINED_CLIP_AND_CULL_DISTANCES_EXT 0x82FA +#define GL_CLIP_DISTANCE0_EXT 0x3000 +#define GL_CLIP_DISTANCE1_EXT 0x3001 +#define GL_CLIP_DISTANCE2_EXT 0x3002 +#define GL_CLIP_DISTANCE3_EXT 0x3003 +#define GL_CLIP_DISTANCE4_EXT 0x3004 +#define GL_CLIP_DISTANCE5_EXT 0x3005 +#define GL_CLIP_DISTANCE6_EXT 0x3006 +#define GL_CLIP_DISTANCE7_EXT 0x3007 +#endif /* GL_EXT_clip_cull_distance */ + +#ifndef GL_EXT_color_buffer_float +#define GL_EXT_color_buffer_float 1 +#endif /* GL_EXT_color_buffer_float */ + +#ifndef GL_EXT_color_buffer_half_float +#define GL_EXT_color_buffer_half_float 1 +#define GL_RGBA16F_EXT 0x881A +#define GL_RGB16F_EXT 0x881B +#define GL_RG16F_EXT 0x822F +#define GL_R16F_EXT 0x822D +#define GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE_EXT 0x8211 +#define GL_UNSIGNED_NORMALIZED_EXT 0x8C17 +#endif /* GL_EXT_color_buffer_half_float */ + +#ifndef GL_EXT_conservative_depth +#define GL_EXT_conservative_depth 1 +#endif /* GL_EXT_conservative_depth */ + +#ifndef GL_EXT_copy_image +#define GL_EXT_copy_image 1 +typedef void (GL_APIENTRYP PFNGLCOPYIMAGESUBDATAEXTPROC) (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCopyImageSubDataEXT (GLuint srcName, GLenum srcTarget, GLint 
srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +#endif +#endif /* GL_EXT_copy_image */ + +#ifndef GL_EXT_debug_label +#define GL_EXT_debug_label 1 +#define GL_PROGRAM_PIPELINE_OBJECT_EXT 0x8A4F +#define GL_PROGRAM_OBJECT_EXT 0x8B40 +#define GL_SHADER_OBJECT_EXT 0x8B48 +#define GL_BUFFER_OBJECT_EXT 0x9151 +#define GL_QUERY_OBJECT_EXT 0x9153 +#define GL_VERTEX_ARRAY_OBJECT_EXT 0x9154 +#define GL_TRANSFORM_FEEDBACK 0x8E22 +typedef void (GL_APIENTRYP PFNGLLABELOBJECTEXTPROC) (GLenum type, GLuint object, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETOBJECTLABELEXTPROC) (GLenum type, GLuint object, GLsizei bufSize, GLsizei *length, GLchar *label); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glLabelObjectEXT (GLenum type, GLuint object, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glGetObjectLabelEXT (GLenum type, GLuint object, GLsizei bufSize, GLsizei *length, GLchar *label); +#endif +#endif /* GL_EXT_debug_label */ + +#ifndef GL_EXT_debug_marker +#define GL_EXT_debug_marker 1 +typedef void (GL_APIENTRYP PFNGLINSERTEVENTMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (GL_APIENTRYP PFNGLPUSHGROUPMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (GL_APIENTRYP PFNGLPOPGROUPMARKEREXTPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glInsertEventMarkerEXT (GLsizei length, const GLchar *marker); +GL_APICALL void GL_APIENTRY glPushGroupMarkerEXT (GLsizei length, const GLchar *marker); +GL_APICALL void GL_APIENTRY glPopGroupMarkerEXT (void); +#endif +#endif /* GL_EXT_debug_marker */ + +#ifndef GL_EXT_depth_clamp +#define GL_EXT_depth_clamp 1 +#define GL_DEPTH_CLAMP_EXT 0x864F +#endif /* GL_EXT_depth_clamp */ + +#ifndef GL_EXT_discard_framebuffer +#define GL_EXT_discard_framebuffer 1 +#define GL_COLOR_EXT 0x1800 +#define GL_DEPTH_EXT 0x1801 +#define GL_STENCIL_EXT 0x1802 +typedef void (GL_APIENTRYP PFNGLDISCARDFRAMEBUFFEREXTPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDiscardFramebufferEXT (GLenum target, GLsizei numAttachments, const GLenum *attachments); +#endif +#endif /* GL_EXT_discard_framebuffer */ + +#ifndef GL_EXT_disjoint_timer_query +#define GL_EXT_disjoint_timer_query 1 +#define GL_QUERY_COUNTER_BITS_EXT 0x8864 +#define GL_CURRENT_QUERY_EXT 0x8865 +#define GL_QUERY_RESULT_EXT 0x8866 +#define GL_QUERY_RESULT_AVAILABLE_EXT 0x8867 +#define GL_TIME_ELAPSED_EXT 0x88BF +#define GL_TIMESTAMP_EXT 0x8E28 +#define GL_GPU_DISJOINT_EXT 0x8FBB +typedef void (GL_APIENTRYP PFNGLGENQUERIESEXTPROC) (GLsizei n, GLuint *ids); +typedef void (GL_APIENTRYP PFNGLDELETEQUERIESEXTPROC) (GLsizei n, const GLuint *ids); +typedef GLboolean (GL_APIENTRYP PFNGLISQUERYEXTPROC) (GLuint id); +typedef void (GL_APIENTRYP PFNGLBEGINQUERYEXTPROC) (GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLENDQUERYEXTPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLQUERYCOUNTEREXTPROC) (GLuint id, GLenum target); +typedef void (GL_APIENTRYP PFNGLGETQUERYIVEXTPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTIVEXTPROC) (GLuint id, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUIVEXTPROC) (GLuint id, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTI64VEXTPROC) (GLuint 
id, GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUI64VEXTPROC) (GLuint id, GLenum pname, GLuint64 *params); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VEXTPROC) (GLenum pname, GLint64 *data); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGenQueriesEXT (GLsizei n, GLuint *ids); +GL_APICALL void GL_APIENTRY glDeleteQueriesEXT (GLsizei n, const GLuint *ids); +GL_APICALL GLboolean GL_APIENTRY glIsQueryEXT (GLuint id); +GL_APICALL void GL_APIENTRY glBeginQueryEXT (GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glEndQueryEXT (GLenum target); +GL_APICALL void GL_APIENTRY glQueryCounterEXT (GLuint id, GLenum target); +GL_APICALL void GL_APIENTRY glGetQueryivEXT (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectivEXT (GLuint id, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectuivEXT (GLuint id, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjecti64vEXT (GLuint id, GLenum pname, GLint64 *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectui64vEXT (GLuint id, GLenum pname, GLuint64 *params); +GL_APICALL void GL_APIENTRY glGetInteger64vEXT (GLenum pname, GLint64 *data); +#endif +#endif /* GL_EXT_disjoint_timer_query */ + +#ifndef GL_EXT_draw_buffers +#define GL_EXT_draw_buffers 1 +#define GL_MAX_COLOR_ATTACHMENTS_EXT 0x8CDF +#define GL_MAX_DRAW_BUFFERS_EXT 0x8824 +#define GL_DRAW_BUFFER0_EXT 0x8825 +#define GL_DRAW_BUFFER1_EXT 0x8826 +#define GL_DRAW_BUFFER2_EXT 0x8827 +#define GL_DRAW_BUFFER3_EXT 0x8828 +#define GL_DRAW_BUFFER4_EXT 0x8829 +#define GL_DRAW_BUFFER5_EXT 0x882A +#define GL_DRAW_BUFFER6_EXT 0x882B +#define GL_DRAW_BUFFER7_EXT 0x882C +#define GL_DRAW_BUFFER8_EXT 0x882D +#define GL_DRAW_BUFFER9_EXT 0x882E +#define GL_DRAW_BUFFER10_EXT 0x882F +#define GL_DRAW_BUFFER11_EXT 0x8830 +#define GL_DRAW_BUFFER12_EXT 0x8831 +#define GL_DRAW_BUFFER13_EXT 0x8832 +#define GL_DRAW_BUFFER14_EXT 0x8833 +#define GL_DRAW_BUFFER15_EXT 0x8834 +#define GL_COLOR_ATTACHMENT0_EXT 0x8CE0 +#define GL_COLOR_ATTACHMENT1_EXT 0x8CE1 +#define GL_COLOR_ATTACHMENT2_EXT 0x8CE2 +#define GL_COLOR_ATTACHMENT3_EXT 0x8CE3 +#define GL_COLOR_ATTACHMENT4_EXT 0x8CE4 +#define GL_COLOR_ATTACHMENT5_EXT 0x8CE5 +#define GL_COLOR_ATTACHMENT6_EXT 0x8CE6 +#define GL_COLOR_ATTACHMENT7_EXT 0x8CE7 +#define GL_COLOR_ATTACHMENT8_EXT 0x8CE8 +#define GL_COLOR_ATTACHMENT9_EXT 0x8CE9 +#define GL_COLOR_ATTACHMENT10_EXT 0x8CEA +#define GL_COLOR_ATTACHMENT11_EXT 0x8CEB +#define GL_COLOR_ATTACHMENT12_EXT 0x8CEC +#define GL_COLOR_ATTACHMENT13_EXT 0x8CED +#define GL_COLOR_ATTACHMENT14_EXT 0x8CEE +#define GL_COLOR_ATTACHMENT15_EXT 0x8CEF +typedef void (GL_APIENTRYP PFNGLDRAWBUFFERSEXTPROC) (GLsizei n, const GLenum *bufs); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawBuffersEXT (GLsizei n, const GLenum *bufs); +#endif +#endif /* GL_EXT_draw_buffers */ + +#ifndef GL_EXT_draw_buffers_indexed +#define GL_EXT_draw_buffers_indexed 1 +typedef void (GL_APIENTRYP PFNGLENABLEIEXTPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLDISABLEIEXTPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONIEXTPROC) (GLuint buf, GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEIEXTPROC) (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCIEXTPROC) (GLuint buf, GLenum src, GLenum dst); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEIEXTPROC) (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, 
GLenum dstAlpha); +typedef void (GL_APIENTRYP PFNGLCOLORMASKIEXTPROC) (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDIEXTPROC) (GLenum target, GLuint index); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glEnableiEXT (GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glDisableiEXT (GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glBlendEquationiEXT (GLuint buf, GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparateiEXT (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFunciEXT (GLuint buf, GLenum src, GLenum dst); +GL_APICALL void GL_APIENTRY glBlendFuncSeparateiEXT (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +GL_APICALL void GL_APIENTRY glColorMaskiEXT (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +GL_APICALL GLboolean GL_APIENTRY glIsEnablediEXT (GLenum target, GLuint index); +#endif +#endif /* GL_EXT_draw_buffers_indexed */ + +#ifndef GL_EXT_draw_elements_base_vertex +#define GL_EXT_draw_elements_base_vertex 1 +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSBASEVERTEXEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSBASEVERTEXEXTPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawElementsBaseVertexEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawRangeElementsBaseVertexEXT (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseVertexEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +#endif +#endif /* GL_EXT_draw_elements_base_vertex */ + +#ifndef GL_EXT_draw_instanced +#define GL_EXT_draw_instanced 1 +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDEXTPROC) (GLenum mode, GLint start, GLsizei count, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawArraysInstancedEXT (GLenum mode, GLint start, GLsizei count, GLsizei primcount); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#endif +#endif /* GL_EXT_draw_instanced */ + +#ifndef GL_EXT_draw_transform_feedback +#define GL_EXT_draw_transform_feedback 1 +typedef void (GL_APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKEXTPROC) (GLenum mode, GLuint id); +typedef void (GL_APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKINSTANCEDEXTPROC) (GLenum mode, GLuint id, GLsizei instancecount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawTransformFeedbackEXT (GLenum mode, GLuint id); +GL_APICALL void GL_APIENTRY glDrawTransformFeedbackInstancedEXT (GLenum mode, GLuint id, GLsizei instancecount); +#endif +#endif /* GL_EXT_draw_transform_feedback */ + +#ifndef GL_EXT_external_buffer +#define GL_EXT_external_buffer 1 +typedef void *GLeglClientBufferEXT; +typedef void 
(GL_APIENTRYP PFNGLBUFFERSTORAGEEXTERNALEXTPROC) (GLenum target, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +typedef void (GL_APIENTRYP PFNGLNAMEDBUFFERSTORAGEEXTERNALEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBufferStorageExternalEXT (GLenum target, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +GL_APICALL void GL_APIENTRY glNamedBufferStorageExternalEXT (GLuint buffer, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +#endif +#endif /* GL_EXT_external_buffer */ + +#ifndef GL_EXT_float_blend +#define GL_EXT_float_blend 1 +#endif /* GL_EXT_float_blend */ + +#ifndef GL_EXT_geometry_point_size +#define GL_EXT_geometry_point_size 1 +#endif /* GL_EXT_geometry_point_size */ + +#ifndef GL_EXT_geometry_shader +#define GL_EXT_geometry_shader 1 +#define GL_GEOMETRY_SHADER_EXT 0x8DD9 +#define GL_GEOMETRY_SHADER_BIT_EXT 0x00000004 +#define GL_GEOMETRY_LINKED_VERTICES_OUT_EXT 0x8916 +#define GL_GEOMETRY_LINKED_INPUT_TYPE_EXT 0x8917 +#define GL_GEOMETRY_LINKED_OUTPUT_TYPE_EXT 0x8918 +#define GL_GEOMETRY_SHADER_INVOCATIONS_EXT 0x887F +#define GL_LAYER_PROVOKING_VERTEX_EXT 0x825E +#define GL_LINES_ADJACENCY_EXT 0x000A +#define GL_LINE_STRIP_ADJACENCY_EXT 0x000B +#define GL_TRIANGLES_ADJACENCY_EXT 0x000C +#define GL_TRIANGLE_STRIP_ADJACENCY_EXT 0x000D +#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS_EXT 0x8DDF +#define GL_MAX_GEOMETRY_UNIFORM_BLOCKS_EXT 0x8A2C +#define GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS_EXT 0x8A32 +#define GL_MAX_GEOMETRY_INPUT_COMPONENTS_EXT 0x9123 +#define GL_MAX_GEOMETRY_OUTPUT_COMPONENTS_EXT 0x9124 +#define GL_MAX_GEOMETRY_OUTPUT_VERTICES_EXT 0x8DE0 +#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS_EXT 0x8DE1 +#define GL_MAX_GEOMETRY_SHADER_INVOCATIONS_EXT 0x8E5A +#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_EXT 0x8C29 +#define GL_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS_EXT 0x92CF +#define GL_MAX_GEOMETRY_ATOMIC_COUNTERS_EXT 0x92D5 +#define GL_MAX_GEOMETRY_IMAGE_UNIFORMS_EXT 0x90CD +#define GL_MAX_GEOMETRY_SHADER_STORAGE_BLOCKS_EXT 0x90D7 +#define GL_FIRST_VERTEX_CONVENTION_EXT 0x8E4D +#define GL_LAST_VERTEX_CONVENTION_EXT 0x8E4E +#define GL_UNDEFINED_VERTEX_EXT 0x8260 +#define GL_PRIMITIVES_GENERATED_EXT 0x8C87 +#define GL_FRAMEBUFFER_DEFAULT_LAYERS_EXT 0x9312 +#define GL_MAX_FRAMEBUFFER_LAYERS_EXT 0x9317 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_EXT 0x8DA8 +#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED_EXT 0x8DA7 +#define GL_REFERENCED_BY_GEOMETRY_SHADER_EXT 0x9309 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTUREEXTPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferTextureEXT (GLenum target, GLenum attachment, GLuint texture, GLint level); +#endif +#endif /* GL_EXT_geometry_shader */ + +#ifndef GL_EXT_gpu_shader5 +#define GL_EXT_gpu_shader5 1 +#endif /* GL_EXT_gpu_shader5 */ + +#ifndef GL_EXT_instanced_arrays +#define GL_EXT_instanced_arrays 1 +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR_EXT 0x88FE +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISOREXTPROC) (GLuint index, GLuint divisor); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glVertexAttribDivisorEXT (GLuint index, GLuint divisor); +#endif +#endif /* GL_EXT_instanced_arrays */ + +#ifndef GL_EXT_map_buffer_range +#define GL_EXT_map_buffer_range 1 +#define GL_MAP_READ_BIT_EXT 
0x0001 +#define GL_MAP_WRITE_BIT_EXT 0x0002 +#define GL_MAP_INVALIDATE_RANGE_BIT_EXT 0x0004 +#define GL_MAP_INVALIDATE_BUFFER_BIT_EXT 0x0008 +#define GL_MAP_FLUSH_EXPLICIT_BIT_EXT 0x0010 +#define GL_MAP_UNSYNCHRONIZED_BIT_EXT 0x0020 +typedef void *(GL_APIENTRYP PFNGLMAPBUFFERRANGEEXTPROC) (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (GL_APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEEXTPROC) (GLenum target, GLintptr offset, GLsizeiptr length); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void *GL_APIENTRY glMapBufferRangeEXT (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GL_APICALL void GL_APIENTRY glFlushMappedBufferRangeEXT (GLenum target, GLintptr offset, GLsizeiptr length); +#endif +#endif /* GL_EXT_map_buffer_range */ + +#ifndef GL_EXT_memory_object +#define GL_EXT_memory_object 1 +#define GL_TEXTURE_TILING_EXT 0x9580 +#define GL_DEDICATED_MEMORY_OBJECT_EXT 0x9581 +#define GL_PROTECTED_MEMORY_OBJECT_EXT 0x959B +#define GL_NUM_TILING_TYPES_EXT 0x9582 +#define GL_TILING_TYPES_EXT 0x9583 +#define GL_OPTIMAL_TILING_EXT 0x9584 +#define GL_LINEAR_TILING_EXT 0x9585 +#define GL_NUM_DEVICE_UUIDS_EXT 0x9596 +#define GL_DEVICE_UUID_EXT 0x9597 +#define GL_DRIVER_UUID_EXT 0x9598 +#define GL_UUID_SIZE_EXT 16 +typedef void (GL_APIENTRYP PFNGLGETUNSIGNEDBYTEVEXTPROC) (GLenum pname, GLubyte *data); +typedef void (GL_APIENTRYP PFNGLGETUNSIGNEDBYTEI_VEXTPROC) (GLenum target, GLuint index, GLubyte *data); +typedef void (GL_APIENTRYP PFNGLDELETEMEMORYOBJECTSEXTPROC) (GLsizei n, const GLuint *memoryObjects); +typedef GLboolean (GL_APIENTRYP PFNGLISMEMORYOBJECTEXTPROC) (GLuint memoryObject); +typedef void (GL_APIENTRYP PFNGLCREATEMEMORYOBJECTSEXTPROC) (GLsizei n, GLuint *memoryObjects); +typedef void (GL_APIENTRYP PFNGLMEMORYOBJECTPARAMETERIVEXTPROC) (GLuint memoryObject, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLGETMEMORYOBJECTPARAMETERIVEXTPROC) (GLuint memoryObject, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM2DEXTPROC) (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM2DMULTISAMPLEEXTPROC) (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM3DEXTPROC) (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM3DMULTISAMPLEEXTPROC) (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLBUFFERSTORAGEMEMEXTPROC) (GLenum target, GLsizeiptr size, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGEMEM2DEXTPROC) (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGEMEM2DMULTISAMPLEEXTPROC) (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGEMEM3DEXTPROC) (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +typedef void 
(GL_APIENTRYP PFNGLTEXTURESTORAGEMEM3DMULTISAMPLEEXTPROC) (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLNAMEDBUFFERSTORAGEMEMEXTPROC) (GLuint buffer, GLsizeiptr size, GLuint memory, GLuint64 offset); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetUnsignedBytevEXT (GLenum pname, GLubyte *data); +GL_APICALL void GL_APIENTRY glGetUnsignedBytei_vEXT (GLenum target, GLuint index, GLubyte *data); +GL_APICALL void GL_APIENTRY glDeleteMemoryObjectsEXT (GLsizei n, const GLuint *memoryObjects); +GL_APICALL GLboolean GL_APIENTRY glIsMemoryObjectEXT (GLuint memoryObject); +GL_APICALL void GL_APIENTRY glCreateMemoryObjectsEXT (GLsizei n, GLuint *memoryObjects); +GL_APICALL void GL_APIENTRY glMemoryObjectParameterivEXT (GLuint memoryObject, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glGetMemoryObjectParameterivEXT (GLuint memoryObject, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glTexStorageMem2DEXT (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTexStorageMem2DMultisampleEXT (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTexStorageMem3DEXT (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTexStorageMem3DMultisampleEXT (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glBufferStorageMemEXT (GLenum target, GLsizeiptr size, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTextureStorageMem2DEXT (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTextureStorageMem2DMultisampleEXT (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTextureStorageMem3DEXT (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTextureStorageMem3DMultisampleEXT (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glNamedBufferStorageMemEXT (GLuint buffer, GLsizeiptr size, GLuint memory, GLuint64 offset); +#endif +#endif /* GL_EXT_memory_object */ + +#ifndef GL_EXT_memory_object_fd +#define GL_EXT_memory_object_fd 1 +#define GL_HANDLE_TYPE_OPAQUE_FD_EXT 0x9586 +typedef void (GL_APIENTRYP PFNGLIMPORTMEMORYFDEXTPROC) (GLuint memory, GLuint64 size, GLenum handleType, GLint fd); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glImportMemoryFdEXT (GLuint memory, GLuint64 size, GLenum handleType, GLint fd); +#endif +#endif /* GL_EXT_memory_object_fd */ + +#ifndef GL_EXT_memory_object_win32 +#define GL_EXT_memory_object_win32 1 +#define GL_HANDLE_TYPE_OPAQUE_WIN32_EXT 0x9587 +#define GL_HANDLE_TYPE_OPAQUE_WIN32_KMT_EXT 0x9588 +#define 
GL_DEVICE_LUID_EXT 0x9599 +#define GL_DEVICE_NODE_MASK_EXT 0x959A +#define GL_LUID_SIZE_EXT 8 +#define GL_HANDLE_TYPE_D3D12_TILEPOOL_EXT 0x9589 +#define GL_HANDLE_TYPE_D3D12_RESOURCE_EXT 0x958A +#define GL_HANDLE_TYPE_D3D11_IMAGE_EXT 0x958B +#define GL_HANDLE_TYPE_D3D11_IMAGE_KMT_EXT 0x958C +typedef void (GL_APIENTRYP PFNGLIMPORTMEMORYWIN32HANDLEEXTPROC) (GLuint memory, GLuint64 size, GLenum handleType, void *handle); +typedef void (GL_APIENTRYP PFNGLIMPORTMEMORYWIN32NAMEEXTPROC) (GLuint memory, GLuint64 size, GLenum handleType, const void *name); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glImportMemoryWin32HandleEXT (GLuint memory, GLuint64 size, GLenum handleType, void *handle); +GL_APICALL void GL_APIENTRY glImportMemoryWin32NameEXT (GLuint memory, GLuint64 size, GLenum handleType, const void *name); +#endif +#endif /* GL_EXT_memory_object_win32 */ + +#ifndef GL_EXT_multi_draw_arrays +#define GL_EXT_multi_draw_arrays 1 +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSEXTPROC) (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSEXTPROC) (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glMultiDrawArraysEXT (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +GL_APICALL void GL_APIENTRY glMultiDrawElementsEXT (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount); +#endif +#endif /* GL_EXT_multi_draw_arrays */ + +#ifndef GL_EXT_multi_draw_indirect +#define GL_EXT_multi_draw_indirect 1 +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTEXTPROC) (GLenum mode, const void *indirect, GLsizei drawcount, GLsizei stride); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTEXTPROC) (GLenum mode, GLenum type, const void *indirect, GLsizei drawcount, GLsizei stride); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glMultiDrawArraysIndirectEXT (GLenum mode, const void *indirect, GLsizei drawcount, GLsizei stride); +GL_APICALL void GL_APIENTRY glMultiDrawElementsIndirectEXT (GLenum mode, GLenum type, const void *indirect, GLsizei drawcount, GLsizei stride); +#endif +#endif /* GL_EXT_multi_draw_indirect */ + +#ifndef GL_EXT_multisampled_compatibility +#define GL_EXT_multisampled_compatibility 1 +#define GL_MULTISAMPLE_EXT 0x809D +#define GL_SAMPLE_ALPHA_TO_ONE_EXT 0x809F +#endif /* GL_EXT_multisampled_compatibility */ + +#ifndef GL_EXT_multisampled_render_to_texture +#define GL_EXT_multisampled_render_to_texture 1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_SAMPLES_EXT 0x8D6C +#define GL_RENDERBUFFER_SAMPLES_EXT 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_EXT 0x8D56 +#define GL_MAX_SAMPLES_EXT 0x8D57 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEEXTPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleEXT (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glFramebufferTexture2DMultisampleEXT (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#endif +#endif /* GL_EXT_multisampled_render_to_texture 
*/ + +#ifndef GL_EXT_multisampled_render_to_texture2 +#define GL_EXT_multisampled_render_to_texture2 1 +#endif /* GL_EXT_multisampled_render_to_texture2 */ + +#ifndef GL_EXT_multiview_draw_buffers +#define GL_EXT_multiview_draw_buffers 1 +#define GL_COLOR_ATTACHMENT_EXT 0x90F0 +#define GL_MULTIVIEW_EXT 0x90F1 +#define GL_DRAW_BUFFER_EXT 0x0C01 +#define GL_READ_BUFFER_EXT 0x0C02 +#define GL_MAX_MULTIVIEW_BUFFERS_EXT 0x90F2 +typedef void (GL_APIENTRYP PFNGLREADBUFFERINDEXEDEXTPROC) (GLenum src, GLint index); +typedef void (GL_APIENTRYP PFNGLDRAWBUFFERSINDEXEDEXTPROC) (GLint n, const GLenum *location, const GLint *indices); +typedef void (GL_APIENTRYP PFNGLGETINTEGERI_VEXTPROC) (GLenum target, GLuint index, GLint *data); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glReadBufferIndexedEXT (GLenum src, GLint index); +GL_APICALL void GL_APIENTRY glDrawBuffersIndexedEXT (GLint n, const GLenum *location, const GLint *indices); +GL_APICALL void GL_APIENTRY glGetIntegeri_vEXT (GLenum target, GLuint index, GLint *data); +#endif +#endif /* GL_EXT_multiview_draw_buffers */ + +#ifndef GL_EXT_multiview_tessellation_geometry_shader +#define GL_EXT_multiview_tessellation_geometry_shader 1 +#endif /* GL_EXT_multiview_tessellation_geometry_shader */ + +#ifndef GL_EXT_multiview_texture_multisample +#define GL_EXT_multiview_texture_multisample 1 +#endif /* GL_EXT_multiview_texture_multisample */ + +#ifndef GL_EXT_multiview_timer_query +#define GL_EXT_multiview_timer_query 1 +#endif /* GL_EXT_multiview_timer_query */ + +#ifndef GL_EXT_occlusion_query_boolean +#define GL_EXT_occlusion_query_boolean 1 +#define GL_ANY_SAMPLES_PASSED_EXT 0x8C2F +#define GL_ANY_SAMPLES_PASSED_CONSERVATIVE_EXT 0x8D6A +#endif /* GL_EXT_occlusion_query_boolean */ + +#ifndef GL_EXT_polygon_offset_clamp +#define GL_EXT_polygon_offset_clamp 1 +#define GL_POLYGON_OFFSET_CLAMP_EXT 0x8E1B +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETCLAMPEXTPROC) (GLfloat factor, GLfloat units, GLfloat clamp); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glPolygonOffsetClampEXT (GLfloat factor, GLfloat units, GLfloat clamp); +#endif +#endif /* GL_EXT_polygon_offset_clamp */ + +#ifndef GL_EXT_post_depth_coverage +#define GL_EXT_post_depth_coverage 1 +#endif /* GL_EXT_post_depth_coverage */ + +#ifndef GL_EXT_primitive_bounding_box +#define GL_EXT_primitive_bounding_box 1 +#define GL_PRIMITIVE_BOUNDING_BOX_EXT 0x92BE +typedef void (GL_APIENTRYP PFNGLPRIMITIVEBOUNDINGBOXEXTPROC) (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glPrimitiveBoundingBoxEXT (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#endif +#endif /* GL_EXT_primitive_bounding_box */ + +#ifndef GL_EXT_protected_textures +#define GL_EXT_protected_textures 1 +#define GL_CONTEXT_FLAG_PROTECTED_CONTENT_BIT_EXT 0x00000010 +#define GL_TEXTURE_PROTECTED_EXT 0x8BFA +#endif /* GL_EXT_protected_textures */ + +#ifndef GL_EXT_pvrtc_sRGB +#define GL_EXT_pvrtc_sRGB 1 +#define GL_COMPRESSED_SRGB_PVRTC_2BPPV1_EXT 0x8A54 +#define GL_COMPRESSED_SRGB_PVRTC_4BPPV1_EXT 0x8A55 +#define GL_COMPRESSED_SRGB_ALPHA_PVRTC_2BPPV1_EXT 0x8A56 +#define GL_COMPRESSED_SRGB_ALPHA_PVRTC_4BPPV1_EXT 0x8A57 +#define GL_COMPRESSED_SRGB_ALPHA_PVRTC_2BPPV2_IMG 0x93F0 +#define GL_COMPRESSED_SRGB_ALPHA_PVRTC_4BPPV2_IMG 0x93F1 +#endif /* GL_EXT_pvrtc_sRGB */ + +#ifndef GL_EXT_raster_multisample +#define GL_EXT_raster_multisample 1 
+#define GL_RASTER_MULTISAMPLE_EXT 0x9327 +#define GL_RASTER_SAMPLES_EXT 0x9328 +#define GL_MAX_RASTER_SAMPLES_EXT 0x9329 +#define GL_RASTER_FIXED_SAMPLE_LOCATIONS_EXT 0x932A +#define GL_MULTISAMPLE_RASTERIZATION_ALLOWED_EXT 0x932B +#define GL_EFFECTIVE_RASTER_SAMPLES_EXT 0x932C +typedef void (GL_APIENTRYP PFNGLRASTERSAMPLESEXTPROC) (GLuint samples, GLboolean fixedsamplelocations); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRasterSamplesEXT (GLuint samples, GLboolean fixedsamplelocations); +#endif +#endif /* GL_EXT_raster_multisample */ + +#ifndef GL_EXT_read_format_bgra +#define GL_EXT_read_format_bgra 1 +#define GL_UNSIGNED_SHORT_4_4_4_4_REV_EXT 0x8365 +#define GL_UNSIGNED_SHORT_1_5_5_5_REV_EXT 0x8366 +#endif /* GL_EXT_read_format_bgra */ + +#ifndef GL_EXT_render_snorm +#define GL_EXT_render_snorm 1 +#define GL_R8_SNORM 0x8F94 +#define GL_RG8_SNORM 0x8F95 +#define GL_RGBA8_SNORM 0x8F97 +#define GL_R16_SNORM_EXT 0x8F98 +#define GL_RG16_SNORM_EXT 0x8F99 +#define GL_RGBA16_SNORM_EXT 0x8F9B +#endif /* GL_EXT_render_snorm */ + +#ifndef GL_EXT_robustness +#define GL_EXT_robustness 1 +#define GL_GUILTY_CONTEXT_RESET_EXT 0x8253 +#define GL_INNOCENT_CONTEXT_RESET_EXT 0x8254 +#define GL_UNKNOWN_CONTEXT_RESET_EXT 0x8255 +#define GL_CONTEXT_ROBUST_ACCESS_EXT 0x90F3 +#define GL_RESET_NOTIFICATION_STRATEGY_EXT 0x8256 +#define GL_LOSE_CONTEXT_ON_RESET_EXT 0x8252 +#define GL_NO_RESET_NOTIFICATION_EXT 0x8261 +typedef GLenum (GL_APIENTRYP PFNGLGETGRAPHICSRESETSTATUSEXTPROC) (void); +typedef void (GL_APIENTRYP PFNGLREADNPIXELSEXTPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMFVEXTPROC) (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMIVEXTPROC) (GLuint program, GLint location, GLsizei bufSize, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLenum GL_APIENTRY glGetGraphicsResetStatusEXT (void); +GL_APICALL void GL_APIENTRY glReadnPixelsEXT (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GL_APICALL void GL_APIENTRY glGetnUniformfvEXT (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetnUniformivEXT (GLuint program, GLint location, GLsizei bufSize, GLint *params); +#endif +#endif /* GL_EXT_robustness */ + +#ifndef GL_EXT_sRGB +#define GL_EXT_sRGB 1 +#define GL_SRGB_EXT 0x8C40 +#define GL_SRGB_ALPHA_EXT 0x8C42 +#define GL_SRGB8_ALPHA8_EXT 0x8C43 +#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING_EXT 0x8210 +#endif /* GL_EXT_sRGB */ + +#ifndef GL_EXT_sRGB_write_control +#define GL_EXT_sRGB_write_control 1 +#define GL_FRAMEBUFFER_SRGB_EXT 0x8DB9 +#endif /* GL_EXT_sRGB_write_control */ + +#ifndef GL_EXT_semaphore +#define GL_EXT_semaphore 1 +#define GL_LAYOUT_GENERAL_EXT 0x958D +#define GL_LAYOUT_COLOR_ATTACHMENT_EXT 0x958E +#define GL_LAYOUT_DEPTH_STENCIL_ATTACHMENT_EXT 0x958F +#define GL_LAYOUT_DEPTH_STENCIL_READ_ONLY_EXT 0x9590 +#define GL_LAYOUT_SHADER_READ_ONLY_EXT 0x9591 +#define GL_LAYOUT_TRANSFER_SRC_EXT 0x9592 +#define GL_LAYOUT_TRANSFER_DST_EXT 0x9593 +#define GL_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_EXT 0x9530 +#define GL_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_EXT 0x9531 +typedef void (GL_APIENTRYP PFNGLGENSEMAPHORESEXTPROC) (GLsizei n, GLuint *semaphores); +typedef void (GL_APIENTRYP PFNGLDELETESEMAPHORESEXTPROC) (GLsizei n, const GLuint *semaphores); +typedef GLboolean (GL_APIENTRYP 
PFNGLISSEMAPHOREEXTPROC) (GLuint semaphore); +typedef void (GL_APIENTRYP PFNGLSEMAPHOREPARAMETERUI64VEXTPROC) (GLuint semaphore, GLenum pname, const GLuint64 *params); +typedef void (GL_APIENTRYP PFNGLGETSEMAPHOREPARAMETERUI64VEXTPROC) (GLuint semaphore, GLenum pname, GLuint64 *params); +typedef void (GL_APIENTRYP PFNGLWAITSEMAPHOREEXTPROC) (GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *srcLayouts); +typedef void (GL_APIENTRYP PFNGLSIGNALSEMAPHOREEXTPROC) (GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *dstLayouts); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGenSemaphoresEXT (GLsizei n, GLuint *semaphores); +GL_APICALL void GL_APIENTRY glDeleteSemaphoresEXT (GLsizei n, const GLuint *semaphores); +GL_APICALL GLboolean GL_APIENTRY glIsSemaphoreEXT (GLuint semaphore); +GL_APICALL void GL_APIENTRY glSemaphoreParameterui64vEXT (GLuint semaphore, GLenum pname, const GLuint64 *params); +GL_APICALL void GL_APIENTRY glGetSemaphoreParameterui64vEXT (GLuint semaphore, GLenum pname, GLuint64 *params); +GL_APICALL void GL_APIENTRY glWaitSemaphoreEXT (GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *srcLayouts); +GL_APICALL void GL_APIENTRY glSignalSemaphoreEXT (GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *dstLayouts); +#endif +#endif /* GL_EXT_semaphore */ + +#ifndef GL_EXT_semaphore_fd +#define GL_EXT_semaphore_fd 1 +typedef void (GL_APIENTRYP PFNGLIMPORTSEMAPHOREFDEXTPROC) (GLuint semaphore, GLenum handleType, GLint fd); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glImportSemaphoreFdEXT (GLuint semaphore, GLenum handleType, GLint fd); +#endif +#endif /* GL_EXT_semaphore_fd */ + +#ifndef GL_EXT_semaphore_win32 +#define GL_EXT_semaphore_win32 1 +#define GL_HANDLE_TYPE_D3D12_FENCE_EXT 0x9594 +#define GL_D3D12_FENCE_VALUE_EXT 0x9595 +typedef void (GL_APIENTRYP PFNGLIMPORTSEMAPHOREWIN32HANDLEEXTPROC) (GLuint semaphore, GLenum handleType, void *handle); +typedef void (GL_APIENTRYP PFNGLIMPORTSEMAPHOREWIN32NAMEEXTPROC) (GLuint semaphore, GLenum handleType, const void *name); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glImportSemaphoreWin32HandleEXT (GLuint semaphore, GLenum handleType, void *handle); +GL_APICALL void GL_APIENTRY glImportSemaphoreWin32NameEXT (GLuint semaphore, GLenum handleType, const void *name); +#endif +#endif /* GL_EXT_semaphore_win32 */ + +#ifndef GL_EXT_separate_shader_objects +#define GL_EXT_separate_shader_objects 1 +#define GL_ACTIVE_PROGRAM_EXT 0x8259 +#define GL_VERTEX_SHADER_BIT_EXT 0x00000001 +#define GL_FRAGMENT_SHADER_BIT_EXT 0x00000002 +#define GL_ALL_SHADER_BITS_EXT 0xFFFFFFFF +#define GL_PROGRAM_SEPARABLE_EXT 0x8258 +#define GL_PROGRAM_PIPELINE_BINDING_EXT 0x825A +typedef void (GL_APIENTRYP PFNGLACTIVESHADERPROGRAMEXTPROC) (GLuint pipeline, GLuint program); +typedef void (GL_APIENTRYP PFNGLBINDPROGRAMPIPELINEEXTPROC) (GLuint pipeline); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROGRAMVEXTPROC) (GLenum type, GLsizei count, const GLchar **strings); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPIPELINESEXTPROC) (GLsizei n, const GLuint *pipelines); +typedef void (GL_APIENTRYP PFNGLGENPROGRAMPIPELINESEXTPROC) (GLsizei n, GLuint *pipelines); +typedef void (GL_APIENTRYP 
PFNGLGETPROGRAMPIPELINEINFOLOGEXTPROC) (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMPIPELINEIVEXTPROC) (GLuint pipeline, GLenum pname, GLint *params); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPIPELINEEXTPROC) (GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLPROGRAMPARAMETERIEXTPROC) (GLuint program, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FEXTPROC) (GLuint program, GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1IEXTPROC) (GLuint program, GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMSTAGESEXTPROC) (GLuint pipeline, GLbitfield stages, GLuint program); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPIPELINEEXTPROC) (GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UIEXTPROC) (GLuint program, GLint location, GLuint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef 
void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glActiveShaderProgramEXT (GLuint pipeline, GLuint program); +GL_APICALL void GL_APIENTRY glBindProgramPipelineEXT (GLuint pipeline); +GL_APICALL GLuint GL_APIENTRY glCreateShaderProgramvEXT (GLenum type, GLsizei count, const GLchar **strings); +GL_APICALL void GL_APIENTRY glDeleteProgramPipelinesEXT (GLsizei n, const GLuint *pipelines); +GL_APICALL void GL_APIENTRY glGenProgramPipelinesEXT (GLsizei n, GLuint *pipelines); +GL_APICALL void GL_APIENTRY glGetProgramPipelineInfoLogEXT (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetProgramPipelineivEXT (GLuint pipeline, GLenum pname, GLint *params); +GL_APICALL GLboolean GL_APIENTRY glIsProgramPipelineEXT (GLuint pipeline); +GL_APICALL void GL_APIENTRY glProgramParameteriEXT (GLuint program, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glProgramUniform1fEXT (GLuint program, GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glProgramUniform1fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform1iEXT (GLuint program, GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glProgramUniform1ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glProgramUniform2fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform2iEXT (GLuint program, GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glProgramUniform2ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform3fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glProgramUniform3fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform3iEXT (GLuint 
program, GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glProgramUniform3ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glProgramUniform4fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform4iEXT (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glProgramUniform4ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUseProgramStagesEXT (GLuint pipeline, GLbitfield stages, GLuint program); +GL_APICALL void GL_APIENTRY glValidateProgramPipelineEXT (GLuint pipeline); +GL_APICALL void GL_APIENTRY glProgramUniform1uiEXT (GLuint program, GLint location, GLuint v0); +GL_APICALL void GL_APIENTRY glProgramUniform2uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1); +GL_APICALL void GL_APIENTRY glProgramUniform3uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +GL_APICALL void GL_APIENTRY glProgramUniform4uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GL_APICALL void GL_APIENTRY glProgramUniform1uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform3uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2x3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2x4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#endif +#endif /* GL_EXT_separate_shader_objects */ + +#ifndef GL_EXT_shader_framebuffer_fetch +#define GL_EXT_shader_framebuffer_fetch 1 +#define GL_FRAGMENT_SHADER_DISCARDS_SAMPLES_EXT 0x8A52 +#endif /* GL_EXT_shader_framebuffer_fetch */ + +#ifndef GL_EXT_shader_framebuffer_fetch_non_coherent +#define GL_EXT_shader_framebuffer_fetch_non_coherent 1 +typedef void (GL_APIENTRYP 
PFNGLFRAMEBUFFERFETCHBARRIEREXTPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferFetchBarrierEXT (void); +#endif +#endif /* GL_EXT_shader_framebuffer_fetch_non_coherent */ + +#ifndef GL_EXT_shader_group_vote +#define GL_EXT_shader_group_vote 1 +#endif /* GL_EXT_shader_group_vote */ + +#ifndef GL_EXT_shader_implicit_conversions +#define GL_EXT_shader_implicit_conversions 1 +#endif /* GL_EXT_shader_implicit_conversions */ + +#ifndef GL_EXT_shader_integer_mix +#define GL_EXT_shader_integer_mix 1 +#endif /* GL_EXT_shader_integer_mix */ + +#ifndef GL_EXT_shader_io_blocks +#define GL_EXT_shader_io_blocks 1 +#endif /* GL_EXT_shader_io_blocks */ + +#ifndef GL_EXT_shader_non_constant_global_initializers +#define GL_EXT_shader_non_constant_global_initializers 1 +#endif /* GL_EXT_shader_non_constant_global_initializers */ + +#ifndef GL_EXT_shader_pixel_local_storage +#define GL_EXT_shader_pixel_local_storage 1 +#define GL_MAX_SHADER_PIXEL_LOCAL_STORAGE_FAST_SIZE_EXT 0x8F63 +#define GL_MAX_SHADER_PIXEL_LOCAL_STORAGE_SIZE_EXT 0x8F67 +#define GL_SHADER_PIXEL_LOCAL_STORAGE_EXT 0x8F64 +#endif /* GL_EXT_shader_pixel_local_storage */ + +#ifndef GL_EXT_shader_pixel_local_storage2 +#define GL_EXT_shader_pixel_local_storage2 1 +#define GL_MAX_SHADER_COMBINED_LOCAL_STORAGE_FAST_SIZE_EXT 0x9650 +#define GL_MAX_SHADER_COMBINED_LOCAL_STORAGE_SIZE_EXT 0x9651 +#define GL_FRAMEBUFFER_INCOMPLETE_INSUFFICIENT_SHADER_COMBINED_LOCAL_STORAGE_EXT 0x9652 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERPIXELLOCALSTORAGESIZEEXTPROC) (GLuint target, GLsizei size); +typedef GLsizei (GL_APIENTRYP PFNGLGETFRAMEBUFFERPIXELLOCALSTORAGESIZEEXTPROC) (GLuint target); +typedef void (GL_APIENTRYP PFNGLCLEARPIXELLOCALSTORAGEUIEXTPROC) (GLsizei offset, GLsizei n, const GLuint *values); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferPixelLocalStorageSizeEXT (GLuint target, GLsizei size); +GL_APICALL GLsizei GL_APIENTRY glGetFramebufferPixelLocalStorageSizeEXT (GLuint target); +GL_APICALL void GL_APIENTRY glClearPixelLocalStorageuiEXT (GLsizei offset, GLsizei n, const GLuint *values); +#endif +#endif /* GL_EXT_shader_pixel_local_storage2 */ + +#ifndef GL_EXT_shader_texture_lod +#define GL_EXT_shader_texture_lod 1 +#endif /* GL_EXT_shader_texture_lod */ + +#ifndef GL_EXT_shadow_samplers +#define GL_EXT_shadow_samplers 1 +#define GL_TEXTURE_COMPARE_MODE_EXT 0x884C +#define GL_TEXTURE_COMPARE_FUNC_EXT 0x884D +#define GL_COMPARE_REF_TO_TEXTURE_EXT 0x884E +#define GL_SAMPLER_2D_SHADOW_EXT 0x8B62 +#endif /* GL_EXT_shadow_samplers */ + +#ifndef GL_EXT_sparse_texture +#define GL_EXT_sparse_texture 1 +#define GL_TEXTURE_SPARSE_EXT 0x91A6 +#define GL_VIRTUAL_PAGE_SIZE_INDEX_EXT 0x91A7 +#define GL_NUM_SPARSE_LEVELS_EXT 0x91AA +#define GL_NUM_VIRTUAL_PAGE_SIZES_EXT 0x91A8 +#define GL_VIRTUAL_PAGE_SIZE_X_EXT 0x9195 +#define GL_VIRTUAL_PAGE_SIZE_Y_EXT 0x9196 +#define GL_VIRTUAL_PAGE_SIZE_Z_EXT 0x9197 +#define GL_TEXTURE_2D_ARRAY 0x8C1A +#define GL_TEXTURE_3D 0x806F +#define GL_MAX_SPARSE_TEXTURE_SIZE_EXT 0x9198 +#define GL_MAX_SPARSE_3D_TEXTURE_SIZE_EXT 0x9199 +#define GL_MAX_SPARSE_ARRAY_TEXTURE_LAYERS_EXT 0x919A +#define GL_SPARSE_TEXTURE_FULL_ARRAY_CUBE_MIPMAPS_EXT 0x91A9 +typedef void (GL_APIENTRYP PFNGLTEXPAGECOMMITMENTEXTPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean commit); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexPageCommitmentEXT (GLenum target, GLint level, GLint xoffset, GLint 
yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean commit); +#endif +#endif /* GL_EXT_sparse_texture */ + +#ifndef GL_EXT_sparse_texture2 +#define GL_EXT_sparse_texture2 1 +#endif /* GL_EXT_sparse_texture2 */ + +#ifndef GL_EXT_tessellation_point_size +#define GL_EXT_tessellation_point_size 1 +#endif /* GL_EXT_tessellation_point_size */ + +#ifndef GL_EXT_tessellation_shader +#define GL_EXT_tessellation_shader 1 +#define GL_PATCHES_EXT 0x000E +#define GL_PATCH_VERTICES_EXT 0x8E72 +#define GL_TESS_CONTROL_OUTPUT_VERTICES_EXT 0x8E75 +#define GL_TESS_GEN_MODE_EXT 0x8E76 +#define GL_TESS_GEN_SPACING_EXT 0x8E77 +#define GL_TESS_GEN_VERTEX_ORDER_EXT 0x8E78 +#define GL_TESS_GEN_POINT_MODE_EXT 0x8E79 +#define GL_ISOLINES_EXT 0x8E7A +#define GL_QUADS_EXT 0x0007 +#define GL_FRACTIONAL_ODD_EXT 0x8E7B +#define GL_FRACTIONAL_EVEN_EXT 0x8E7C +#define GL_MAX_PATCH_VERTICES_EXT 0x8E7D +#define GL_MAX_TESS_GEN_LEVEL_EXT 0x8E7E +#define GL_MAX_TESS_CONTROL_UNIFORM_COMPONENTS_EXT 0x8E7F +#define GL_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS_EXT 0x8E80 +#define GL_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS_EXT 0x8E81 +#define GL_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS_EXT 0x8E82 +#define GL_MAX_TESS_CONTROL_OUTPUT_COMPONENTS_EXT 0x8E83 +#define GL_MAX_TESS_PATCH_COMPONENTS_EXT 0x8E84 +#define GL_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS_EXT 0x8E85 +#define GL_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS_EXT 0x8E86 +#define GL_MAX_TESS_CONTROL_UNIFORM_BLOCKS_EXT 0x8E89 +#define GL_MAX_TESS_EVALUATION_UNIFORM_BLOCKS_EXT 0x8E8A +#define GL_MAX_TESS_CONTROL_INPUT_COMPONENTS_EXT 0x886C +#define GL_MAX_TESS_EVALUATION_INPUT_COMPONENTS_EXT 0x886D +#define GL_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS_EXT 0x8E1E +#define GL_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS_EXT 0x8E1F +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS_EXT 0x92CD +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS_EXT 0x92CE +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTERS_EXT 0x92D3 +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTERS_EXT 0x92D4 +#define GL_MAX_TESS_CONTROL_IMAGE_UNIFORMS_EXT 0x90CB +#define GL_MAX_TESS_EVALUATION_IMAGE_UNIFORMS_EXT 0x90CC +#define GL_MAX_TESS_CONTROL_SHADER_STORAGE_BLOCKS_EXT 0x90D8 +#define GL_MAX_TESS_EVALUATION_SHADER_STORAGE_BLOCKS_EXT 0x90D9 +#define GL_PRIMITIVE_RESTART_FOR_PATCHES_SUPPORTED 0x8221 +#define GL_IS_PER_PATCH_EXT 0x92E7 +#define GL_REFERENCED_BY_TESS_CONTROL_SHADER_EXT 0x9307 +#define GL_REFERENCED_BY_TESS_EVALUATION_SHADER_EXT 0x9308 +#define GL_TESS_CONTROL_SHADER_EXT 0x8E88 +#define GL_TESS_EVALUATION_SHADER_EXT 0x8E87 +#define GL_TESS_CONTROL_SHADER_BIT_EXT 0x00000008 +#define GL_TESS_EVALUATION_SHADER_BIT_EXT 0x00000010 +typedef void (GL_APIENTRYP PFNGLPATCHPARAMETERIEXTPROC) (GLenum pname, GLint value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glPatchParameteriEXT (GLenum pname, GLint value); +#endif +#endif /* GL_EXT_tessellation_shader */ + +#ifndef GL_EXT_texture_border_clamp +#define GL_EXT_texture_border_clamp 1 +#define GL_TEXTURE_BORDER_COLOR_EXT 0x1004 +#define GL_CLAMP_TO_BORDER_EXT 0x812D +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIIVEXTPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIUIVEXTPROC) (GLenum target, GLenum pname, const GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIIVEXTPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIUIVEXTPROC) (GLenum target, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP 
PFNGLSAMPLERPARAMETERIIVEXTPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIUIVEXTPROC) (GLuint sampler, GLenum pname, const GLuint *param); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIIVEXTPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVEXTPROC) (GLuint sampler, GLenum pname, GLuint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexParameterIivEXT (GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexParameterIuivEXT (GLenum target, GLenum pname, const GLuint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIivEXT (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIuivEXT (GLenum target, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glSamplerParameterIivEXT (GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterIuivEXT (GLuint sampler, GLenum pname, const GLuint *param); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIivEXT (GLuint sampler, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIuivEXT (GLuint sampler, GLenum pname, GLuint *params); +#endif +#endif /* GL_EXT_texture_border_clamp */ + +#ifndef GL_EXT_texture_buffer +#define GL_EXT_texture_buffer 1 +#define GL_TEXTURE_BUFFER_EXT 0x8C2A +#define GL_TEXTURE_BUFFER_BINDING_EXT 0x8C2A +#define GL_MAX_TEXTURE_BUFFER_SIZE_EXT 0x8C2B +#define GL_TEXTURE_BINDING_BUFFER_EXT 0x8C2C +#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING_EXT 0x8C2D +#define GL_TEXTURE_BUFFER_OFFSET_ALIGNMENT_EXT 0x919F +#define GL_SAMPLER_BUFFER_EXT 0x8DC2 +#define GL_INT_SAMPLER_BUFFER_EXT 0x8DD0 +#define GL_UNSIGNED_INT_SAMPLER_BUFFER_EXT 0x8DD8 +#define GL_IMAGE_BUFFER_EXT 0x9051 +#define GL_INT_IMAGE_BUFFER_EXT 0x905C +#define GL_UNSIGNED_INT_IMAGE_BUFFER_EXT 0x9067 +#define GL_TEXTURE_BUFFER_OFFSET_EXT 0x919D +#define GL_TEXTURE_BUFFER_SIZE_EXT 0x919E +typedef void (GL_APIENTRYP PFNGLTEXBUFFEREXTPROC) (GLenum target, GLenum internalformat, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTEXBUFFERRANGEEXTPROC) (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexBufferEXT (GLenum target, GLenum internalformat, GLuint buffer); +GL_APICALL void GL_APIENTRY glTexBufferRangeEXT (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +#endif +#endif /* GL_EXT_texture_buffer */ + +#ifndef GL_EXT_texture_compression_astc_decode_mode +#define GL_EXT_texture_compression_astc_decode_mode 1 +#define GL_TEXTURE_ASTC_DECODE_PRECISION_EXT 0x8F69 +#endif /* GL_EXT_texture_compression_astc_decode_mode */ + +#ifndef GL_EXT_texture_compression_bptc +#define GL_EXT_texture_compression_bptc 1 +#define GL_COMPRESSED_RGBA_BPTC_UNORM_EXT 0x8E8C +#define GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM_EXT 0x8E8D +#define GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT_EXT 0x8E8E +#define GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_EXT 0x8E8F +#endif /* GL_EXT_texture_compression_bptc */ + +#ifndef GL_EXT_texture_compression_dxt1 +#define GL_EXT_texture_compression_dxt1 1 +#define GL_COMPRESSED_RGB_S3TC_DXT1_EXT 0x83F0 +#define GL_COMPRESSED_RGBA_S3TC_DXT1_EXT 0x83F1 +#endif /* GL_EXT_texture_compression_dxt1 */ + +#ifndef GL_EXT_texture_compression_rgtc +#define GL_EXT_texture_compression_rgtc 1 +#define GL_COMPRESSED_RED_RGTC1_EXT 0x8DBB +#define 
GL_COMPRESSED_SIGNED_RED_RGTC1_EXT 0x8DBC +#define GL_COMPRESSED_RED_GREEN_RGTC2_EXT 0x8DBD +#define GL_COMPRESSED_SIGNED_RED_GREEN_RGTC2_EXT 0x8DBE +#endif /* GL_EXT_texture_compression_rgtc */ + +#ifndef GL_EXT_texture_compression_s3tc +#define GL_EXT_texture_compression_s3tc 1 +#define GL_COMPRESSED_RGBA_S3TC_DXT3_EXT 0x83F2 +#define GL_COMPRESSED_RGBA_S3TC_DXT5_EXT 0x83F3 +#endif /* GL_EXT_texture_compression_s3tc */ + +#ifndef GL_EXT_texture_compression_s3tc_srgb +#define GL_EXT_texture_compression_s3tc_srgb 1 +#define GL_COMPRESSED_SRGB_S3TC_DXT1_EXT 0x8C4C +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT 0x8C4D +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT 0x8C4E +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT 0x8C4F +#endif /* GL_EXT_texture_compression_s3tc_srgb */ + +#ifndef GL_EXT_texture_cube_map_array +#define GL_EXT_texture_cube_map_array 1 +#define GL_TEXTURE_CUBE_MAP_ARRAY_EXT 0x9009 +#define GL_TEXTURE_BINDING_CUBE_MAP_ARRAY_EXT 0x900A +#define GL_SAMPLER_CUBE_MAP_ARRAY_EXT 0x900C +#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW_EXT 0x900D +#define GL_INT_SAMPLER_CUBE_MAP_ARRAY_EXT 0x900E +#define GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY_EXT 0x900F +#define GL_IMAGE_CUBE_MAP_ARRAY_EXT 0x9054 +#define GL_INT_IMAGE_CUBE_MAP_ARRAY_EXT 0x905F +#define GL_UNSIGNED_INT_IMAGE_CUBE_MAP_ARRAY_EXT 0x906A +#endif /* GL_EXT_texture_cube_map_array */ + +#ifndef GL_EXT_texture_filter_anisotropic +#define GL_EXT_texture_filter_anisotropic 1 +#define GL_TEXTURE_MAX_ANISOTROPY_EXT 0x84FE +#define GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT 0x84FF +#endif /* GL_EXT_texture_filter_anisotropic */ + +#ifndef GL_EXT_texture_filter_minmax +#define GL_EXT_texture_filter_minmax 1 +#define GL_TEXTURE_REDUCTION_MODE_EXT 0x9366 +#define GL_WEIGHTED_AVERAGE_EXT 0x9367 +#endif /* GL_EXT_texture_filter_minmax */ + +#ifndef GL_EXT_texture_format_BGRA8888 +#define GL_EXT_texture_format_BGRA8888 1 +#endif /* GL_EXT_texture_format_BGRA8888 */ + +#ifndef GL_EXT_texture_format_sRGB_override +#define GL_EXT_texture_format_sRGB_override 1 +#define GL_TEXTURE_FORMAT_SRGB_OVERRIDE_EXT 0x8FBF +#endif /* GL_EXT_texture_format_sRGB_override */ + +#ifndef GL_EXT_texture_mirror_clamp_to_edge +#define GL_EXT_texture_mirror_clamp_to_edge 1 +#define GL_MIRROR_CLAMP_TO_EDGE_EXT 0x8743 +#endif /* GL_EXT_texture_mirror_clamp_to_edge */ + +#ifndef GL_EXT_texture_norm16 +#define GL_EXT_texture_norm16 1 +#define GL_R16_EXT 0x822A +#define GL_RG16_EXT 0x822C +#define GL_RGBA16_EXT 0x805B +#define GL_RGB16_EXT 0x8054 +#define GL_RGB16_SNORM_EXT 0x8F9A +#endif /* GL_EXT_texture_norm16 */ + +#ifndef GL_EXT_texture_query_lod +#define GL_EXT_texture_query_lod 1 +#endif /* GL_EXT_texture_query_lod */ + +#ifndef GL_EXT_texture_rg +#define GL_EXT_texture_rg 1 +#define GL_RED_EXT 0x1903 +#define GL_RG_EXT 0x8227 +#define GL_R8_EXT 0x8229 +#define GL_RG8_EXT 0x822B +#endif /* GL_EXT_texture_rg */ + +#ifndef GL_EXT_texture_sRGB_R8 +#define GL_EXT_texture_sRGB_R8 1 +#define GL_SR8_EXT 0x8FBD +#endif /* GL_EXT_texture_sRGB_R8 */ + +#ifndef GL_EXT_texture_sRGB_RG8 +#define GL_EXT_texture_sRGB_RG8 1 +#define GL_SRG8_EXT 0x8FBE +#endif /* GL_EXT_texture_sRGB_RG8 */ + +#ifndef GL_EXT_texture_sRGB_decode +#define GL_EXT_texture_sRGB_decode 1 +#define GL_TEXTURE_SRGB_DECODE_EXT 0x8A48 +#define GL_DECODE_EXT 0x8A49 +#define GL_SKIP_DECODE_EXT 0x8A4A +#endif /* GL_EXT_texture_sRGB_decode */ + +#ifndef GL_EXT_texture_shadow_lod +#define GL_EXT_texture_shadow_lod 1 +#endif /* GL_EXT_texture_shadow_lod */ + +#ifndef GL_EXT_texture_storage +#define 
GL_EXT_texture_storage 1 +#define GL_TEXTURE_IMMUTABLE_FORMAT_EXT 0x912F +#define GL_ALPHA8_EXT 0x803C +#define GL_LUMINANCE8_EXT 0x8040 +#define GL_LUMINANCE8_ALPHA8_EXT 0x8045 +#define GL_RGBA32F_EXT 0x8814 +#define GL_RGB32F_EXT 0x8815 +#define GL_ALPHA32F_EXT 0x8816 +#define GL_LUMINANCE32F_EXT 0x8818 +#define GL_LUMINANCE_ALPHA32F_EXT 0x8819 +#define GL_ALPHA16F_EXT 0x881C +#define GL_LUMINANCE16F_EXT 0x881E +#define GL_LUMINANCE_ALPHA16F_EXT 0x881F +#define GL_R32F_EXT 0x822E +#define GL_RG32F_EXT 0x8230 +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE1DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE1DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE2DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE3DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexStorage1DEXT (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GL_APICALL void GL_APIENTRY glTexStorage2DEXT (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage3DEXT (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GL_APICALL void GL_APIENTRY glTextureStorage1DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GL_APICALL void GL_APIENTRY glTextureStorage2DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTextureStorage3DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +#endif +#endif /* GL_EXT_texture_storage */ + +#ifndef GL_EXT_texture_type_2_10_10_10_REV +#define GL_EXT_texture_type_2_10_10_10_REV 1 +#define GL_UNSIGNED_INT_2_10_10_10_REV_EXT 0x8368 +#endif /* GL_EXT_texture_type_2_10_10_10_REV */ + +#ifndef GL_EXT_texture_view +#define GL_EXT_texture_view 1 +#define GL_TEXTURE_VIEW_MIN_LEVEL_EXT 0x82DB +#define GL_TEXTURE_VIEW_NUM_LEVELS_EXT 0x82DC +#define GL_TEXTURE_VIEW_MIN_LAYER_EXT 0x82DD +#define GL_TEXTURE_VIEW_NUM_LAYERS_EXT 0x82DE +typedef void (GL_APIENTRYP PFNGLTEXTUREVIEWEXTPROC) (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTextureViewEXT (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +#endif +#endif /* GL_EXT_texture_view */ + +#ifndef GL_EXT_unpack_subimage +#define GL_EXT_unpack_subimage 1 +#define GL_UNPACK_ROW_LENGTH_EXT 0x0CF2 +#define GL_UNPACK_SKIP_ROWS_EXT 0x0CF3 +#define GL_UNPACK_SKIP_PIXELS_EXT 0x0CF4 +#endif /* GL_EXT_unpack_subimage */ + +#ifndef GL_EXT_win32_keyed_mutex +#define GL_EXT_win32_keyed_mutex 
1 +typedef GLboolean (GL_APIENTRYP PFNGLACQUIREKEYEDMUTEXWIN32EXTPROC) (GLuint memory, GLuint64 key, GLuint timeout); +typedef GLboolean (GL_APIENTRYP PFNGLRELEASEKEYEDMUTEXWIN32EXTPROC) (GLuint memory, GLuint64 key); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLboolean GL_APIENTRY glAcquireKeyedMutexWin32EXT (GLuint memory, GLuint64 key, GLuint timeout); +GL_APICALL GLboolean GL_APIENTRY glReleaseKeyedMutexWin32EXT (GLuint memory, GLuint64 key); +#endif +#endif /* GL_EXT_win32_keyed_mutex */ + +#ifndef GL_EXT_window_rectangles +#define GL_EXT_window_rectangles 1 +#define GL_INCLUSIVE_EXT 0x8F10 +#define GL_EXCLUSIVE_EXT 0x8F11 +#define GL_WINDOW_RECTANGLE_EXT 0x8F12 +#define GL_WINDOW_RECTANGLE_MODE_EXT 0x8F13 +#define GL_MAX_WINDOW_RECTANGLES_EXT 0x8F14 +#define GL_NUM_WINDOW_RECTANGLES_EXT 0x8F15 +typedef void (GL_APIENTRYP PFNGLWINDOWRECTANGLESEXTPROC) (GLenum mode, GLsizei count, const GLint *box); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glWindowRectanglesEXT (GLenum mode, GLsizei count, const GLint *box); +#endif +#endif /* GL_EXT_window_rectangles */ + +#ifndef GL_FJ_shader_binary_GCCSO +#define GL_FJ_shader_binary_GCCSO 1 +#define GL_GCCSO_SHADER_BINARY_FJ 0x9260 +#endif /* GL_FJ_shader_binary_GCCSO */ + +#ifndef GL_IMG_bindless_texture +#define GL_IMG_bindless_texture 1 +typedef GLuint64 (GL_APIENTRYP PFNGLGETTEXTUREHANDLEIMGPROC) (GLuint texture); +typedef GLuint64 (GL_APIENTRYP PFNGLGETTEXTURESAMPLERHANDLEIMGPROC) (GLuint texture, GLuint sampler); +typedef void (GL_APIENTRYP PFNGLUNIFORMHANDLEUI64IMGPROC) (GLint location, GLuint64 value); +typedef void (GL_APIENTRYP PFNGLUNIFORMHANDLEUI64VIMGPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64IMGPROC) (GLuint program, GLint location, GLuint64 value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64VIMGPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLuint64 GL_APIENTRY glGetTextureHandleIMG (GLuint texture); +GL_APICALL GLuint64 GL_APIENTRY glGetTextureSamplerHandleIMG (GLuint texture, GLuint sampler); +GL_APICALL void GL_APIENTRY glUniformHandleui64IMG (GLint location, GLuint64 value); +GL_APICALL void GL_APIENTRY glUniformHandleui64vIMG (GLint location, GLsizei count, const GLuint64 *value); +GL_APICALL void GL_APIENTRY glProgramUniformHandleui64IMG (GLuint program, GLint location, GLuint64 value); +GL_APICALL void GL_APIENTRY glProgramUniformHandleui64vIMG (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +#endif +#endif /* GL_IMG_bindless_texture */ + +#ifndef GL_IMG_framebuffer_downsample +#define GL_IMG_framebuffer_downsample 1 +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_AND_DOWNSAMPLE_IMG 0x913C +#define GL_NUM_DOWNSAMPLE_SCALES_IMG 0x913D +#define GL_DOWNSAMPLE_SCALES_IMG 0x913E +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_SCALE_IMG 0x913F +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DDOWNSAMPLEIMGPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint xscale, GLint yscale); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERDOWNSAMPLEIMGPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer, GLint xscale, GLint yscale); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferTexture2DDownsampleIMG (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint xscale, GLint yscale); +GL_APICALL void GL_APIENTRY 
glFramebufferTextureLayerDownsampleIMG (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer, GLint xscale, GLint yscale); +#endif +#endif /* GL_IMG_framebuffer_downsample */ + +#ifndef GL_IMG_multisampled_render_to_texture +#define GL_IMG_multisampled_render_to_texture 1 +#define GL_RENDERBUFFER_SAMPLES_IMG 0x9133 +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_IMG 0x9134 +#define GL_MAX_SAMPLES_IMG 0x9135 +#define GL_TEXTURE_SAMPLES_IMG 0x9136 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEIMGPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEIMGPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleIMG (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glFramebufferTexture2DMultisampleIMG (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#endif +#endif /* GL_IMG_multisampled_render_to_texture */ + +#ifndef GL_IMG_program_binary +#define GL_IMG_program_binary 1 +#define GL_SGX_PROGRAM_BINARY_IMG 0x9130 +#endif /* GL_IMG_program_binary */ + +#ifndef GL_IMG_read_format +#define GL_IMG_read_format 1 +#define GL_BGRA_IMG 0x80E1 +#define GL_UNSIGNED_SHORT_4_4_4_4_REV_IMG 0x8365 +#endif /* GL_IMG_read_format */ + +#ifndef GL_IMG_shader_binary +#define GL_IMG_shader_binary 1 +#define GL_SGX_BINARY_IMG 0x8C0A +#endif /* GL_IMG_shader_binary */ + +#ifndef GL_IMG_texture_compression_pvrtc +#define GL_IMG_texture_compression_pvrtc 1 +#define GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG 0x8C00 +#define GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG 0x8C01 +#define GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG 0x8C02 +#define GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG 0x8C03 +#endif /* GL_IMG_texture_compression_pvrtc */ + +#ifndef GL_IMG_texture_compression_pvrtc2 +#define GL_IMG_texture_compression_pvrtc2 1 +#define GL_COMPRESSED_RGBA_PVRTC_2BPPV2_IMG 0x9137 +#define GL_COMPRESSED_RGBA_PVRTC_4BPPV2_IMG 0x9138 +#endif /* GL_IMG_texture_compression_pvrtc2 */ + +#ifndef GL_IMG_texture_filter_cubic +#define GL_IMG_texture_filter_cubic 1 +#define GL_CUBIC_IMG 0x9139 +#define GL_CUBIC_MIPMAP_NEAREST_IMG 0x913A +#define GL_CUBIC_MIPMAP_LINEAR_IMG 0x913B +#endif /* GL_IMG_texture_filter_cubic */ + +#ifndef GL_INTEL_blackhole_render +#define GL_INTEL_blackhole_render 1 +#define GL_BLACKHOLE_RENDER_INTEL 0x83FC +#endif /* GL_INTEL_blackhole_render */ + +#ifndef GL_INTEL_conservative_rasterization +#define GL_INTEL_conservative_rasterization 1 +#define GL_CONSERVATIVE_RASTERIZATION_INTEL 0x83FE +#endif /* GL_INTEL_conservative_rasterization */ + +#ifndef GL_INTEL_framebuffer_CMAA +#define GL_INTEL_framebuffer_CMAA 1 +typedef void (GL_APIENTRYP PFNGLAPPLYFRAMEBUFFERATTACHMENTCMAAINTELPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glApplyFramebufferAttachmentCMAAINTEL (void); +#endif +#endif /* GL_INTEL_framebuffer_CMAA */ + +#ifndef GL_INTEL_performance_query +#define GL_INTEL_performance_query 1 +#define GL_PERFQUERY_SINGLE_CONTEXT_INTEL 0x00000000 +#define GL_PERFQUERY_GLOBAL_CONTEXT_INTEL 0x00000001 +#define GL_PERFQUERY_WAIT_INTEL 0x83FB +#define GL_PERFQUERY_FLUSH_INTEL 0x83FA +#define GL_PERFQUERY_DONOT_FLUSH_INTEL 0x83F9 +#define GL_PERFQUERY_COUNTER_EVENT_INTEL 0x94F0 +#define GL_PERFQUERY_COUNTER_DURATION_NORM_INTEL 0x94F1 
+#define GL_PERFQUERY_COUNTER_DURATION_RAW_INTEL 0x94F2 +#define GL_PERFQUERY_COUNTER_THROUGHPUT_INTEL 0x94F3 +#define GL_PERFQUERY_COUNTER_RAW_INTEL 0x94F4 +#define GL_PERFQUERY_COUNTER_TIMESTAMP_INTEL 0x94F5 +#define GL_PERFQUERY_COUNTER_DATA_UINT32_INTEL 0x94F8 +#define GL_PERFQUERY_COUNTER_DATA_UINT64_INTEL 0x94F9 +#define GL_PERFQUERY_COUNTER_DATA_FLOAT_INTEL 0x94FA +#define GL_PERFQUERY_COUNTER_DATA_DOUBLE_INTEL 0x94FB +#define GL_PERFQUERY_COUNTER_DATA_BOOL32_INTEL 0x94FC +#define GL_PERFQUERY_QUERY_NAME_LENGTH_MAX_INTEL 0x94FD +#define GL_PERFQUERY_COUNTER_NAME_LENGTH_MAX_INTEL 0x94FE +#define GL_PERFQUERY_COUNTER_DESC_LENGTH_MAX_INTEL 0x94FF +#define GL_PERFQUERY_GPA_EXTENDED_COUNTERS_INTEL 0x9500 +typedef void (GL_APIENTRYP PFNGLBEGINPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (GL_APIENTRYP PFNGLCREATEPERFQUERYINTELPROC) (GLuint queryId, GLuint *queryHandle); +typedef void (GL_APIENTRYP PFNGLDELETEPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (GL_APIENTRYP PFNGLENDPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (GL_APIENTRYP PFNGLGETFIRSTPERFQUERYIDINTELPROC) (GLuint *queryId); +typedef void (GL_APIENTRYP PFNGLGETNEXTPERFQUERYIDINTELPROC) (GLuint queryId, GLuint *nextQueryId); +typedef void (GL_APIENTRYP PFNGLGETPERFCOUNTERINFOINTELPROC) (GLuint queryId, GLuint counterId, GLuint counterNameLength, GLchar *counterName, GLuint counterDescLength, GLchar *counterDesc, GLuint *counterOffset, GLuint *counterDataSize, GLuint *counterTypeEnum, GLuint *counterDataTypeEnum, GLuint64 *rawCounterMaxValue); +typedef void (GL_APIENTRYP PFNGLGETPERFQUERYDATAINTELPROC) (GLuint queryHandle, GLuint flags, GLsizei dataSize, void *data, GLuint *bytesWritten); +typedef void (GL_APIENTRYP PFNGLGETPERFQUERYIDBYNAMEINTELPROC) (GLchar *queryName, GLuint *queryId); +typedef void (GL_APIENTRYP PFNGLGETPERFQUERYINFOINTELPROC) (GLuint queryId, GLuint queryNameLength, GLchar *queryName, GLuint *dataSize, GLuint *noCounters, GLuint *noInstances, GLuint *capsMask); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBeginPerfQueryINTEL (GLuint queryHandle); +GL_APICALL void GL_APIENTRY glCreatePerfQueryINTEL (GLuint queryId, GLuint *queryHandle); +GL_APICALL void GL_APIENTRY glDeletePerfQueryINTEL (GLuint queryHandle); +GL_APICALL void GL_APIENTRY glEndPerfQueryINTEL (GLuint queryHandle); +GL_APICALL void GL_APIENTRY glGetFirstPerfQueryIdINTEL (GLuint *queryId); +GL_APICALL void GL_APIENTRY glGetNextPerfQueryIdINTEL (GLuint queryId, GLuint *nextQueryId); +GL_APICALL void GL_APIENTRY glGetPerfCounterInfoINTEL (GLuint queryId, GLuint counterId, GLuint counterNameLength, GLchar *counterName, GLuint counterDescLength, GLchar *counterDesc, GLuint *counterOffset, GLuint *counterDataSize, GLuint *counterTypeEnum, GLuint *counterDataTypeEnum, GLuint64 *rawCounterMaxValue); +GL_APICALL void GL_APIENTRY glGetPerfQueryDataINTEL (GLuint queryHandle, GLuint flags, GLsizei dataSize, void *data, GLuint *bytesWritten); +GL_APICALL void GL_APIENTRY glGetPerfQueryIdByNameINTEL (GLchar *queryName, GLuint *queryId); +GL_APICALL void GL_APIENTRY glGetPerfQueryInfoINTEL (GLuint queryId, GLuint queryNameLength, GLchar *queryName, GLuint *dataSize, GLuint *noCounters, GLuint *noInstances, GLuint *capsMask); +#endif +#endif /* GL_INTEL_performance_query */ + +#ifndef GL_MESA_framebuffer_flip_x +#define GL_MESA_framebuffer_flip_x 1 +#define GL_FRAMEBUFFER_FLIP_X_MESA 0x8BBC +#endif /* GL_MESA_framebuffer_flip_x */ + +#ifndef GL_MESA_framebuffer_flip_y +#define GL_MESA_framebuffer_flip_y 1 +#define 
GL_FRAMEBUFFER_FLIP_Y_MESA 0x8BBB +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERPARAMETERIMESAPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERPARAMETERIVMESAPROC) (GLenum target, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferParameteriMESA (GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glGetFramebufferParameterivMESA (GLenum target, GLenum pname, GLint *params); +#endif +#endif /* GL_MESA_framebuffer_flip_y */ + +#ifndef GL_MESA_framebuffer_swap_xy +#define GL_MESA_framebuffer_swap_xy 1 +#define GL_FRAMEBUFFER_SWAP_XY_MESA 0x8BBD +#endif /* GL_MESA_framebuffer_swap_xy */ + +#ifndef GL_MESA_program_binary_formats +#define GL_MESA_program_binary_formats 1 +#define GL_PROGRAM_BINARY_FORMAT_MESA 0x875F +#endif /* GL_MESA_program_binary_formats */ + +#ifndef GL_MESA_shader_integer_functions +#define GL_MESA_shader_integer_functions 1 +#endif /* GL_MESA_shader_integer_functions */ + +#ifndef GL_NVX_blend_equation_advanced_multi_draw_buffers +#define GL_NVX_blend_equation_advanced_multi_draw_buffers 1 +#endif /* GL_NVX_blend_equation_advanced_multi_draw_buffers */ + +#ifndef GL_NV_bindless_texture +#define GL_NV_bindless_texture 1 +typedef GLuint64 (GL_APIENTRYP PFNGLGETTEXTUREHANDLENVPROC) (GLuint texture); +typedef GLuint64 (GL_APIENTRYP PFNGLGETTEXTURESAMPLERHANDLENVPROC) (GLuint texture, GLuint sampler); +typedef void (GL_APIENTRYP PFNGLMAKETEXTUREHANDLERESIDENTNVPROC) (GLuint64 handle); +typedef void (GL_APIENTRYP PFNGLMAKETEXTUREHANDLENONRESIDENTNVPROC) (GLuint64 handle); +typedef GLuint64 (GL_APIENTRYP PFNGLGETIMAGEHANDLENVPROC) (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +typedef void (GL_APIENTRYP PFNGLMAKEIMAGEHANDLERESIDENTNVPROC) (GLuint64 handle, GLenum access); +typedef void (GL_APIENTRYP PFNGLMAKEIMAGEHANDLENONRESIDENTNVPROC) (GLuint64 handle); +typedef void (GL_APIENTRYP PFNGLUNIFORMHANDLEUI64NVPROC) (GLint location, GLuint64 value); +typedef void (GL_APIENTRYP PFNGLUNIFORMHANDLEUI64VNVPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64NVPROC) (GLuint program, GLint location, GLuint64 value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +typedef GLboolean (GL_APIENTRYP PFNGLISTEXTUREHANDLERESIDENTNVPROC) (GLuint64 handle); +typedef GLboolean (GL_APIENTRYP PFNGLISIMAGEHANDLERESIDENTNVPROC) (GLuint64 handle); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLuint64 GL_APIENTRY glGetTextureHandleNV (GLuint texture); +GL_APICALL GLuint64 GL_APIENTRY glGetTextureSamplerHandleNV (GLuint texture, GLuint sampler); +GL_APICALL void GL_APIENTRY glMakeTextureHandleResidentNV (GLuint64 handle); +GL_APICALL void GL_APIENTRY glMakeTextureHandleNonResidentNV (GLuint64 handle); +GL_APICALL GLuint64 GL_APIENTRY glGetImageHandleNV (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +GL_APICALL void GL_APIENTRY glMakeImageHandleResidentNV (GLuint64 handle, GLenum access); +GL_APICALL void GL_APIENTRY glMakeImageHandleNonResidentNV (GLuint64 handle); +GL_APICALL void GL_APIENTRY glUniformHandleui64NV (GLint location, GLuint64 value); +GL_APICALL void GL_APIENTRY glUniformHandleui64vNV (GLint location, GLsizei count, const GLuint64 *value); +GL_APICALL void GL_APIENTRY glProgramUniformHandleui64NV (GLuint program, GLint location, GLuint64 value); +GL_APICALL void 
GL_APIENTRY glProgramUniformHandleui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +GL_APICALL GLboolean GL_APIENTRY glIsTextureHandleResidentNV (GLuint64 handle); +GL_APICALL GLboolean GL_APIENTRY glIsImageHandleResidentNV (GLuint64 handle); +#endif +#endif /* GL_NV_bindless_texture */ + +#ifndef GL_NV_blend_equation_advanced +#define GL_NV_blend_equation_advanced 1 +#define GL_BLEND_OVERLAP_NV 0x9281 +#define GL_BLEND_PREMULTIPLIED_SRC_NV 0x9280 +#define GL_BLUE_NV 0x1905 +#define GL_COLORBURN_NV 0x929A +#define GL_COLORDODGE_NV 0x9299 +#define GL_CONJOINT_NV 0x9284 +#define GL_CONTRAST_NV 0x92A1 +#define GL_DARKEN_NV 0x9297 +#define GL_DIFFERENCE_NV 0x929E +#define GL_DISJOINT_NV 0x9283 +#define GL_DST_ATOP_NV 0x928F +#define GL_DST_IN_NV 0x928B +#define GL_DST_NV 0x9287 +#define GL_DST_OUT_NV 0x928D +#define GL_DST_OVER_NV 0x9289 +#define GL_EXCLUSION_NV 0x92A0 +#define GL_GREEN_NV 0x1904 +#define GL_HARDLIGHT_NV 0x929B +#define GL_HARDMIX_NV 0x92A9 +#define GL_HSL_COLOR_NV 0x92AF +#define GL_HSL_HUE_NV 0x92AD +#define GL_HSL_LUMINOSITY_NV 0x92B0 +#define GL_HSL_SATURATION_NV 0x92AE +#define GL_INVERT_OVG_NV 0x92B4 +#define GL_INVERT_RGB_NV 0x92A3 +#define GL_LIGHTEN_NV 0x9298 +#define GL_LINEARBURN_NV 0x92A5 +#define GL_LINEARDODGE_NV 0x92A4 +#define GL_LINEARLIGHT_NV 0x92A7 +#define GL_MINUS_CLAMPED_NV 0x92B3 +#define GL_MINUS_NV 0x929F +#define GL_MULTIPLY_NV 0x9294 +#define GL_OVERLAY_NV 0x9296 +#define GL_PINLIGHT_NV 0x92A8 +#define GL_PLUS_CLAMPED_ALPHA_NV 0x92B2 +#define GL_PLUS_CLAMPED_NV 0x92B1 +#define GL_PLUS_DARKER_NV 0x9292 +#define GL_PLUS_NV 0x9291 +#define GL_RED_NV 0x1903 +#define GL_SCREEN_NV 0x9295 +#define GL_SOFTLIGHT_NV 0x929C +#define GL_SRC_ATOP_NV 0x928E +#define GL_SRC_IN_NV 0x928A +#define GL_SRC_NV 0x9286 +#define GL_SRC_OUT_NV 0x928C +#define GL_SRC_OVER_NV 0x9288 +#define GL_UNCORRELATED_NV 0x9282 +#define GL_VIVIDLIGHT_NV 0x92A6 +#define GL_XOR_NV 0x1506 +typedef void (GL_APIENTRYP PFNGLBLENDPARAMETERINVPROC) (GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLBLENDBARRIERNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBlendParameteriNV (GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glBlendBarrierNV (void); +#endif +#endif /* GL_NV_blend_equation_advanced */ + +#ifndef GL_NV_blend_equation_advanced_coherent +#define GL_NV_blend_equation_advanced_coherent 1 +#define GL_BLEND_ADVANCED_COHERENT_NV 0x9285 +#endif /* GL_NV_blend_equation_advanced_coherent */ + +#ifndef GL_NV_blend_minmax_factor +#define GL_NV_blend_minmax_factor 1 +#define GL_FACTOR_MIN_AMD 0x901C +#define GL_FACTOR_MAX_AMD 0x901D +#endif /* GL_NV_blend_minmax_factor */ + +#ifndef GL_NV_clip_space_w_scaling +#define GL_NV_clip_space_w_scaling 1 +#define GL_VIEWPORT_POSITION_W_SCALE_NV 0x937C +#define GL_VIEWPORT_POSITION_W_SCALE_X_COEFF_NV 0x937D +#define GL_VIEWPORT_POSITION_W_SCALE_Y_COEFF_NV 0x937E +typedef void (GL_APIENTRYP PFNGLVIEWPORTPOSITIONWSCALENVPROC) (GLuint index, GLfloat xcoeff, GLfloat ycoeff); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glViewportPositionWScaleNV (GLuint index, GLfloat xcoeff, GLfloat ycoeff); +#endif +#endif /* GL_NV_clip_space_w_scaling */ + +#ifndef GL_NV_compute_shader_derivatives +#define GL_NV_compute_shader_derivatives 1 +#endif /* GL_NV_compute_shader_derivatives */ + +#ifndef GL_NV_conditional_render +#define GL_NV_conditional_render 1 +#define GL_QUERY_WAIT_NV 0x8E13 +#define GL_QUERY_NO_WAIT_NV 0x8E14 +#define GL_QUERY_BY_REGION_WAIT_NV 0x8E15 +#define 
GL_QUERY_BY_REGION_NO_WAIT_NV 0x8E16 +typedef void (GL_APIENTRYP PFNGLBEGINCONDITIONALRENDERNVPROC) (GLuint id, GLenum mode); +typedef void (GL_APIENTRYP PFNGLENDCONDITIONALRENDERNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBeginConditionalRenderNV (GLuint id, GLenum mode); +GL_APICALL void GL_APIENTRY glEndConditionalRenderNV (void); +#endif +#endif /* GL_NV_conditional_render */ + +#ifndef GL_NV_conservative_raster +#define GL_NV_conservative_raster 1 +#define GL_CONSERVATIVE_RASTERIZATION_NV 0x9346 +#define GL_SUBPIXEL_PRECISION_BIAS_X_BITS_NV 0x9347 +#define GL_SUBPIXEL_PRECISION_BIAS_Y_BITS_NV 0x9348 +#define GL_MAX_SUBPIXEL_PRECISION_BIAS_BITS_NV 0x9349 +typedef void (GL_APIENTRYP PFNGLSUBPIXELPRECISIONBIASNVPROC) (GLuint xbits, GLuint ybits); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glSubpixelPrecisionBiasNV (GLuint xbits, GLuint ybits); +#endif +#endif /* GL_NV_conservative_raster */ + +#ifndef GL_NV_conservative_raster_pre_snap +#define GL_NV_conservative_raster_pre_snap 1 +#define GL_CONSERVATIVE_RASTER_MODE_PRE_SNAP_NV 0x9550 +#endif /* GL_NV_conservative_raster_pre_snap */ + +#ifndef GL_NV_conservative_raster_pre_snap_triangles +#define GL_NV_conservative_raster_pre_snap_triangles 1 +#define GL_CONSERVATIVE_RASTER_MODE_NV 0x954D +#define GL_CONSERVATIVE_RASTER_MODE_POST_SNAP_NV 0x954E +#define GL_CONSERVATIVE_RASTER_MODE_PRE_SNAP_TRIANGLES_NV 0x954F +typedef void (GL_APIENTRYP PFNGLCONSERVATIVERASTERPARAMETERINVPROC) (GLenum pname, GLint param); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glConservativeRasterParameteriNV (GLenum pname, GLint param); +#endif +#endif /* GL_NV_conservative_raster_pre_snap_triangles */ + +#ifndef GL_NV_copy_buffer +#define GL_NV_copy_buffer 1 +#define GL_COPY_READ_BUFFER_NV 0x8F36 +#define GL_COPY_WRITE_BUFFER_NV 0x8F37 +typedef void (GL_APIENTRYP PFNGLCOPYBUFFERSUBDATANVPROC) (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCopyBufferSubDataNV (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +#endif +#endif /* GL_NV_copy_buffer */ + +#ifndef GL_NV_coverage_sample +#define GL_NV_coverage_sample 1 +#define GL_COVERAGE_COMPONENT_NV 0x8ED0 +#define GL_COVERAGE_COMPONENT4_NV 0x8ED1 +#define GL_COVERAGE_ATTACHMENT_NV 0x8ED2 +#define GL_COVERAGE_BUFFERS_NV 0x8ED3 +#define GL_COVERAGE_SAMPLES_NV 0x8ED4 +#define GL_COVERAGE_ALL_FRAGMENTS_NV 0x8ED5 +#define GL_COVERAGE_EDGE_FRAGMENTS_NV 0x8ED6 +#define GL_COVERAGE_AUTOMATIC_NV 0x8ED7 +#define GL_COVERAGE_BUFFER_BIT_NV 0x00008000 +typedef void (GL_APIENTRYP PFNGLCOVERAGEMASKNVPROC) (GLboolean mask); +typedef void (GL_APIENTRYP PFNGLCOVERAGEOPERATIONNVPROC) (GLenum operation); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCoverageMaskNV (GLboolean mask); +GL_APICALL void GL_APIENTRY glCoverageOperationNV (GLenum operation); +#endif +#endif /* GL_NV_coverage_sample */ + +#ifndef GL_NV_depth_nonlinear +#define GL_NV_depth_nonlinear 1 +#define GL_DEPTH_COMPONENT16_NONLINEAR_NV 0x8E2C +#endif /* GL_NV_depth_nonlinear */ + +#ifndef GL_NV_draw_buffers +#define GL_NV_draw_buffers 1 +#define GL_MAX_DRAW_BUFFERS_NV 0x8824 +#define GL_DRAW_BUFFER0_NV 0x8825 +#define GL_DRAW_BUFFER1_NV 0x8826 +#define GL_DRAW_BUFFER2_NV 0x8827 +#define GL_DRAW_BUFFER3_NV 0x8828 +#define GL_DRAW_BUFFER4_NV 0x8829 +#define GL_DRAW_BUFFER5_NV 0x882A +#define GL_DRAW_BUFFER6_NV 0x882B +#define 
GL_DRAW_BUFFER7_NV 0x882C +#define GL_DRAW_BUFFER8_NV 0x882D +#define GL_DRAW_BUFFER9_NV 0x882E +#define GL_DRAW_BUFFER10_NV 0x882F +#define GL_DRAW_BUFFER11_NV 0x8830 +#define GL_DRAW_BUFFER12_NV 0x8831 +#define GL_DRAW_BUFFER13_NV 0x8832 +#define GL_DRAW_BUFFER14_NV 0x8833 +#define GL_DRAW_BUFFER15_NV 0x8834 +#define GL_COLOR_ATTACHMENT0_NV 0x8CE0 +#define GL_COLOR_ATTACHMENT1_NV 0x8CE1 +#define GL_COLOR_ATTACHMENT2_NV 0x8CE2 +#define GL_COLOR_ATTACHMENT3_NV 0x8CE3 +#define GL_COLOR_ATTACHMENT4_NV 0x8CE4 +#define GL_COLOR_ATTACHMENT5_NV 0x8CE5 +#define GL_COLOR_ATTACHMENT6_NV 0x8CE6 +#define GL_COLOR_ATTACHMENT7_NV 0x8CE7 +#define GL_COLOR_ATTACHMENT8_NV 0x8CE8 +#define GL_COLOR_ATTACHMENT9_NV 0x8CE9 +#define GL_COLOR_ATTACHMENT10_NV 0x8CEA +#define GL_COLOR_ATTACHMENT11_NV 0x8CEB +#define GL_COLOR_ATTACHMENT12_NV 0x8CEC +#define GL_COLOR_ATTACHMENT13_NV 0x8CED +#define GL_COLOR_ATTACHMENT14_NV 0x8CEE +#define GL_COLOR_ATTACHMENT15_NV 0x8CEF +typedef void (GL_APIENTRYP PFNGLDRAWBUFFERSNVPROC) (GLsizei n, const GLenum *bufs); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawBuffersNV (GLsizei n, const GLenum *bufs); +#endif +#endif /* GL_NV_draw_buffers */ + +#ifndef GL_NV_draw_instanced +#define GL_NV_draw_instanced 1 +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDNVPROC) (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDNVPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawArraysInstancedNV (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedNV (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#endif +#endif /* GL_NV_draw_instanced */ + +#ifndef GL_NV_draw_vulkan_image +#define GL_NV_draw_vulkan_image 1 +typedef void (GL_APIENTRY *GLVULKANPROCNV)(void); +typedef void (GL_APIENTRYP PFNGLDRAWVKIMAGENVPROC) (GLuint64 vkImage, GLuint sampler, GLfloat x0, GLfloat y0, GLfloat x1, GLfloat y1, GLfloat z, GLfloat s0, GLfloat t0, GLfloat s1, GLfloat t1); +typedef GLVULKANPROCNV (GL_APIENTRYP PFNGLGETVKPROCADDRNVPROC) (const GLchar *name); +typedef void (GL_APIENTRYP PFNGLWAITVKSEMAPHORENVPROC) (GLuint64 vkSemaphore); +typedef void (GL_APIENTRYP PFNGLSIGNALVKSEMAPHORENVPROC) (GLuint64 vkSemaphore); +typedef void (GL_APIENTRYP PFNGLSIGNALVKFENCENVPROC) (GLuint64 vkFence); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawVkImageNV (GLuint64 vkImage, GLuint sampler, GLfloat x0, GLfloat y0, GLfloat x1, GLfloat y1, GLfloat z, GLfloat s0, GLfloat t0, GLfloat s1, GLfloat t1); +GL_APICALL GLVULKANPROCNV GL_APIENTRY glGetVkProcAddrNV (const GLchar *name); +GL_APICALL void GL_APIENTRY glWaitVkSemaphoreNV (GLuint64 vkSemaphore); +GL_APICALL void GL_APIENTRY glSignalVkSemaphoreNV (GLuint64 vkSemaphore); +GL_APICALL void GL_APIENTRY glSignalVkFenceNV (GLuint64 vkFence); +#endif +#endif /* GL_NV_draw_vulkan_image */ + +#ifndef GL_NV_explicit_attrib_location +#define GL_NV_explicit_attrib_location 1 +#endif /* GL_NV_explicit_attrib_location */ + +#ifndef GL_NV_fbo_color_attachments +#define GL_NV_fbo_color_attachments 1 +#define GL_MAX_COLOR_ATTACHMENTS_NV 0x8CDF +#endif /* GL_NV_fbo_color_attachments */ + +#ifndef GL_NV_fence +#define GL_NV_fence 1 +#define GL_ALL_COMPLETED_NV 0x84F2 +#define GL_FENCE_STATUS_NV 0x84F3 +#define GL_FENCE_CONDITION_NV 0x84F4 +typedef void (GL_APIENTRYP PFNGLDELETEFENCESNVPROC) (GLsizei n, 
const GLuint *fences); +typedef void (GL_APIENTRYP PFNGLGENFENCESNVPROC) (GLsizei n, GLuint *fences); +typedef GLboolean (GL_APIENTRYP PFNGLISFENCENVPROC) (GLuint fence); +typedef GLboolean (GL_APIENTRYP PFNGLTESTFENCENVPROC) (GLuint fence); +typedef void (GL_APIENTRYP PFNGLGETFENCEIVNVPROC) (GLuint fence, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLFINISHFENCENVPROC) (GLuint fence); +typedef void (GL_APIENTRYP PFNGLSETFENCENVPROC) (GLuint fence, GLenum condition); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDeleteFencesNV (GLsizei n, const GLuint *fences); +GL_APICALL void GL_APIENTRY glGenFencesNV (GLsizei n, GLuint *fences); +GL_APICALL GLboolean GL_APIENTRY glIsFenceNV (GLuint fence); +GL_APICALL GLboolean GL_APIENTRY glTestFenceNV (GLuint fence); +GL_APICALL void GL_APIENTRY glGetFenceivNV (GLuint fence, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glFinishFenceNV (GLuint fence); +GL_APICALL void GL_APIENTRY glSetFenceNV (GLuint fence, GLenum condition); +#endif +#endif /* GL_NV_fence */ + +#ifndef GL_NV_fill_rectangle +#define GL_NV_fill_rectangle 1 +#define GL_FILL_RECTANGLE_NV 0x933C +#endif /* GL_NV_fill_rectangle */ + +#ifndef GL_NV_fragment_coverage_to_color +#define GL_NV_fragment_coverage_to_color 1 +#define GL_FRAGMENT_COVERAGE_TO_COLOR_NV 0x92DD +#define GL_FRAGMENT_COVERAGE_COLOR_NV 0x92DE +typedef void (GL_APIENTRYP PFNGLFRAGMENTCOVERAGECOLORNVPROC) (GLuint color); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFragmentCoverageColorNV (GLuint color); +#endif +#endif /* GL_NV_fragment_coverage_to_color */ + +#ifndef GL_NV_fragment_shader_barycentric +#define GL_NV_fragment_shader_barycentric 1 +#endif /* GL_NV_fragment_shader_barycentric */ + +#ifndef GL_NV_fragment_shader_interlock +#define GL_NV_fragment_shader_interlock 1 +#endif /* GL_NV_fragment_shader_interlock */ + +#ifndef GL_NV_framebuffer_blit +#define GL_NV_framebuffer_blit 1 +#define GL_READ_FRAMEBUFFER_NV 0x8CA8 +#define GL_DRAW_FRAMEBUFFER_NV 0x8CA9 +#define GL_DRAW_FRAMEBUFFER_BINDING_NV 0x8CA6 +#define GL_READ_FRAMEBUFFER_BINDING_NV 0x8CAA +typedef void (GL_APIENTRYP PFNGLBLITFRAMEBUFFERNVPROC) (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBlitFramebufferNV (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#endif +#endif /* GL_NV_framebuffer_blit */ + +#ifndef GL_NV_framebuffer_mixed_samples +#define GL_NV_framebuffer_mixed_samples 1 +#define GL_COVERAGE_MODULATION_TABLE_NV 0x9331 +#define GL_COLOR_SAMPLES_NV 0x8E20 +#define GL_DEPTH_SAMPLES_NV 0x932D +#define GL_STENCIL_SAMPLES_NV 0x932E +#define GL_MIXED_DEPTH_SAMPLES_SUPPORTED_NV 0x932F +#define GL_MIXED_STENCIL_SAMPLES_SUPPORTED_NV 0x9330 +#define GL_COVERAGE_MODULATION_NV 0x9332 +#define GL_COVERAGE_MODULATION_TABLE_SIZE_NV 0x9333 +typedef void (GL_APIENTRYP PFNGLCOVERAGEMODULATIONTABLENVPROC) (GLsizei n, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLGETCOVERAGEMODULATIONTABLENVPROC) (GLsizei bufSize, GLfloat *v); +typedef void (GL_APIENTRYP PFNGLCOVERAGEMODULATIONNVPROC) (GLenum components); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCoverageModulationTableNV (GLsizei n, const GLfloat *v); +GL_APICALL void GL_APIENTRY glGetCoverageModulationTableNV (GLsizei bufSize, GLfloat *v); +GL_APICALL void GL_APIENTRY glCoverageModulationNV 
(GLenum components); +#endif +#endif /* GL_NV_framebuffer_mixed_samples */ + +#ifndef GL_NV_framebuffer_multisample +#define GL_NV_framebuffer_multisample 1 +#define GL_RENDERBUFFER_SAMPLES_NV 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_NV 0x8D56 +#define GL_MAX_SAMPLES_NV 0x8D57 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLENVPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleNV (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +#endif +#endif /* GL_NV_framebuffer_multisample */ + +#ifndef GL_NV_generate_mipmap_sRGB +#define GL_NV_generate_mipmap_sRGB 1 +#endif /* GL_NV_generate_mipmap_sRGB */ + +#ifndef GL_NV_geometry_shader_passthrough +#define GL_NV_geometry_shader_passthrough 1 +#endif /* GL_NV_geometry_shader_passthrough */ + +#ifndef GL_NV_gpu_shader5 +#define GL_NV_gpu_shader5 1 +typedef khronos_int64_t GLint64EXT; +typedef khronos_uint64_t GLuint64EXT; +#define GL_INT64_NV 0x140E +#define GL_UNSIGNED_INT64_NV 0x140F +#define GL_INT8_NV 0x8FE0 +#define GL_INT8_VEC2_NV 0x8FE1 +#define GL_INT8_VEC3_NV 0x8FE2 +#define GL_INT8_VEC4_NV 0x8FE3 +#define GL_INT16_NV 0x8FE4 +#define GL_INT16_VEC2_NV 0x8FE5 +#define GL_INT16_VEC3_NV 0x8FE6 +#define GL_INT16_VEC4_NV 0x8FE7 +#define GL_INT64_VEC2_NV 0x8FE9 +#define GL_INT64_VEC3_NV 0x8FEA +#define GL_INT64_VEC4_NV 0x8FEB +#define GL_UNSIGNED_INT8_NV 0x8FEC +#define GL_UNSIGNED_INT8_VEC2_NV 0x8FED +#define GL_UNSIGNED_INT8_VEC3_NV 0x8FEE +#define GL_UNSIGNED_INT8_VEC4_NV 0x8FEF +#define GL_UNSIGNED_INT16_NV 0x8FF0 +#define GL_UNSIGNED_INT16_VEC2_NV 0x8FF1 +#define GL_UNSIGNED_INT16_VEC3_NV 0x8FF2 +#define GL_UNSIGNED_INT16_VEC4_NV 0x8FF3 +#define GL_UNSIGNED_INT64_VEC2_NV 0x8FF5 +#define GL_UNSIGNED_INT64_VEC3_NV 0x8FF6 +#define GL_UNSIGNED_INT64_VEC4_NV 0x8FF7 +#define GL_FLOAT16_NV 0x8FF8 +#define GL_FLOAT16_VEC2_NV 0x8FF9 +#define GL_FLOAT16_VEC3_NV 0x8FFA +#define GL_FLOAT16_VEC4_NV 0x8FFB +#define GL_PATCHES 0x000E +typedef void (GL_APIENTRYP PFNGLUNIFORM1I64NVPROC) (GLint location, GLint64EXT x); +typedef void (GL_APIENTRYP PFNGLUNIFORM2I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y); +typedef void (GL_APIENTRYP PFNGLUNIFORM3I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +typedef void (GL_APIENTRYP PFNGLUNIFORM4I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +typedef void (GL_APIENTRYP PFNGLUNIFORM1I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UI64NVPROC) (GLint location, GLuint64EXT x); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP 
PFNGLUNIFORM2UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMI64VNVPROC) (GLuint program, GLint location, GLint64EXT *params); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1I64NVPROC) (GLuint program, GLint location, GLint64EXT x); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glUniform1i64NV (GLint location, GLint64EXT x); +GL_APICALL void GL_APIENTRY glUniform2i64NV (GLint location, GLint64EXT x, GLint64EXT y); +GL_APICALL void GL_APIENTRY glUniform3i64NV (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +GL_APICALL void GL_APIENTRY glUniform4i64NV (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +GL_APICALL void GL_APIENTRY glUniform1i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform2i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform3i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform4i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform1ui64NV (GLint location, GLuint64EXT x); +GL_APICALL void GL_APIENTRY glUniform2ui64NV (GLint location, GLuint64EXT x, GLuint64EXT y); 
+GL_APICALL void GL_APIENTRY glUniform3ui64NV (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +GL_APICALL void GL_APIENTRY glUniform4ui64NV (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +GL_APICALL void GL_APIENTRY glUniform1ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform2ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform3ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform4ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glGetUniformi64vNV (GLuint program, GLint location, GLint64EXT *params); +GL_APICALL void GL_APIENTRY glProgramUniform1i64NV (GLuint program, GLint location, GLint64EXT x); +GL_APICALL void GL_APIENTRY glProgramUniform2i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y); +GL_APICALL void GL_APIENTRY glProgramUniform3i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +GL_APICALL void GL_APIENTRY glProgramUniform4i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +GL_APICALL void GL_APIENTRY glProgramUniform1i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform2i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform3i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform4i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform1ui64NV (GLuint program, GLint location, GLuint64EXT x); +GL_APICALL void GL_APIENTRY glProgramUniform2ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y); +GL_APICALL void GL_APIENTRY glProgramUniform3ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +GL_APICALL void GL_APIENTRY glProgramUniform4ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +GL_APICALL void GL_APIENTRY glProgramUniform1ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform2ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform3ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform4ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#endif +#endif /* GL_NV_gpu_shader5 */ + +#ifndef GL_NV_image_formats +#define GL_NV_image_formats 1 +#endif /* GL_NV_image_formats */ + +#ifndef GL_NV_instanced_arrays +#define GL_NV_instanced_arrays 1 +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR_NV 0x88FE +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISORNVPROC) (GLuint index, GLuint divisor); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glVertexAttribDivisorNV (GLuint index, GLuint divisor); +#endif +#endif /* GL_NV_instanced_arrays */ + +#ifndef GL_NV_internalformat_sample_query +#define GL_NV_internalformat_sample_query 1 +#define GL_TEXTURE_2D_MULTISAMPLE 0x9100 +#define GL_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9102 +#define GL_MULTISAMPLES_NV 0x9371 +#define GL_SUPERSAMPLE_SCALE_X_NV 0x9372 +#define 
GL_SUPERSAMPLE_SCALE_Y_NV 0x9373 +#define GL_CONFORMANT_NV 0x9374 +typedef void (GL_APIENTRYP PFNGLGETINTERNALFORMATSAMPLEIVNVPROC) (GLenum target, GLenum internalformat, GLsizei samples, GLenum pname, GLsizei count, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetInternalformatSampleivNV (GLenum target, GLenum internalformat, GLsizei samples, GLenum pname, GLsizei count, GLint *params); +#endif +#endif /* GL_NV_internalformat_sample_query */ + +#ifndef GL_NV_memory_attachment +#define GL_NV_memory_attachment 1 +#define GL_ATTACHED_MEMORY_OBJECT_NV 0x95A4 +#define GL_ATTACHED_MEMORY_OFFSET_NV 0x95A5 +#define GL_MEMORY_ATTACHABLE_ALIGNMENT_NV 0x95A6 +#define GL_MEMORY_ATTACHABLE_SIZE_NV 0x95A7 +#define GL_MEMORY_ATTACHABLE_NV 0x95A8 +#define GL_DETACHED_MEMORY_INCARNATION_NV 0x95A9 +#define GL_DETACHED_TEXTURES_NV 0x95AA +#define GL_DETACHED_BUFFERS_NV 0x95AB +#define GL_MAX_DETACHED_TEXTURES_NV 0x95AC +#define GL_MAX_DETACHED_BUFFERS_NV 0x95AD +typedef void (GL_APIENTRYP PFNGLGETMEMORYOBJECTDETACHEDRESOURCESUIVNVPROC) (GLuint memory, GLenum pname, GLint first, GLsizei count, GLuint *params); +typedef void (GL_APIENTRYP PFNGLRESETMEMORYOBJECTPARAMETERNVPROC) (GLuint memory, GLenum pname); +typedef void (GL_APIENTRYP PFNGLTEXATTACHMEMORYNVPROC) (GLenum target, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLBUFFERATTACHMEMORYNVPROC) (GLenum target, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXTUREATTACHMEMORYNVPROC) (GLuint texture, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLNAMEDBUFFERATTACHMEMORYNVPROC) (GLuint buffer, GLuint memory, GLuint64 offset); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetMemoryObjectDetachedResourcesuivNV (GLuint memory, GLenum pname, GLint first, GLsizei count, GLuint *params); +GL_APICALL void GL_APIENTRY glResetMemoryObjectParameterNV (GLuint memory, GLenum pname); +GL_APICALL void GL_APIENTRY glTexAttachMemoryNV (GLenum target, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glBufferAttachMemoryNV (GLenum target, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTextureAttachMemoryNV (GLuint texture, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glNamedBufferAttachMemoryNV (GLuint buffer, GLuint memory, GLuint64 offset); +#endif +#endif /* GL_NV_memory_attachment */ + +#ifndef GL_NV_memory_object_sparse +#define GL_NV_memory_object_sparse 1 +typedef void (GL_APIENTRYP PFNGLBUFFERPAGECOMMITMENTMEMNVPROC) (GLenum target, GLintptr offset, GLsizeiptr size, GLuint memory, GLuint64 memOffset, GLboolean commit); +typedef void (GL_APIENTRYP PFNGLTEXPAGECOMMITMENTMEMNVPROC) (GLenum target, GLint layer, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset, GLboolean commit); +typedef void (GL_APIENTRYP PFNGLNAMEDBUFFERPAGECOMMITMENTMEMNVPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, GLuint memory, GLuint64 memOffset, GLboolean commit); +typedef void (GL_APIENTRYP PFNGLTEXTUREPAGECOMMITMENTMEMNVPROC) (GLuint texture, GLint layer, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset, GLboolean commit); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBufferPageCommitmentMemNV (GLenum target, GLintptr offset, GLsizeiptr size, GLuint memory, GLuint64 memOffset, GLboolean commit); +GL_APICALL void GL_APIENTRY glTexPageCommitmentMemNV (GLenum 
target, GLint layer, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset, GLboolean commit); +GL_APICALL void GL_APIENTRY glNamedBufferPageCommitmentMemNV (GLuint buffer, GLintptr offset, GLsizeiptr size, GLuint memory, GLuint64 memOffset, GLboolean commit); +GL_APICALL void GL_APIENTRY glTexturePageCommitmentMemNV (GLuint texture, GLint layer, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset, GLboolean commit); +#endif +#endif /* GL_NV_memory_object_sparse */ + +#ifndef GL_NV_mesh_shader +#define GL_NV_mesh_shader 1 +#define GL_MESH_SHADER_NV 0x9559 +#define GL_TASK_SHADER_NV 0x955A +#define GL_MAX_MESH_UNIFORM_BLOCKS_NV 0x8E60 +#define GL_MAX_MESH_TEXTURE_IMAGE_UNITS_NV 0x8E61 +#define GL_MAX_MESH_IMAGE_UNIFORMS_NV 0x8E62 +#define GL_MAX_MESH_UNIFORM_COMPONENTS_NV 0x8E63 +#define GL_MAX_MESH_ATOMIC_COUNTER_BUFFERS_NV 0x8E64 +#define GL_MAX_MESH_ATOMIC_COUNTERS_NV 0x8E65 +#define GL_MAX_MESH_SHADER_STORAGE_BLOCKS_NV 0x8E66 +#define GL_MAX_COMBINED_MESH_UNIFORM_COMPONENTS_NV 0x8E67 +#define GL_MAX_TASK_UNIFORM_BLOCKS_NV 0x8E68 +#define GL_MAX_TASK_TEXTURE_IMAGE_UNITS_NV 0x8E69 +#define GL_MAX_TASK_IMAGE_UNIFORMS_NV 0x8E6A +#define GL_MAX_TASK_UNIFORM_COMPONENTS_NV 0x8E6B +#define GL_MAX_TASK_ATOMIC_COUNTER_BUFFERS_NV 0x8E6C +#define GL_MAX_TASK_ATOMIC_COUNTERS_NV 0x8E6D +#define GL_MAX_TASK_SHADER_STORAGE_BLOCKS_NV 0x8E6E +#define GL_MAX_COMBINED_TASK_UNIFORM_COMPONENTS_NV 0x8E6F +#define GL_MAX_MESH_WORK_GROUP_INVOCATIONS_NV 0x95A2 +#define GL_MAX_TASK_WORK_GROUP_INVOCATIONS_NV 0x95A3 +#define GL_MAX_MESH_TOTAL_MEMORY_SIZE_NV 0x9536 +#define GL_MAX_TASK_TOTAL_MEMORY_SIZE_NV 0x9537 +#define GL_MAX_MESH_OUTPUT_VERTICES_NV 0x9538 +#define GL_MAX_MESH_OUTPUT_PRIMITIVES_NV 0x9539 +#define GL_MAX_TASK_OUTPUT_COUNT_NV 0x953A +#define GL_MAX_DRAW_MESH_TASKS_COUNT_NV 0x953D +#define GL_MAX_MESH_VIEWS_NV 0x9557 +#define GL_MESH_OUTPUT_PER_VERTEX_GRANULARITY_NV 0x92DF +#define GL_MESH_OUTPUT_PER_PRIMITIVE_GRANULARITY_NV 0x9543 +#define GL_MAX_MESH_WORK_GROUP_SIZE_NV 0x953B +#define GL_MAX_TASK_WORK_GROUP_SIZE_NV 0x953C +#define GL_MESH_WORK_GROUP_SIZE_NV 0x953E +#define GL_TASK_WORK_GROUP_SIZE_NV 0x953F +#define GL_MESH_VERTICES_OUT_NV 0x9579 +#define GL_MESH_PRIMITIVES_OUT_NV 0x957A +#define GL_MESH_OUTPUT_TYPE_NV 0x957B +#define GL_UNIFORM_BLOCK_REFERENCED_BY_MESH_SHADER_NV 0x959C +#define GL_UNIFORM_BLOCK_REFERENCED_BY_TASK_SHADER_NV 0x959D +#define GL_REFERENCED_BY_MESH_SHADER_NV 0x95A0 +#define GL_REFERENCED_BY_TASK_SHADER_NV 0x95A1 +#define GL_MESH_SHADER_BIT_NV 0x00000040 +#define GL_TASK_SHADER_BIT_NV 0x00000080 +#define GL_MESH_SUBROUTINE_NV 0x957C +#define GL_TASK_SUBROUTINE_NV 0x957D +#define GL_MESH_SUBROUTINE_UNIFORM_NV 0x957E +#define GL_TASK_SUBROUTINE_UNIFORM_NV 0x957F +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_MESH_SHADER_NV 0x959E +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_TASK_SHADER_NV 0x959F +typedef void (GL_APIENTRYP PFNGLDRAWMESHTASKSNVPROC) (GLuint first, GLuint count); +typedef void (GL_APIENTRYP PFNGLDRAWMESHTASKSINDIRECTNVPROC) (GLintptr indirect); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWMESHTASKSINDIRECTNVPROC) (GLintptr indirect, GLsizei drawcount, GLsizei stride); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWMESHTASKSINDIRECTCOUNTNVPROC) (GLintptr indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawMeshTasksNV 
(GLuint first, GLuint count); +GL_APICALL void GL_APIENTRY glDrawMeshTasksIndirectNV (GLintptr indirect); +GL_APICALL void GL_APIENTRY glMultiDrawMeshTasksIndirectNV (GLintptr indirect, GLsizei drawcount, GLsizei stride); +GL_APICALL void GL_APIENTRY glMultiDrawMeshTasksIndirectCountNV (GLintptr indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +#endif +#endif /* GL_NV_mesh_shader */ + +#ifndef GL_NV_non_square_matrices +#define GL_NV_non_square_matrices 1 +#define GL_FLOAT_MAT2x3_NV 0x8B65 +#define GL_FLOAT_MAT2x4_NV 0x8B66 +#define GL_FLOAT_MAT3x2_NV 0x8B67 +#define GL_FLOAT_MAT3x4_NV 0x8B68 +#define GL_FLOAT_MAT4x2_NV 0x8B69 +#define GL_FLOAT_MAT4x3_NV 0x8B6A +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X3FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X2FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X4FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X2FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X4FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X3FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glUniformMatrix2x3fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x2fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2x4fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x2fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x4fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x3fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#endif +#endif /* GL_NV_non_square_matrices */ + +#ifndef GL_NV_path_rendering +#define GL_NV_path_rendering 1 +typedef double GLdouble; +#define GL_PATH_FORMAT_SVG_NV 0x9070 +#define GL_PATH_FORMAT_PS_NV 0x9071 +#define GL_STANDARD_FONT_NAME_NV 0x9072 +#define GL_SYSTEM_FONT_NAME_NV 0x9073 +#define GL_FILE_NAME_NV 0x9074 +#define GL_PATH_STROKE_WIDTH_NV 0x9075 +#define GL_PATH_END_CAPS_NV 0x9076 +#define GL_PATH_INITIAL_END_CAP_NV 0x9077 +#define GL_PATH_TERMINAL_END_CAP_NV 0x9078 +#define GL_PATH_JOIN_STYLE_NV 0x9079 +#define GL_PATH_MITER_LIMIT_NV 0x907A +#define GL_PATH_DASH_CAPS_NV 0x907B +#define GL_PATH_INITIAL_DASH_CAP_NV 0x907C +#define GL_PATH_TERMINAL_DASH_CAP_NV 0x907D +#define GL_PATH_DASH_OFFSET_NV 0x907E +#define GL_PATH_CLIENT_LENGTH_NV 0x907F +#define GL_PATH_FILL_MODE_NV 0x9080 +#define GL_PATH_FILL_MASK_NV 0x9081 +#define GL_PATH_FILL_COVER_MODE_NV 0x9082 +#define GL_PATH_STROKE_COVER_MODE_NV 0x9083 +#define GL_PATH_STROKE_MASK_NV 0x9084 +#define GL_COUNT_UP_NV 0x9088 +#define GL_COUNT_DOWN_NV 0x9089 +#define GL_PATH_OBJECT_BOUNDING_BOX_NV 0x908A +#define GL_CONVEX_HULL_NV 0x908B +#define GL_BOUNDING_BOX_NV 0x908D +#define GL_TRANSLATE_X_NV 0x908E +#define GL_TRANSLATE_Y_NV 0x908F +#define GL_TRANSLATE_2D_NV 0x9090 
+#define GL_TRANSLATE_3D_NV 0x9091 +#define GL_AFFINE_2D_NV 0x9092 +#define GL_AFFINE_3D_NV 0x9094 +#define GL_TRANSPOSE_AFFINE_2D_NV 0x9096 +#define GL_TRANSPOSE_AFFINE_3D_NV 0x9098 +#define GL_UTF8_NV 0x909A +#define GL_UTF16_NV 0x909B +#define GL_BOUNDING_BOX_OF_BOUNDING_BOXES_NV 0x909C +#define GL_PATH_COMMAND_COUNT_NV 0x909D +#define GL_PATH_COORD_COUNT_NV 0x909E +#define GL_PATH_DASH_ARRAY_COUNT_NV 0x909F +#define GL_PATH_COMPUTED_LENGTH_NV 0x90A0 +#define GL_PATH_FILL_BOUNDING_BOX_NV 0x90A1 +#define GL_PATH_STROKE_BOUNDING_BOX_NV 0x90A2 +#define GL_SQUARE_NV 0x90A3 +#define GL_ROUND_NV 0x90A4 +#define GL_TRIANGULAR_NV 0x90A5 +#define GL_BEVEL_NV 0x90A6 +#define GL_MITER_REVERT_NV 0x90A7 +#define GL_MITER_TRUNCATE_NV 0x90A8 +#define GL_SKIP_MISSING_GLYPH_NV 0x90A9 +#define GL_USE_MISSING_GLYPH_NV 0x90AA +#define GL_PATH_ERROR_POSITION_NV 0x90AB +#define GL_ACCUM_ADJACENT_PAIRS_NV 0x90AD +#define GL_ADJACENT_PAIRS_NV 0x90AE +#define GL_FIRST_TO_REST_NV 0x90AF +#define GL_PATH_GEN_MODE_NV 0x90B0 +#define GL_PATH_GEN_COEFF_NV 0x90B1 +#define GL_PATH_GEN_COMPONENTS_NV 0x90B3 +#define GL_PATH_STENCIL_FUNC_NV 0x90B7 +#define GL_PATH_STENCIL_REF_NV 0x90B8 +#define GL_PATH_STENCIL_VALUE_MASK_NV 0x90B9 +#define GL_PATH_STENCIL_DEPTH_OFFSET_FACTOR_NV 0x90BD +#define GL_PATH_STENCIL_DEPTH_OFFSET_UNITS_NV 0x90BE +#define GL_PATH_COVER_DEPTH_FUNC_NV 0x90BF +#define GL_PATH_DASH_OFFSET_RESET_NV 0x90B4 +#define GL_MOVE_TO_RESETS_NV 0x90B5 +#define GL_MOVE_TO_CONTINUES_NV 0x90B6 +#define GL_CLOSE_PATH_NV 0x00 +#define GL_MOVE_TO_NV 0x02 +#define GL_RELATIVE_MOVE_TO_NV 0x03 +#define GL_LINE_TO_NV 0x04 +#define GL_RELATIVE_LINE_TO_NV 0x05 +#define GL_HORIZONTAL_LINE_TO_NV 0x06 +#define GL_RELATIVE_HORIZONTAL_LINE_TO_NV 0x07 +#define GL_VERTICAL_LINE_TO_NV 0x08 +#define GL_RELATIVE_VERTICAL_LINE_TO_NV 0x09 +#define GL_QUADRATIC_CURVE_TO_NV 0x0A +#define GL_RELATIVE_QUADRATIC_CURVE_TO_NV 0x0B +#define GL_CUBIC_CURVE_TO_NV 0x0C +#define GL_RELATIVE_CUBIC_CURVE_TO_NV 0x0D +#define GL_SMOOTH_QUADRATIC_CURVE_TO_NV 0x0E +#define GL_RELATIVE_SMOOTH_QUADRATIC_CURVE_TO_NV 0x0F +#define GL_SMOOTH_CUBIC_CURVE_TO_NV 0x10 +#define GL_RELATIVE_SMOOTH_CUBIC_CURVE_TO_NV 0x11 +#define GL_SMALL_CCW_ARC_TO_NV 0x12 +#define GL_RELATIVE_SMALL_CCW_ARC_TO_NV 0x13 +#define GL_SMALL_CW_ARC_TO_NV 0x14 +#define GL_RELATIVE_SMALL_CW_ARC_TO_NV 0x15 +#define GL_LARGE_CCW_ARC_TO_NV 0x16 +#define GL_RELATIVE_LARGE_CCW_ARC_TO_NV 0x17 +#define GL_LARGE_CW_ARC_TO_NV 0x18 +#define GL_RELATIVE_LARGE_CW_ARC_TO_NV 0x19 +#define GL_RESTART_PATH_NV 0xF0 +#define GL_DUP_FIRST_CUBIC_CURVE_TO_NV 0xF2 +#define GL_DUP_LAST_CUBIC_CURVE_TO_NV 0xF4 +#define GL_RECT_NV 0xF6 +#define GL_CIRCULAR_CCW_ARC_TO_NV 0xF8 +#define GL_CIRCULAR_CW_ARC_TO_NV 0xFA +#define GL_CIRCULAR_TANGENT_ARC_TO_NV 0xFC +#define GL_ARC_TO_NV 0xFE +#define GL_RELATIVE_ARC_TO_NV 0xFF +#define GL_BOLD_BIT_NV 0x01 +#define GL_ITALIC_BIT_NV 0x02 +#define GL_GLYPH_WIDTH_BIT_NV 0x01 +#define GL_GLYPH_HEIGHT_BIT_NV 0x02 +#define GL_GLYPH_HORIZONTAL_BEARING_X_BIT_NV 0x04 +#define GL_GLYPH_HORIZONTAL_BEARING_Y_BIT_NV 0x08 +#define GL_GLYPH_HORIZONTAL_BEARING_ADVANCE_BIT_NV 0x10 +#define GL_GLYPH_VERTICAL_BEARING_X_BIT_NV 0x20 +#define GL_GLYPH_VERTICAL_BEARING_Y_BIT_NV 0x40 +#define GL_GLYPH_VERTICAL_BEARING_ADVANCE_BIT_NV 0x80 +#define GL_GLYPH_HAS_KERNING_BIT_NV 0x100 +#define GL_FONT_X_MIN_BOUNDS_BIT_NV 0x00010000 +#define GL_FONT_Y_MIN_BOUNDS_BIT_NV 0x00020000 +#define GL_FONT_X_MAX_BOUNDS_BIT_NV 0x00040000 +#define GL_FONT_Y_MAX_BOUNDS_BIT_NV 0x00080000 +#define 
GL_FONT_UNITS_PER_EM_BIT_NV 0x00100000 +#define GL_FONT_ASCENDER_BIT_NV 0x00200000 +#define GL_FONT_DESCENDER_BIT_NV 0x00400000 +#define GL_FONT_HEIGHT_BIT_NV 0x00800000 +#define GL_FONT_MAX_ADVANCE_WIDTH_BIT_NV 0x01000000 +#define GL_FONT_MAX_ADVANCE_HEIGHT_BIT_NV 0x02000000 +#define GL_FONT_UNDERLINE_POSITION_BIT_NV 0x04000000 +#define GL_FONT_UNDERLINE_THICKNESS_BIT_NV 0x08000000 +#define GL_FONT_HAS_KERNING_BIT_NV 0x10000000 +#define GL_ROUNDED_RECT_NV 0xE8 +#define GL_RELATIVE_ROUNDED_RECT_NV 0xE9 +#define GL_ROUNDED_RECT2_NV 0xEA +#define GL_RELATIVE_ROUNDED_RECT2_NV 0xEB +#define GL_ROUNDED_RECT4_NV 0xEC +#define GL_RELATIVE_ROUNDED_RECT4_NV 0xED +#define GL_ROUNDED_RECT8_NV 0xEE +#define GL_RELATIVE_ROUNDED_RECT8_NV 0xEF +#define GL_RELATIVE_RECT_NV 0xF7 +#define GL_FONT_GLYPHS_AVAILABLE_NV 0x9368 +#define GL_FONT_TARGET_UNAVAILABLE_NV 0x9369 +#define GL_FONT_UNAVAILABLE_NV 0x936A +#define GL_FONT_UNINTELLIGIBLE_NV 0x936B +#define GL_CONIC_CURVE_TO_NV 0x1A +#define GL_RELATIVE_CONIC_CURVE_TO_NV 0x1B +#define GL_FONT_NUM_GLYPH_INDICES_BIT_NV 0x20000000 +#define GL_STANDARD_FONT_FORMAT_NV 0x936C +#define GL_PATH_PROJECTION_NV 0x1701 +#define GL_PATH_MODELVIEW_NV 0x1700 +#define GL_PATH_MODELVIEW_STACK_DEPTH_NV 0x0BA3 +#define GL_PATH_MODELVIEW_MATRIX_NV 0x0BA6 +#define GL_PATH_MAX_MODELVIEW_STACK_DEPTH_NV 0x0D36 +#define GL_PATH_TRANSPOSE_MODELVIEW_MATRIX_NV 0x84E3 +#define GL_PATH_PROJECTION_STACK_DEPTH_NV 0x0BA4 +#define GL_PATH_PROJECTION_MATRIX_NV 0x0BA7 +#define GL_PATH_MAX_PROJECTION_STACK_DEPTH_NV 0x0D38 +#define GL_PATH_TRANSPOSE_PROJECTION_MATRIX_NV 0x84E4 +#define GL_FRAGMENT_INPUT_NV 0x936D +typedef GLuint (GL_APIENTRYP PFNGLGENPATHSNVPROC) (GLsizei range); +typedef void (GL_APIENTRYP PFNGLDELETEPATHSNVPROC) (GLuint path, GLsizei range); +typedef GLboolean (GL_APIENTRYP PFNGLISPATHNVPROC) (GLuint path); +typedef void (GL_APIENTRYP PFNGLPATHCOMMANDSNVPROC) (GLuint path, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (GL_APIENTRYP PFNGLPATHCOORDSNVPROC) (GLuint path, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (GL_APIENTRYP PFNGLPATHSUBCOMMANDSNVPROC) (GLuint path, GLsizei commandStart, GLsizei commandsToDelete, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (GL_APIENTRYP PFNGLPATHSUBCOORDSNVPROC) (GLuint path, GLsizei coordStart, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (GL_APIENTRYP PFNGLPATHSTRINGNVPROC) (GLuint path, GLenum format, GLsizei length, const void *pathString); +typedef void (GL_APIENTRYP PFNGLPATHGLYPHSNVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLsizei numGlyphs, GLenum type, const void *charcodes, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (GL_APIENTRYP PFNGLPATHGLYPHRANGENVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyph, GLsizei numGlyphs, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (GL_APIENTRYP PFNGLWEIGHTPATHSNVPROC) (GLuint resultPath, GLsizei numPaths, const GLuint *paths, const GLfloat *weights); +typedef void (GL_APIENTRYP PFNGLCOPYPATHNVPROC) (GLuint resultPath, GLuint srcPath); +typedef void (GL_APIENTRYP PFNGLINTERPOLATEPATHSNVPROC) (GLuint resultPath, GLuint pathA, GLuint pathB, GLfloat weight); +typedef void (GL_APIENTRYP 
PFNGLTRANSFORMPATHNVPROC) (GLuint resultPath, GLuint srcPath, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLPATHPARAMETERIVNVPROC) (GLuint path, GLenum pname, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPATHPARAMETERINVPROC) (GLuint path, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLPATHPARAMETERFVNVPROC) (GLuint path, GLenum pname, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPATHPARAMETERFNVPROC) (GLuint path, GLenum pname, GLfloat value); +typedef void (GL_APIENTRYP PFNGLPATHDASHARRAYNVPROC) (GLuint path, GLsizei dashCount, const GLfloat *dashArray); +typedef void (GL_APIENTRYP PFNGLPATHSTENCILFUNCNVPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLPATHSTENCILDEPTHOFFSETNVPROC) (GLfloat factor, GLfloat units); +typedef void (GL_APIENTRYP PFNGLSTENCILFILLPATHNVPROC) (GLuint path, GLenum fillMode, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILSTROKEPATHNVPROC) (GLuint path, GLint reference, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLSTENCILSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLPATHCOVERDEPTHFUNCNVPROC) (GLenum func); +typedef void (GL_APIENTRYP PFNGLCOVERFILLPATHNVPROC) (GLuint path, GLenum coverMode); +typedef void (GL_APIENTRYP PFNGLCOVERSTROKEPATHNVPROC) (GLuint path, GLenum coverMode); +typedef void (GL_APIENTRYP PFNGLCOVERFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLCOVERSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLGETPATHPARAMETERIVNVPROC) (GLuint path, GLenum pname, GLint *value); +typedef void (GL_APIENTRYP PFNGLGETPATHPARAMETERFVNVPROC) (GLuint path, GLenum pname, GLfloat *value); +typedef void (GL_APIENTRYP PFNGLGETPATHCOMMANDSNVPROC) (GLuint path, GLubyte *commands); +typedef void (GL_APIENTRYP PFNGLGETPATHCOORDSNVPROC) (GLuint path, GLfloat *coords); +typedef void (GL_APIENTRYP PFNGLGETPATHDASHARRAYNVPROC) (GLuint path, GLfloat *dashArray); +typedef void (GL_APIENTRYP PFNGLGETPATHMETRICSNVPROC) (GLbitfield metricQueryMask, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLsizei stride, GLfloat *metrics); +typedef void (GL_APIENTRYP PFNGLGETPATHMETRICRANGENVPROC) (GLbitfield metricQueryMask, GLuint firstPathName, GLsizei numPaths, GLsizei stride, GLfloat *metrics); +typedef void (GL_APIENTRYP PFNGLGETPATHSPACINGNVPROC) (GLenum pathListMode, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLfloat advanceScale, GLfloat kerningScale, GLenum transformType, GLfloat *returnedSpacing); +typedef GLboolean (GL_APIENTRYP PFNGLISPOINTINFILLPATHNVPROC) (GLuint path, GLuint mask, GLfloat x, GLfloat y); +typedef GLboolean (GL_APIENTRYP PFNGLISPOINTINSTROKEPATHNVPROC) (GLuint path, GLfloat x, GLfloat y); +typedef GLfloat (GL_APIENTRYP PFNGLGETPATHLENGTHNVPROC) (GLuint path, GLsizei startSegment, 
GLsizei numSegments); +typedef GLboolean (GL_APIENTRYP PFNGLPOINTALONGPATHNVPROC) (GLuint path, GLsizei startSegment, GLsizei numSegments, GLfloat distance, GLfloat *x, GLfloat *y, GLfloat *tangentX, GLfloat *tangentY); +typedef void (GL_APIENTRYP PFNGLMATRIXLOAD3X2FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXLOAD3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADTRANSPOSE3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULT3X2FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULT3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULTTRANSPOSE3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLSTENCILTHENCOVERFILLPATHNVPROC) (GLuint path, GLenum fillMode, GLuint mask, GLenum coverMode); +typedef void (GL_APIENTRYP PFNGLSTENCILTHENCOVERSTROKEPATHNVPROC) (GLuint path, GLint reference, GLuint mask, GLenum coverMode); +typedef void (GL_APIENTRYP PFNGLSTENCILTHENCOVERFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLSTENCILTHENCOVERSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef GLenum (GL_APIENTRYP PFNGLPATHGLYPHINDEXRANGENVPROC) (GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint pathParameterTemplate, GLfloat emScale, GLuint baseAndCount[2]); +typedef GLenum (GL_APIENTRYP PFNGLPATHGLYPHINDEXARRAYNVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef GLenum (GL_APIENTRYP PFNGLPATHMEMORYGLYPHINDEXARRAYNVPROC) (GLuint firstPathName, GLenum fontTarget, GLsizeiptr fontSize, const void *fontData, GLsizei faceIndex, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (GL_APIENTRYP PFNGLPROGRAMPATHFRAGMENTINPUTGENNVPROC) (GLuint program, GLint location, GLenum genMode, GLint components, const GLfloat *coeffs); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMRESOURCEFVNVPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei count, GLsizei *length, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLMATRIXFRUSTUMEXTPROC) (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADIDENTITYEXTPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADTRANSPOSEFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADTRANSPOSEDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULTTRANSPOSEFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULTTRANSPOSEDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULTFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (GL_APIENTRYP 
PFNGLMATRIXMULTDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (GL_APIENTRYP PFNGLMATRIXORTHOEXTPROC) (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +typedef void (GL_APIENTRYP PFNGLMATRIXPOPEXTPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLMATRIXPUSHEXTPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLMATRIXROTATEFEXTPROC) (GLenum mode, GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLMATRIXROTATEDEXTPROC) (GLenum mode, GLdouble angle, GLdouble x, GLdouble y, GLdouble z); +typedef void (GL_APIENTRYP PFNGLMATRIXSCALEFEXTPROC) (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLMATRIXSCALEDEXTPROC) (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +typedef void (GL_APIENTRYP PFNGLMATRIXTRANSLATEFEXTPROC) (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLMATRIXTRANSLATEDEXTPROC) (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLuint GL_APIENTRY glGenPathsNV (GLsizei range); +GL_APICALL void GL_APIENTRY glDeletePathsNV (GLuint path, GLsizei range); +GL_APICALL GLboolean GL_APIENTRY glIsPathNV (GLuint path); +GL_APICALL void GL_APIENTRY glPathCommandsNV (GLuint path, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +GL_APICALL void GL_APIENTRY glPathCoordsNV (GLuint path, GLsizei numCoords, GLenum coordType, const void *coords); +GL_APICALL void GL_APIENTRY glPathSubCommandsNV (GLuint path, GLsizei commandStart, GLsizei commandsToDelete, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +GL_APICALL void GL_APIENTRY glPathSubCoordsNV (GLuint path, GLsizei coordStart, GLsizei numCoords, GLenum coordType, const void *coords); +GL_APICALL void GL_APIENTRY glPathStringNV (GLuint path, GLenum format, GLsizei length, const void *pathString); +GL_APICALL void GL_APIENTRY glPathGlyphsNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLsizei numGlyphs, GLenum type, const void *charcodes, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GL_APICALL void GL_APIENTRY glPathGlyphRangeNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyph, GLsizei numGlyphs, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GL_APICALL void GL_APIENTRY glWeightPathsNV (GLuint resultPath, GLsizei numPaths, const GLuint *paths, const GLfloat *weights); +GL_APICALL void GL_APIENTRY glCopyPathNV (GLuint resultPath, GLuint srcPath); +GL_APICALL void GL_APIENTRY glInterpolatePathsNV (GLuint resultPath, GLuint pathA, GLuint pathB, GLfloat weight); +GL_APICALL void GL_APIENTRY glTransformPathNV (GLuint resultPath, GLuint srcPath, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glPathParameterivNV (GLuint path, GLenum pname, const GLint *value); +GL_APICALL void GL_APIENTRY glPathParameteriNV (GLuint path, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glPathParameterfvNV (GLuint path, GLenum pname, const GLfloat *value); +GL_APICALL void GL_APIENTRY glPathParameterfNV (GLuint path, GLenum pname, GLfloat value); +GL_APICALL void GL_APIENTRY glPathDashArrayNV (GLuint path, GLsizei dashCount, const GLfloat *dashArray); +GL_APICALL void GL_APIENTRY glPathStencilFuncNV (GLenum func, GLint ref, GLuint mask); +GL_APICALL void 
GL_APIENTRY glPathStencilDepthOffsetNV (GLfloat factor, GLfloat units); +GL_APICALL void GL_APIENTRY glStencilFillPathNV (GLuint path, GLenum fillMode, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilStrokePathNV (GLuint path, GLint reference, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilFillPathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glStencilStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glPathCoverDepthFuncNV (GLenum func); +GL_APICALL void GL_APIENTRY glCoverFillPathNV (GLuint path, GLenum coverMode); +GL_APICALL void GL_APIENTRY glCoverStrokePathNV (GLuint path, GLenum coverMode); +GL_APICALL void GL_APIENTRY glCoverFillPathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glCoverStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glGetPathParameterivNV (GLuint path, GLenum pname, GLint *value); +GL_APICALL void GL_APIENTRY glGetPathParameterfvNV (GLuint path, GLenum pname, GLfloat *value); +GL_APICALL void GL_APIENTRY glGetPathCommandsNV (GLuint path, GLubyte *commands); +GL_APICALL void GL_APIENTRY glGetPathCoordsNV (GLuint path, GLfloat *coords); +GL_APICALL void GL_APIENTRY glGetPathDashArrayNV (GLuint path, GLfloat *dashArray); +GL_APICALL void GL_APIENTRY glGetPathMetricsNV (GLbitfield metricQueryMask, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLsizei stride, GLfloat *metrics); +GL_APICALL void GL_APIENTRY glGetPathMetricRangeNV (GLbitfield metricQueryMask, GLuint firstPathName, GLsizei numPaths, GLsizei stride, GLfloat *metrics); +GL_APICALL void GL_APIENTRY glGetPathSpacingNV (GLenum pathListMode, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLfloat advanceScale, GLfloat kerningScale, GLenum transformType, GLfloat *returnedSpacing); +GL_APICALL GLboolean GL_APIENTRY glIsPointInFillPathNV (GLuint path, GLuint mask, GLfloat x, GLfloat y); +GL_APICALL GLboolean GL_APIENTRY glIsPointInStrokePathNV (GLuint path, GLfloat x, GLfloat y); +GL_APICALL GLfloat GL_APIENTRY glGetPathLengthNV (GLuint path, GLsizei startSegment, GLsizei numSegments); +GL_APICALL GLboolean GL_APIENTRY glPointAlongPathNV (GLuint path, GLsizei startSegment, GLsizei numSegments, GLfloat distance, GLfloat *x, GLfloat *y, GLfloat *tangentX, GLfloat *tangentY); +GL_APICALL void GL_APIENTRY glMatrixLoad3x2fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixLoad3x3fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixLoadTranspose3x3fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixMult3x2fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixMult3x3fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixMultTranspose3x3fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glStencilThenCoverFillPathNV (GLuint path, GLenum fillMode, GLuint mask, GLenum coverMode); +GL_APICALL void GL_APIENTRY 
glStencilThenCoverStrokePathNV (GLuint path, GLint reference, GLuint mask, GLenum coverMode); +GL_APICALL void GL_APIENTRY glStencilThenCoverFillPathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glStencilThenCoverStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GL_APICALL GLenum GL_APIENTRY glPathGlyphIndexRangeNV (GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint pathParameterTemplate, GLfloat emScale, GLuint baseAndCount[2]); +GL_APICALL GLenum GL_APIENTRY glPathGlyphIndexArrayNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GL_APICALL GLenum GL_APIENTRY glPathMemoryGlyphIndexArrayNV (GLuint firstPathName, GLenum fontTarget, GLsizeiptr fontSize, const void *fontData, GLsizei faceIndex, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GL_APICALL void GL_APIENTRY glProgramPathFragmentInputGenNV (GLuint program, GLint location, GLenum genMode, GLint components, const GLfloat *coeffs); +GL_APICALL void GL_APIENTRY glGetProgramResourcefvNV (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei count, GLsizei *length, GLfloat *params); +GL_APICALL void GL_APIENTRY glMatrixFrustumEXT (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +GL_APICALL void GL_APIENTRY glMatrixLoadIdentityEXT (GLenum mode); +GL_APICALL void GL_APIENTRY glMatrixLoadTransposefEXT (GLenum mode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixLoadTransposedEXT (GLenum mode, const GLdouble *m); +GL_APICALL void GL_APIENTRY glMatrixLoadfEXT (GLenum mode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixLoaddEXT (GLenum mode, const GLdouble *m); +GL_APICALL void GL_APIENTRY glMatrixMultTransposefEXT (GLenum mode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixMultTransposedEXT (GLenum mode, const GLdouble *m); +GL_APICALL void GL_APIENTRY glMatrixMultfEXT (GLenum mode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixMultdEXT (GLenum mode, const GLdouble *m); +GL_APICALL void GL_APIENTRY glMatrixOrthoEXT (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +GL_APICALL void GL_APIENTRY glMatrixPopEXT (GLenum mode); +GL_APICALL void GL_APIENTRY glMatrixPushEXT (GLenum mode); +GL_APICALL void GL_APIENTRY glMatrixRotatefEXT (GLenum mode, GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glMatrixRotatedEXT (GLenum mode, GLdouble angle, GLdouble x, GLdouble y, GLdouble z); +GL_APICALL void GL_APIENTRY glMatrixScalefEXT (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glMatrixScaledEXT (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +GL_APICALL void GL_APIENTRY glMatrixTranslatefEXT (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glMatrixTranslatedEXT (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +#endif +#endif /* GL_NV_path_rendering */ + +#ifndef GL_NV_path_rendering_shared_edge +#define GL_NV_path_rendering_shared_edge 1 +#define 
GL_SHARED_EDGE_NV 0xC0 +#endif /* GL_NV_path_rendering_shared_edge */ + +#ifndef GL_NV_pixel_buffer_object +#define GL_NV_pixel_buffer_object 1 +#define GL_PIXEL_PACK_BUFFER_NV 0x88EB +#define GL_PIXEL_UNPACK_BUFFER_NV 0x88EC +#define GL_PIXEL_PACK_BUFFER_BINDING_NV 0x88ED +#define GL_PIXEL_UNPACK_BUFFER_BINDING_NV 0x88EF +#endif /* GL_NV_pixel_buffer_object */ + +#ifndef GL_NV_polygon_mode +#define GL_NV_polygon_mode 1 +#define GL_POLYGON_MODE_NV 0x0B40 +#define GL_POLYGON_OFFSET_POINT_NV 0x2A01 +#define GL_POLYGON_OFFSET_LINE_NV 0x2A02 +#define GL_POINT_NV 0x1B00 +#define GL_LINE_NV 0x1B01 +#define GL_FILL_NV 0x1B02 +typedef void (GL_APIENTRYP PFNGLPOLYGONMODENVPROC) (GLenum face, GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glPolygonModeNV (GLenum face, GLenum mode); +#endif +#endif /* GL_NV_polygon_mode */ + +#ifndef GL_NV_primitive_shading_rate +#define GL_NV_primitive_shading_rate 1 +#define GL_SHADING_RATE_IMAGE_PER_PRIMITIVE_NV 0x95B1 +#define GL_SHADING_RATE_IMAGE_PALETTE_COUNT_NV 0x95B2 +#endif /* GL_NV_primitive_shading_rate */ + +#ifndef GL_NV_read_buffer +#define GL_NV_read_buffer 1 +#define GL_READ_BUFFER_NV 0x0C02 +typedef void (GL_APIENTRYP PFNGLREADBUFFERNVPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glReadBufferNV (GLenum mode); +#endif +#endif /* GL_NV_read_buffer */ + +#ifndef GL_NV_read_buffer_front +#define GL_NV_read_buffer_front 1 +#endif /* GL_NV_read_buffer_front */ + +#ifndef GL_NV_read_depth +#define GL_NV_read_depth 1 +#endif /* GL_NV_read_depth */ + +#ifndef GL_NV_read_depth_stencil +#define GL_NV_read_depth_stencil 1 +#endif /* GL_NV_read_depth_stencil */ + +#ifndef GL_NV_read_stencil +#define GL_NV_read_stencil 1 +#endif /* GL_NV_read_stencil */ + +#ifndef GL_NV_representative_fragment_test +#define GL_NV_representative_fragment_test 1 +#define GL_REPRESENTATIVE_FRAGMENT_TEST_NV 0x937F +#endif /* GL_NV_representative_fragment_test */ + +#ifndef GL_NV_sRGB_formats +#define GL_NV_sRGB_formats 1 +#define GL_SLUMINANCE_NV 0x8C46 +#define GL_SLUMINANCE_ALPHA_NV 0x8C44 +#define GL_SRGB8_NV 0x8C41 +#define GL_SLUMINANCE8_NV 0x8C47 +#define GL_SLUMINANCE8_ALPHA8_NV 0x8C45 +#define GL_COMPRESSED_SRGB_S3TC_DXT1_NV 0x8C4C +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_NV 0x8C4D +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_NV 0x8C4E +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_NV 0x8C4F +#define GL_ETC1_SRGB8_NV 0x88EE +#endif /* GL_NV_sRGB_formats */ + +#ifndef GL_NV_sample_locations +#define GL_NV_sample_locations 1 +#define GL_SAMPLE_LOCATION_SUBPIXEL_BITS_NV 0x933D +#define GL_SAMPLE_LOCATION_PIXEL_GRID_WIDTH_NV 0x933E +#define GL_SAMPLE_LOCATION_PIXEL_GRID_HEIGHT_NV 0x933F +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_TABLE_SIZE_NV 0x9340 +#define GL_SAMPLE_LOCATION_NV 0x8E50 +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_NV 0x9341 +#define GL_FRAMEBUFFER_PROGRAMMABLE_SAMPLE_LOCATIONS_NV 0x9342 +#define GL_FRAMEBUFFER_SAMPLE_LOCATION_PIXEL_GRID_NV 0x9343 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERSAMPLELOCATIONSFVNVPROC) (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLNAMEDFRAMEBUFFERSAMPLELOCATIONSFVNVPROC) (GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLRESOLVEDEPTHVALUESNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferSampleLocationsfvNV (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glNamedFramebufferSampleLocationsfvNV (GLuint 
framebuffer, GLuint start, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glResolveDepthValuesNV (void); +#endif +#endif /* GL_NV_sample_locations */ + +#ifndef GL_NV_sample_mask_override_coverage +#define GL_NV_sample_mask_override_coverage 1 +#endif /* GL_NV_sample_mask_override_coverage */ + +#ifndef GL_NV_scissor_exclusive +#define GL_NV_scissor_exclusive 1 +#define GL_SCISSOR_TEST_EXCLUSIVE_NV 0x9555 +#define GL_SCISSOR_BOX_EXCLUSIVE_NV 0x9556 +typedef void (GL_APIENTRYP PFNGLSCISSOREXCLUSIVENVPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSCISSOREXCLUSIVEARRAYVNVPROC) (GLuint first, GLsizei count, const GLint *v); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glScissorExclusiveNV (GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glScissorExclusiveArrayvNV (GLuint first, GLsizei count, const GLint *v); +#endif +#endif /* GL_NV_scissor_exclusive */ + +#ifndef GL_NV_shader_atomic_fp16_vector +#define GL_NV_shader_atomic_fp16_vector 1 +#endif /* GL_NV_shader_atomic_fp16_vector */ + +#ifndef GL_NV_shader_noperspective_interpolation +#define GL_NV_shader_noperspective_interpolation 1 +#endif /* GL_NV_shader_noperspective_interpolation */ + +#ifndef GL_NV_shader_subgroup_partitioned +#define GL_NV_shader_subgroup_partitioned 1 +#define GL_SUBGROUP_FEATURE_PARTITIONED_BIT_NV 0x00000100 +#endif /* GL_NV_shader_subgroup_partitioned */ + +#ifndef GL_NV_shader_texture_footprint +#define GL_NV_shader_texture_footprint 1 +#endif /* GL_NV_shader_texture_footprint */ + +#ifndef GL_NV_shading_rate_image +#define GL_NV_shading_rate_image 1 +#define GL_SHADING_RATE_IMAGE_NV 0x9563 +#define GL_SHADING_RATE_NO_INVOCATIONS_NV 0x9564 +#define GL_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV 0x9565 +#define GL_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV 0x9566 +#define GL_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV 0x9567 +#define GL_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV 0x9568 +#define GL_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV 0x9569 +#define GL_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV 0x956A +#define GL_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV 0x956B +#define GL_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV 0x956C +#define GL_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV 0x956D +#define GL_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV 0x956E +#define GL_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV 0x956F +#define GL_SHADING_RATE_IMAGE_BINDING_NV 0x955B +#define GL_SHADING_RATE_IMAGE_TEXEL_WIDTH_NV 0x955C +#define GL_SHADING_RATE_IMAGE_TEXEL_HEIGHT_NV 0x955D +#define GL_SHADING_RATE_IMAGE_PALETTE_SIZE_NV 0x955E +#define GL_MAX_COARSE_FRAGMENT_SAMPLES_NV 0x955F +#define GL_SHADING_RATE_SAMPLE_ORDER_DEFAULT_NV 0x95AE +#define GL_SHADING_RATE_SAMPLE_ORDER_PIXEL_MAJOR_NV 0x95AF +#define GL_SHADING_RATE_SAMPLE_ORDER_SAMPLE_MAJOR_NV 0x95B0 +typedef void (GL_APIENTRYP PFNGLBINDSHADINGRATEIMAGENVPROC) (GLuint texture); +typedef void (GL_APIENTRYP PFNGLGETSHADINGRATEIMAGEPALETTENVPROC) (GLuint viewport, GLuint entry, GLenum *rate); +typedef void (GL_APIENTRYP PFNGLGETSHADINGRATESAMPLELOCATIONIVNVPROC) (GLenum rate, GLuint samples, GLuint index, GLint *location); +typedef void (GL_APIENTRYP PFNGLSHADINGRATEIMAGEBARRIERNVPROC) (GLboolean synchronize); +typedef void (GL_APIENTRYP PFNGLSHADINGRATEIMAGEPALETTENVPROC) (GLuint viewport, GLuint first, GLsizei count, const GLenum *rates); +typedef void (GL_APIENTRYP PFNGLSHADINGRATESAMPLEORDERNVPROC) (GLenum order); +typedef void (GL_APIENTRYP PFNGLSHADINGRATESAMPLEORDERCUSTOMNVPROC) 
(GLenum rate, GLuint samples, const GLint *locations); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBindShadingRateImageNV (GLuint texture); +GL_APICALL void GL_APIENTRY glGetShadingRateImagePaletteNV (GLuint viewport, GLuint entry, GLenum *rate); +GL_APICALL void GL_APIENTRY glGetShadingRateSampleLocationivNV (GLenum rate, GLuint samples, GLuint index, GLint *location); +GL_APICALL void GL_APIENTRY glShadingRateImageBarrierNV (GLboolean synchronize); +GL_APICALL void GL_APIENTRY glShadingRateImagePaletteNV (GLuint viewport, GLuint first, GLsizei count, const GLenum *rates); +GL_APICALL void GL_APIENTRY glShadingRateSampleOrderNV (GLenum order); +GL_APICALL void GL_APIENTRY glShadingRateSampleOrderCustomNV (GLenum rate, GLuint samples, const GLint *locations); +#endif +#endif /* GL_NV_shading_rate_image */ + +#ifndef GL_NV_shadow_samplers_array +#define GL_NV_shadow_samplers_array 1 +#define GL_SAMPLER_2D_ARRAY_SHADOW_NV 0x8DC4 +#endif /* GL_NV_shadow_samplers_array */ + +#ifndef GL_NV_shadow_samplers_cube +#define GL_NV_shadow_samplers_cube 1 +#define GL_SAMPLER_CUBE_SHADOW_NV 0x8DC5 +#endif /* GL_NV_shadow_samplers_cube */ + +#ifndef GL_NV_stereo_view_rendering +#define GL_NV_stereo_view_rendering 1 +#endif /* GL_NV_stereo_view_rendering */ + +#ifndef GL_NV_texture_border_clamp +#define GL_NV_texture_border_clamp 1 +#define GL_TEXTURE_BORDER_COLOR_NV 0x1004 +#define GL_CLAMP_TO_BORDER_NV 0x812D +#endif /* GL_NV_texture_border_clamp */ + +#ifndef GL_NV_texture_compression_s3tc_update +#define GL_NV_texture_compression_s3tc_update 1 +#endif /* GL_NV_texture_compression_s3tc_update */ + +#ifndef GL_NV_texture_npot_2D_mipmap +#define GL_NV_texture_npot_2D_mipmap 1 +#endif /* GL_NV_texture_npot_2D_mipmap */ + +#ifndef GL_NV_timeline_semaphore +#define GL_NV_timeline_semaphore 1 +#define GL_TIMELINE_SEMAPHORE_VALUE_NV 0x9595 +#define GL_SEMAPHORE_TYPE_NV 0x95B3 +#define GL_SEMAPHORE_TYPE_BINARY_NV 0x95B4 +#define GL_SEMAPHORE_TYPE_TIMELINE_NV 0x95B5 +#define GL_MAX_TIMELINE_SEMAPHORE_VALUE_DIFFERENCE_NV 0x95B6 +typedef void (GL_APIENTRYP PFNGLCREATESEMAPHORESNVPROC) (GLsizei n, GLuint *semaphores); +typedef void (GL_APIENTRYP PFNGLSEMAPHOREPARAMETERIVNVPROC) (GLuint semaphore, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSEMAPHOREPARAMETERIVNVPROC) (GLuint semaphore, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCreateSemaphoresNV (GLsizei n, GLuint *semaphores); +GL_APICALL void GL_APIENTRY glSemaphoreParameterivNV (GLuint semaphore, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glGetSemaphoreParameterivNV (GLuint semaphore, GLenum pname, GLint *params); +#endif +#endif /* GL_NV_timeline_semaphore */ + +#ifndef GL_NV_viewport_array +#define GL_NV_viewport_array 1 +#define GL_MAX_VIEWPORTS_NV 0x825B +#define GL_VIEWPORT_SUBPIXEL_BITS_NV 0x825C +#define GL_VIEWPORT_BOUNDS_RANGE_NV 0x825D +#define GL_VIEWPORT_INDEX_PROVOKING_VERTEX_NV 0x825F +typedef void (GL_APIENTRYP PFNGLVIEWPORTARRAYVNVPROC) (GLuint first, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVIEWPORTINDEXEDFNVPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +typedef void (GL_APIENTRYP PFNGLVIEWPORTINDEXEDFVNVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLSCISSORARRAYVNVPROC) (GLuint first, GLsizei count, const GLint *v); +typedef void (GL_APIENTRYP PFNGLSCISSORINDEXEDNVPROC) (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +typedef void 
(GL_APIENTRYP PFNGLSCISSORINDEXEDVNVPROC) (GLuint index, const GLint *v); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEARRAYFVNVPROC) (GLuint first, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEINDEXEDFNVPROC) (GLuint index, GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLGETFLOATI_VNVPROC) (GLenum target, GLuint index, GLfloat *data); +typedef void (GL_APIENTRYP PFNGLENABLEINVPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLDISABLEINVPROC) (GLenum target, GLuint index); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDINVPROC) (GLenum target, GLuint index); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glViewportArrayvNV (GLuint first, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glViewportIndexedfNV (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +GL_APICALL void GL_APIENTRY glViewportIndexedfvNV (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glScissorArrayvNV (GLuint first, GLsizei count, const GLint *v); +GL_APICALL void GL_APIENTRY glScissorIndexedNV (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glScissorIndexedvNV (GLuint index, const GLint *v); +GL_APICALL void GL_APIENTRY glDepthRangeArrayfvNV (GLuint first, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glDepthRangeIndexedfNV (GLuint index, GLfloat n, GLfloat f); +GL_APICALL void GL_APIENTRY glGetFloati_vNV (GLenum target, GLuint index, GLfloat *data); +GL_APICALL void GL_APIENTRY glEnableiNV (GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glDisableiNV (GLenum target, GLuint index); +GL_APICALL GLboolean GL_APIENTRY glIsEnablediNV (GLenum target, GLuint index); +#endif +#endif /* GL_NV_viewport_array */ + +#ifndef GL_NV_viewport_array2 +#define GL_NV_viewport_array2 1 +#endif /* GL_NV_viewport_array2 */ + +#ifndef GL_NV_viewport_swizzle +#define GL_NV_viewport_swizzle 1 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_X_NV 0x9350 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_X_NV 0x9351 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_Y_NV 0x9352 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_Y_NV 0x9353 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_Z_NV 0x9354 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_Z_NV 0x9355 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_W_NV 0x9356 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_W_NV 0x9357 +#define GL_VIEWPORT_SWIZZLE_X_NV 0x9358 +#define GL_VIEWPORT_SWIZZLE_Y_NV 0x9359 +#define GL_VIEWPORT_SWIZZLE_Z_NV 0x935A +#define GL_VIEWPORT_SWIZZLE_W_NV 0x935B +typedef void (GL_APIENTRYP PFNGLVIEWPORTSWIZZLENVPROC) (GLuint index, GLenum swizzlex, GLenum swizzley, GLenum swizzlez, GLenum swizzlew); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glViewportSwizzleNV (GLuint index, GLenum swizzlex, GLenum swizzley, GLenum swizzlez, GLenum swizzlew); +#endif +#endif /* GL_NV_viewport_swizzle */ + +#ifndef GL_OVR_multiview +#define GL_OVR_multiview 1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_NUM_VIEWS_OVR 0x9630 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_BASE_VIEW_INDEX_OVR 0x9632 +#define GL_MAX_VIEWS_OVR 0x9631 +#define GL_FRAMEBUFFER_INCOMPLETE_VIEW_TARGETS_OVR 0x9633 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint baseViewIndex, GLsizei numViews); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferTextureMultiviewOVR (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint baseViewIndex, GLsizei numViews); +#endif +#endif /* GL_OVR_multiview */ 
+ +#ifndef GL_OVR_multiview2 +#define GL_OVR_multiview2 1 +#endif /* GL_OVR_multiview2 */ + +#ifndef GL_OVR_multiview_multisampled_render_to_texture +#define GL_OVR_multiview_multisampled_render_to_texture 1 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTUREMULTISAMPLEMULTIVIEWOVRPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLsizei samples, GLint baseViewIndex, GLsizei numViews); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferTextureMultisampleMultiviewOVR (GLenum target, GLenum attachment, GLuint texture, GLint level, GLsizei samples, GLint baseViewIndex, GLsizei numViews); +#endif +#endif /* GL_OVR_multiview_multisampled_render_to_texture */ + +#ifndef GL_QCOM_YUV_texture_gather +#define GL_QCOM_YUV_texture_gather 1 +#endif /* GL_QCOM_YUV_texture_gather */ + +#ifndef GL_QCOM_alpha_test +#define GL_QCOM_alpha_test 1 +#define GL_ALPHA_TEST_QCOM 0x0BC0 +#define GL_ALPHA_TEST_FUNC_QCOM 0x0BC1 +#define GL_ALPHA_TEST_REF_QCOM 0x0BC2 +typedef void (GL_APIENTRYP PFNGLALPHAFUNCQCOMPROC) (GLenum func, GLclampf ref); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glAlphaFuncQCOM (GLenum func, GLclampf ref); +#endif +#endif /* GL_QCOM_alpha_test */ + +#ifndef GL_QCOM_binning_control +#define GL_QCOM_binning_control 1 +#define GL_BINNING_CONTROL_HINT_QCOM 0x8FB0 +#define GL_CPU_OPTIMIZED_QCOM 0x8FB1 +#define GL_GPU_OPTIMIZED_QCOM 0x8FB2 +#define GL_RENDER_DIRECT_TO_FRAMEBUFFER_QCOM 0x8FB3 +#endif /* GL_QCOM_binning_control */ + +#ifndef GL_QCOM_driver_control +#define GL_QCOM_driver_control 1 +typedef void (GL_APIENTRYP PFNGLGETDRIVERCONTROLSQCOMPROC) (GLint *num, GLsizei size, GLuint *driverControls); +typedef void (GL_APIENTRYP PFNGLGETDRIVERCONTROLSTRINGQCOMPROC) (GLuint driverControl, GLsizei bufSize, GLsizei *length, GLchar *driverControlString); +typedef void (GL_APIENTRYP PFNGLENABLEDRIVERCONTROLQCOMPROC) (GLuint driverControl); +typedef void (GL_APIENTRYP PFNGLDISABLEDRIVERCONTROLQCOMPROC) (GLuint driverControl); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetDriverControlsQCOM (GLint *num, GLsizei size, GLuint *driverControls); +GL_APICALL void GL_APIENTRY glGetDriverControlStringQCOM (GLuint driverControl, GLsizei bufSize, GLsizei *length, GLchar *driverControlString); +GL_APICALL void GL_APIENTRY glEnableDriverControlQCOM (GLuint driverControl); +GL_APICALL void GL_APIENTRY glDisableDriverControlQCOM (GLuint driverControl); +#endif +#endif /* GL_QCOM_driver_control */ + +#ifndef GL_QCOM_extended_get +#define GL_QCOM_extended_get 1 +#define GL_TEXTURE_WIDTH_QCOM 0x8BD2 +#define GL_TEXTURE_HEIGHT_QCOM 0x8BD3 +#define GL_TEXTURE_DEPTH_QCOM 0x8BD4 +#define GL_TEXTURE_INTERNAL_FORMAT_QCOM 0x8BD5 +#define GL_TEXTURE_FORMAT_QCOM 0x8BD6 +#define GL_TEXTURE_TYPE_QCOM 0x8BD7 +#define GL_TEXTURE_IMAGE_VALID_QCOM 0x8BD8 +#define GL_TEXTURE_NUM_LEVELS_QCOM 0x8BD9 +#define GL_TEXTURE_TARGET_QCOM 0x8BDA +#define GL_TEXTURE_OBJECT_VALID_QCOM 0x8BDB +#define GL_STATE_RESTORE 0x8BDC +typedef void (GL_APIENTRYP PFNGLEXTGETTEXTURESQCOMPROC) (GLuint *textures, GLint maxTextures, GLint *numTextures); +typedef void (GL_APIENTRYP PFNGLEXTGETBUFFERSQCOMPROC) (GLuint *buffers, GLint maxBuffers, GLint *numBuffers); +typedef void (GL_APIENTRYP PFNGLEXTGETRENDERBUFFERSQCOMPROC) (GLuint *renderbuffers, GLint maxRenderbuffers, GLint *numRenderbuffers); +typedef void (GL_APIENTRYP PFNGLEXTGETFRAMEBUFFERSQCOMPROC) (GLuint *framebuffers, GLint maxFramebuffers, GLint *numFramebuffers); +typedef void (GL_APIENTRYP 
PFNGLEXTGETTEXLEVELPARAMETERIVQCOMPROC) (GLuint texture, GLenum face, GLint level, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLEXTTEXOBJECTSTATEOVERRIDEIQCOMPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLEXTGETTEXSUBIMAGEQCOMPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, void *texels); +typedef void (GL_APIENTRYP PFNGLEXTGETBUFFERPOINTERVQCOMPROC) (GLenum target, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glExtGetTexturesQCOM (GLuint *textures, GLint maxTextures, GLint *numTextures); +GL_APICALL void GL_APIENTRY glExtGetBuffersQCOM (GLuint *buffers, GLint maxBuffers, GLint *numBuffers); +GL_APICALL void GL_APIENTRY glExtGetRenderbuffersQCOM (GLuint *renderbuffers, GLint maxRenderbuffers, GLint *numRenderbuffers); +GL_APICALL void GL_APIENTRY glExtGetFramebuffersQCOM (GLuint *framebuffers, GLint maxFramebuffers, GLint *numFramebuffers); +GL_APICALL void GL_APIENTRY glExtGetTexLevelParameterivQCOM (GLuint texture, GLenum face, GLint level, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glExtTexObjectStateOverrideiQCOM (GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glExtGetTexSubImageQCOM (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, void *texels); +GL_APICALL void GL_APIENTRY glExtGetBufferPointervQCOM (GLenum target, void **params); +#endif +#endif /* GL_QCOM_extended_get */ + +#ifndef GL_QCOM_extended_get2 +#define GL_QCOM_extended_get2 1 +typedef void (GL_APIENTRYP PFNGLEXTGETSHADERSQCOMPROC) (GLuint *shaders, GLint maxShaders, GLint *numShaders); +typedef void (GL_APIENTRYP PFNGLEXTGETPROGRAMSQCOMPROC) (GLuint *programs, GLint maxPrograms, GLint *numPrograms); +typedef GLboolean (GL_APIENTRYP PFNGLEXTISPROGRAMBINARYQCOMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLEXTGETPROGRAMBINARYSOURCEQCOMPROC) (GLuint program, GLenum shadertype, GLchar *source, GLint *length); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glExtGetShadersQCOM (GLuint *shaders, GLint maxShaders, GLint *numShaders); +GL_APICALL void GL_APIENTRY glExtGetProgramsQCOM (GLuint *programs, GLint maxPrograms, GLint *numPrograms); +GL_APICALL GLboolean GL_APIENTRY glExtIsProgramBinaryQCOM (GLuint program); +GL_APICALL void GL_APIENTRY glExtGetProgramBinarySourceQCOM (GLuint program, GLenum shadertype, GLchar *source, GLint *length); +#endif +#endif /* GL_QCOM_extended_get2 */ + +#ifndef GL_QCOM_frame_extrapolation +#define GL_QCOM_frame_extrapolation 1 +typedef void (GL_APIENTRYP PFNGLEXTRAPOLATETEX2DQCOMPROC) (GLuint src1, GLuint src2, GLuint output, GLfloat scaleFactor); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glExtrapolateTex2DQCOM (GLuint src1, GLuint src2, GLuint output, GLfloat scaleFactor); +#endif +#endif /* GL_QCOM_frame_extrapolation */ + +#ifndef GL_QCOM_framebuffer_foveated +#define GL_QCOM_framebuffer_foveated 1 +#define GL_FOVEATION_ENABLE_BIT_QCOM 0x00000001 +#define GL_FOVEATION_SCALED_BIN_METHOD_BIT_QCOM 0x00000002 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERFOVEATIONCONFIGQCOMPROC) (GLuint framebuffer, GLuint numLayers, GLuint focalPointsPerLayer, GLuint requestedFeatures, GLuint *providedFeatures); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERFOVEATIONPARAMETERSQCOMPROC) (GLuint framebuffer, GLuint layer, GLuint focalPoint, GLfloat focalX, 
GLfloat focalY, GLfloat gainX, GLfloat gainY, GLfloat foveaArea); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferFoveationConfigQCOM (GLuint framebuffer, GLuint numLayers, GLuint focalPointsPerLayer, GLuint requestedFeatures, GLuint *providedFeatures); +GL_APICALL void GL_APIENTRY glFramebufferFoveationParametersQCOM (GLuint framebuffer, GLuint layer, GLuint focalPoint, GLfloat focalX, GLfloat focalY, GLfloat gainX, GLfloat gainY, GLfloat foveaArea); +#endif +#endif /* GL_QCOM_framebuffer_foveated */ + +#ifndef GL_QCOM_motion_estimation +#define GL_QCOM_motion_estimation 1 +#define GL_MOTION_ESTIMATION_SEARCH_BLOCK_X_QCOM 0x8C90 +#define GL_MOTION_ESTIMATION_SEARCH_BLOCK_Y_QCOM 0x8C91 +typedef void (GL_APIENTRYP PFNGLTEXESTIMATEMOTIONQCOMPROC) (GLuint ref, GLuint target, GLuint output); +typedef void (GL_APIENTRYP PFNGLTEXESTIMATEMOTIONREGIONSQCOMPROC) (GLuint ref, GLuint target, GLuint output, GLuint mask); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexEstimateMotionQCOM (GLuint ref, GLuint target, GLuint output); +GL_APICALL void GL_APIENTRY glTexEstimateMotionRegionsQCOM (GLuint ref, GLuint target, GLuint output, GLuint mask); +#endif +#endif /* GL_QCOM_motion_estimation */ + +#ifndef GL_QCOM_perfmon_global_mode +#define GL_QCOM_perfmon_global_mode 1 +#define GL_PERFMON_GLOBAL_MODE_QCOM 0x8FA0 +#endif /* GL_QCOM_perfmon_global_mode */ + +#ifndef GL_QCOM_shader_framebuffer_fetch_noncoherent +#define GL_QCOM_shader_framebuffer_fetch_noncoherent 1 +#define GL_FRAMEBUFFER_FETCH_NONCOHERENT_QCOM 0x96A2 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERFETCHBARRIERQCOMPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferFetchBarrierQCOM (void); +#endif +#endif /* GL_QCOM_shader_framebuffer_fetch_noncoherent */ + +#ifndef GL_QCOM_shader_framebuffer_fetch_rate +#define GL_QCOM_shader_framebuffer_fetch_rate 1 +#endif /* GL_QCOM_shader_framebuffer_fetch_rate */ + +#ifndef GL_QCOM_shading_rate +#define GL_QCOM_shading_rate 1 +#define GL_SHADING_RATE_QCOM 0x96A4 +#define GL_SHADING_RATE_PRESERVE_ASPECT_RATIO_QCOM 0x96A5 +#define GL_SHADING_RATE_1X1_PIXELS_QCOM 0x96A6 +#define GL_SHADING_RATE_1X2_PIXELS_QCOM 0x96A7 +#define GL_SHADING_RATE_2X1_PIXELS_QCOM 0x96A8 +#define GL_SHADING_RATE_2X2_PIXELS_QCOM 0x96A9 +#define GL_SHADING_RATE_4X2_PIXELS_QCOM 0x96AC +#define GL_SHADING_RATE_4X4_PIXELS_QCOM 0x96AE +typedef void (GL_APIENTRYP PFNGLSHADINGRATEQCOMPROC) (GLenum rate); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glShadingRateQCOM (GLenum rate); +#endif +#endif /* GL_QCOM_shading_rate */ + +#ifndef GL_QCOM_texture_foveated +#define GL_QCOM_texture_foveated 1 +#define GL_TEXTURE_FOVEATED_FEATURE_BITS_QCOM 0x8BFB +#define GL_TEXTURE_FOVEATED_MIN_PIXEL_DENSITY_QCOM 0x8BFC +#define GL_TEXTURE_FOVEATED_FEATURE_QUERY_QCOM 0x8BFD +#define GL_TEXTURE_FOVEATED_NUM_FOCAL_POINTS_QUERY_QCOM 0x8BFE +#define GL_FRAMEBUFFER_INCOMPLETE_FOVEATION_QCOM 0x8BFF +typedef void (GL_APIENTRYP PFNGLTEXTUREFOVEATIONPARAMETERSQCOMPROC) (GLuint texture, GLuint layer, GLuint focalPoint, GLfloat focalX, GLfloat focalY, GLfloat gainX, GLfloat gainY, GLfloat foveaArea); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTextureFoveationParametersQCOM (GLuint texture, GLuint layer, GLuint focalPoint, GLfloat focalX, GLfloat focalY, GLfloat gainX, GLfloat gainY, GLfloat foveaArea); +#endif +#endif /* GL_QCOM_texture_foveated */ + +#ifndef GL_QCOM_texture_foveated2 +#define GL_QCOM_texture_foveated2 1 +#define GL_TEXTURE_FOVEATED_CUTOFF_DENSITY_QCOM 
0x96A0 +#endif /* GL_QCOM_texture_foveated2 */ + +#ifndef GL_QCOM_texture_foveated_subsampled_layout +#define GL_QCOM_texture_foveated_subsampled_layout 1 +#define GL_FOVEATION_SUBSAMPLED_LAYOUT_METHOD_BIT_QCOM 0x00000004 +#define GL_MAX_SHADER_SUBSAMPLED_IMAGE_UNITS_QCOM 0x8FA1 +#endif /* GL_QCOM_texture_foveated_subsampled_layout */ + +#ifndef GL_QCOM_tiled_rendering +#define GL_QCOM_tiled_rendering 1 +#define GL_COLOR_BUFFER_BIT0_QCOM 0x00000001 +#define GL_COLOR_BUFFER_BIT1_QCOM 0x00000002 +#define GL_COLOR_BUFFER_BIT2_QCOM 0x00000004 +#define GL_COLOR_BUFFER_BIT3_QCOM 0x00000008 +#define GL_COLOR_BUFFER_BIT4_QCOM 0x00000010 +#define GL_COLOR_BUFFER_BIT5_QCOM 0x00000020 +#define GL_COLOR_BUFFER_BIT6_QCOM 0x00000040 +#define GL_COLOR_BUFFER_BIT7_QCOM 0x00000080 +#define GL_DEPTH_BUFFER_BIT0_QCOM 0x00000100 +#define GL_DEPTH_BUFFER_BIT1_QCOM 0x00000200 +#define GL_DEPTH_BUFFER_BIT2_QCOM 0x00000400 +#define GL_DEPTH_BUFFER_BIT3_QCOM 0x00000800 +#define GL_DEPTH_BUFFER_BIT4_QCOM 0x00001000 +#define GL_DEPTH_BUFFER_BIT5_QCOM 0x00002000 +#define GL_DEPTH_BUFFER_BIT6_QCOM 0x00004000 +#define GL_DEPTH_BUFFER_BIT7_QCOM 0x00008000 +#define GL_STENCIL_BUFFER_BIT0_QCOM 0x00010000 +#define GL_STENCIL_BUFFER_BIT1_QCOM 0x00020000 +#define GL_STENCIL_BUFFER_BIT2_QCOM 0x00040000 +#define GL_STENCIL_BUFFER_BIT3_QCOM 0x00080000 +#define GL_STENCIL_BUFFER_BIT4_QCOM 0x00100000 +#define GL_STENCIL_BUFFER_BIT5_QCOM 0x00200000 +#define GL_STENCIL_BUFFER_BIT6_QCOM 0x00400000 +#define GL_STENCIL_BUFFER_BIT7_QCOM 0x00800000 +#define GL_MULTISAMPLE_BUFFER_BIT0_QCOM 0x01000000 +#define GL_MULTISAMPLE_BUFFER_BIT1_QCOM 0x02000000 +#define GL_MULTISAMPLE_BUFFER_BIT2_QCOM 0x04000000 +#define GL_MULTISAMPLE_BUFFER_BIT3_QCOM 0x08000000 +#define GL_MULTISAMPLE_BUFFER_BIT4_QCOM 0x10000000 +#define GL_MULTISAMPLE_BUFFER_BIT5_QCOM 0x20000000 +#define GL_MULTISAMPLE_BUFFER_BIT6_QCOM 0x40000000 +#define GL_MULTISAMPLE_BUFFER_BIT7_QCOM 0x80000000 +typedef void (GL_APIENTRYP PFNGLSTARTTILINGQCOMPROC) (GLuint x, GLuint y, GLuint width, GLuint height, GLbitfield preserveMask); +typedef void (GL_APIENTRYP PFNGLENDTILINGQCOMPROC) (GLbitfield preserveMask); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glStartTilingQCOM (GLuint x, GLuint y, GLuint width, GLuint height, GLbitfield preserveMask); +GL_APICALL void GL_APIENTRY glEndTilingQCOM (GLbitfield preserveMask); +#endif +#endif /* GL_QCOM_tiled_rendering */ + +#ifndef GL_QCOM_writeonly_rendering +#define GL_QCOM_writeonly_rendering 1 +#define GL_WRITEONLY_RENDERING_QCOM 0x8823 +#endif /* GL_QCOM_writeonly_rendering */ + +#ifndef GL_VIV_shader_binary +#define GL_VIV_shader_binary 1 +#define GL_SHADER_BINARY_VIV 0x8FC4 +#endif /* GL_VIV_shader_binary */ + +/* ANGLE GLES2 extensions */ +#include "gl2ext_angle.h" + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/angle/include/GLES2/gl2ext_angle.h b/vendor/angle/include/GLES2/gl2ext_angle.h new file mode 100644 index 00000000..4b948b5e --- /dev/null +++ b/vendor/angle/include/GLES2/gl2ext_angle.h @@ -0,0 +1,575 @@ +// +// Copyright 2017 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// gl2ext_angle.h: ANGLE modifications to the gl2ext.h header file. +// Currently we don't include this file directly, we patch gl2ext.h +// to include it implicitly so it is visible throughout our code. 
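[Editor's note, not part of the vendored header: because the patched gl2ext.h above ends with `#include "gl2ext_angle.h"`, the ANGLE enums and PFN typedefs declared in this file are visible anywhere gl2ext.h is included. As a minimal sketch of how such an entry point is typically resolved at runtime, assuming an ANGLE-backed EGL context is current; the typedef and function names come from the declarations below, while the helper name and error handling are purely illustrative:

// Illustrative only -- not part of the vendored header.
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>  // pulls in gl2ext_angle.h via the patched include

static void EnableAngleExtensionIfPossible(const char* name) {
  // GL_ANGLE_request_extension: requestable extensions can be enabled at runtime.
  PFNGLREQUESTEXTENSIONANGLEPROC requestExtension =
      (PFNGLREQUESTEXTENSIONANGLEPROC)eglGetProcAddress("glRequestExtensionANGLE");
  if (requestExtension == NULL) {
    return;  // The driver does not expose GL_ANGLE_request_extension.
  }
  requestExtension(name);
}

End of editor's note; the vendored gl2ext_angle.h content continues below.]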
+ +#ifndef INCLUDE_GLES2_GL2EXT_ANGLE_H_ +#define INCLUDE_GLES2_GL2EXT_ANGLE_H_ + +// clang-format off + +#ifndef GL_ANGLE_client_arrays +#define GL_ANGLE_client_arrays 1 +#define GL_CLIENT_ARRAYS_ANGLE 0x93AA +#endif /* GL_ANGLE_client_arrays */ + +#ifndef GL_ANGLE_request_extension +#define GL_ANGLE_request_extension 1 +#define GL_REQUESTABLE_EXTENSIONS_ANGLE 0x93A8 +#define GL_NUM_REQUESTABLE_EXTENSIONS_ANGLE 0x93A9 +typedef void (GL_APIENTRYP PFNGLREQUESTEXTENSIONANGLEPROC) (const GLchar *name); +typedef void (GL_APIENTRYP PFNGLDISABLEEXTENSIONANGLEPROC) (const GLchar *name); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRequestExtensionANGLE (const GLchar *name); +#endif +#endif /* GL_ANGLE_webgl_compatibility */ + +#ifndef GL_ANGLE_robust_resource_initialization +#define GL_ANGLE_robust_resource_initialization 1 +#define GL_ROBUST_RESOURCE_INITIALIZATION_ANGLE 0x93AB +#define GL_RESOURCE_INITIALIZED_ANGLE 0x969F +#endif /* GL_ANGLE_robust_resource_initialization */ + +#ifndef GL_ANGLE_provoking_vertex +#define GL_ANGLE_provoking_vertex 1 +#define GL_FIRST_VERTEX_CONVENTION 0x8E4D +#define GL_LAST_VERTEX_CONVENTION 0x8E4E +#define GL_PROVOKING_VERTEX 0x8E4F +typedef void (GL_APIENTRYP PFNGLPROVOKINGVERTEXANGLEPROC) (GLenum); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glProvokingVertexANGLE(GLenum mode); +#endif +#endif /* GL_ANGLE_provoking_vertex */ + +#ifndef GL_CHROMIUM_framebuffer_mixed_samples +#define GL_CHROMIUM_frambuffer_mixed_samples 1 +#define GL_COVERAGE_MODULATION_CHROMIUM 0x9332 +typedef void (GL_APIENTRYP PFNGLCOVERAGEMODULATIONCHROMIUMPROC) (GLenum components); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCoverageModulationCHROMIUM(GLenum components); +#endif +#endif /* GL_CHROMIUM_framebuffer_mixed_samples */ + +#ifndef GL_CHROMIUM_bind_generates_resource +#define GL_CHROMIUM_bind_generates_resource 1 +#define GL_BIND_GENERATES_RESOURCE_CHROMIUM 0x9244 +#endif /* GL_CHROMIUM_bind_generates_resource */ + +#ifndef GL_ANGLE_memory_size +#define GL_ANGLE_memory_size +#define GL_MEMORY_SIZE_ANGLE 0x93AD +#endif /* GL_ANGLE_memory_size */ + +// needed by NV_path_rendering (and thus CHROMIUM_path_rendering) +// but CHROMIUM_path_rendering only needs MatrixLoadfEXT, MatrixLoadIdentityEXT +#ifndef GL_EXT_direct_state_access +#define GL_EXT_direct_state_access 1 +typedef void(GL_APIENTRYP PFNGLMATRIXLOADFEXTPROC)(GLenum matrixMode, const GLfloat *m); +typedef void(GL_APIENTRYP PFNGLMATRIXLOADIDENTITYEXTPROC)(GLenum matrixMode); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glMatrixLoadfEXT(GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixLoadIdentityEXT(GLenum matrixMode); +#endif +#endif /* GL_EXT_direct_state_access */ + +#ifndef GL_CHROMIUM_copy_texture +#define GL_CHROMIUM_copy_texture 1 +typedef void(GL_APIENTRYP PFNGLCOPYTEXTURECHROMIUMPROC)(GLuint sourceId, + GLint sourceLevel, + GLenum destTarget, + GLuint destId, + GLint destLevel, + GLint internalFormat, + GLenum destType, + GLboolean unpackFlipY, + GLboolean unpackPremultiplyAlpha, + GLboolean unpackUnmultiplyAlpha); +typedef void(GL_APIENTRYP PFNGLCOPYSUBTEXTURECHROMIUMPROC)(GLuint sourceId, + GLint sourceLevel, + GLenum destTarget, + GLuint destId, + GLint destLevel, + GLint xoffset, + GLint yoffset, + GLint x, + GLint y, + GLsizei width, + GLsizei height, + GLboolean unpackFlipY, + GLboolean unpackPremultiplyAlpha, + GLboolean unpackUnmultiplyAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCopyTextureCHROMIUM(GLuint 
sourceId, + GLint sourceLevel, + GLenum destTarget, + GLuint destId, + GLint destLevel, + GLint internalFormat, + GLenum destType, + GLboolean unpackFlipY, + GLboolean unpackPremultiplyAlpha, + GLboolean unpackUnmultiplyAlpha); +GL_APICALL void GL_APIENTRY glCopySubTextureCHROMIUM(GLuint sourceId, + GLint sourceLevel, + GLenum destTarget, + GLuint destId, + GLint destLevel, + GLint xoffset, + GLint yoffset, + GLint x, + GLint y, + GLsizei width, + GLsizei height, + GLboolean unpackFlipY, + GLboolean unpackPremultiplyAlpha, + GLboolean unpackUnmultiplyAlpha); +#endif +#endif /* GL_CHROMIUM_copy_texture */ + +#ifndef GL_CHROMIUM_compressed_copy_texture +#define GL_CHROMIUM_compressed_copy_texture 1 +typedef void(GL_APIENTRYP PFNGLCOMPRESSEDCOPYTEXTURECHROMIUMPROC)(GLuint sourceId, GLuint destId); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCompressedCopyTextureCHROMIUM(GLuint sourceId, GLuint destId); +#endif +#endif /* GL_CHROMIUM_compressed_copy_texture */ + + +#ifndef GL_ANGLE_copy_texture_3d +#define GL_ANGLE_copy_texture_3d 1 +typedef void(GL_APIENTRYP PFNGLCOPYTEXTURE3DANGLEPROC)(GLuint sourceId, + GLint sourceLevel, + GLenum destTarget, + GLuint destId, + GLint destLevel, + GLint internalFormat, + GLenum destType, + GLboolean unpackFlipY, + GLboolean unpackPremultiplyAlpha, + GLboolean unpackUnmultiplyAlpha); +typedef void(GL_APIENTRYP PFNGLCOPYSUBTEXTURE3DANGLEPROC)(GLuint sourceId, + GLint sourceLevel, + GLenum destTarget, + GLuint destId, + GLint destLevel, + GLint xoffset, + GLint yoffset, + GLint zoffset, + GLint x, + GLint y, + GLint z, + GLsizei width, + GLsizei height, + GLsizei depth, + GLboolean unpackFlipY, + GLboolean unpackPremultiplyAlpha, + GLboolean unpackUnmultiplyAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCopyTexture3DANGLE(GLuint sourceId, + GLint sourceLevel, + GLenum destTarget, + GLuint destId, + GLint destLevel, + GLint internalFormat, + GLenum destType, + GLboolean unpackFlipY, + GLboolean unpackPremultiplyAlpha, + GLboolean unpackUnmultiplyAlpha); +GL_APICALL void GL_APIENTRY glCopySubTexture3DANGLE(GLuint sourceId, + GLint sourceLevel, + GLenum destTarget, + GLuint destId, + GLint destLevel, + GLint xoffset, + GLint yoffset, + GLint zoffset, + GLint x, + GLint y, + GLint z, + GLsizei width, + GLsizei height, + GLsizei depth, + GLboolean unpackFlipY, + GLboolean unpackPremultiplyAlpha, + GLboolean unpackUnmultiplyAlpha); +#endif +#endif /* GL_ANGLE_copy_texture_3d */ + +#ifndef GL_CHROMIUM_sync_query +#define GL_CHROMIUM_sync_query 1 +#define GL_COMMANDS_COMPLETED_CHROMIUM 0x84F7 +#endif /* GL_CHROMIUM_sync_query */ + +#ifndef GL_EXT_texture_compression_s3tc_srgb +#define GL_EXT_texture_compression_s3tc_srgb 1 +#define GL_COMPRESSED_SRGB_S3TC_DXT1_EXT 0x8C4C +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT 0x8C4D +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT 0x8C4E +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT 0x8C4F +#endif /* GL_EXT_texture_compression_s3tc_srgb */ + +#ifndef GL_ANGLE_lossy_etc_decode +#define GL_ANGLE_lossy_etc_decode 1 +#define GL_ETC1_RGB8_LOSSY_DECODE_ANGLE 0x9690 +#define GL_COMPRESSED_R11_LOSSY_DECODE_EAC_ANGLE 0x9691 +#define GL_COMPRESSED_SIGNED_R11_LOSSY_DECODE_EAC_ANGLE 0x9692 +#define GL_COMPRESSED_RG11_LOSSY_DECODE_EAC_ANGLE 0x9693 +#define GL_COMPRESSED_SIGNED_RG11_LOSSY_DECODE_EAC_ANGLE 0x9694 +#define GL_COMPRESSED_RGB8_LOSSY_DECODE_ETC2_ANGLE 0x9695 +#define GL_COMPRESSED_SRGB8_LOSSY_DECODE_ETC2_ANGLE 0x9696 +#define GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_LOSSY_DECODE_ETC2_ANGLE 
0x9697 +#define GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_LOSSY_DECODE_ETC2_ANGLE 0x9698 +#define GL_COMPRESSED_RGBA8_LOSSY_DECODE_ETC2_EAC_ANGLE 0x9699 +#define GL_COMPRESSED_SRGB8_ALPHA8_LOSSY_DECODE_ETC2_EAC_ANGLE 0x969A +#endif /* GL_ANGLE_lossy_etc_decode */ + +#ifndef GL_ANGLE_robust_client_memory +#define GL_ANGLE_robust_client_memory 1 +typedef void (GL_APIENTRYP PFNGLGETBOOLEANVROBUSTANGLEPROC) (GLenum pname, GLsizei bufSize, GLsizei *length, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERIVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETFLOATVROBUSTANGLEPROC) (GLenum pname, GLsizei bufSize, GLsizei *length, GLfloat *data); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVROBUSTANGLEPROC) (GLenum target, GLenum attachment, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERVROBUSTANGLEPROC) (GLenum pname, GLsizei bufSize, GLsizei *length, GLint *data); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMIVROBUSTANGLEPROC) (GLuint program, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERIVROBUSTANGLEPROC) (GLuint shader, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERFVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMFVROBUSTANGLEPROC) (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMIVROBUSTANGLEPROC) (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBFVROBUSTANGLEPROC) (GLuint index, GLenum pname, GLsizei bufSize, GLsizei *length, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIVROBUSTANGLEPROC) (GLuint index, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVROBUSTANGLEPROC) (GLuint index, GLenum pname, GLsizei bufSize, GLsizei *length, void **pointer); +typedef void (GL_APIENTRYP PFNGLREADPIXELSROBUSTANGLEPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, GLsizei *length, GLsizei *columns, GLsizei *rows, void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DROBUSTANGLEPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, GLsizei bufSize, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE2DROBUSTANGLEPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE3DROBUSTANGLEPROC) (GLenum target, GLint level, GLint 
internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, GLsizei bufSize, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE3DROBUSTANGLEPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, GLsizei bufSize, const void *pixels); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DROBUSTANGLEPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, GLsizei bufSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DROBUSTANGLEPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, GLsizei bufSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DROBUSTANGLEPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, GLsizei bufSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DROBUSTANGLEPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, GLsizei bufSize, const void *data); +typedef void (GL_APIENTRYP PFNGLGETQUERYIVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUIVROBUSTANGLEPROC) (GLuint id, GLenum pname, GLsizei bufSize, GLsizei *length, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, void **params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERI_VROBUSTANGLEPROC) (GLenum target, GLuint index, GLsizei bufSize, GLsizei *length, GLint *data); +typedef void (GL_APIENTRYP PFNGLGETINTERNALFORMATIVROBUSTANGLEPROC) (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIIVROBUSTANGLEPROC) (GLuint index, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIUIVROBUSTANGLEPROC) (GLuint index, GLenum pname, GLsizei bufSize, GLsizei *length, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMUIVROBUSTANGLEPROC) (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKIVROBUSTANGLEPROC) (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VROBUSTANGLEPROC) (GLenum pname, GLsizei bufSize, GLsizei *length, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64I_VROBUSTANGLEPROC) (GLenum target, GLuint index, GLsizei bufSize, GLsizei *length, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERI64VROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIVROBUSTANGLEPROC) (GLuint sampler, GLenum pname, GLsizei bufSize, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFVROBUSTANGLEPROC) (GLuint sampler, GLenum pname, GLsizei bufSize, const GLfloat *param); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIVROBUSTANGLEPROC) (GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei *length, 
GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERFVROBUSTANGLEPROC) (GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei *length, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERPARAMETERIVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINTERFACEIVROBUSTANGLEPROC) (GLuint program, GLenum programInterface, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANI_VROBUSTANGLEPROC) (GLenum target, GLuint index, GLsizei bufSize, GLsizei *length, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLGETMULTISAMPLEFVROBUSTANGLEPROC) (GLenum pname, GLuint index, GLsizei bufSize, GLsizei *length, GLfloat *val); +typedef void (GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERIVROBUSTANGLEPROC) (GLenum target, GLint level, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERFVROBUSTANGLEPROC) (GLenum target, GLint level, GLenum pname, GLsizei bufSize, GLsizei *length, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETPOINTERVROBUSTANGLEROBUSTANGLEPROC) (GLenum pname, GLsizei bufSize, GLsizei *length, void **params); +typedef void (GL_APIENTRYP PFNGLREADNPIXELSROBUSTANGLEPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, GLsizei *length, GLsizei *columns, GLsizei *rows, void *data); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMFVROBUSTANGLEPROC) (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMIVROBUSTANGLEPROC) (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMUIVROBUSTANGLEPROC) (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLuint *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIIVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIUIVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, const GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIIVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIUIVROBUSTANGLEPROC) (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLuint *params); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIIVROBUSTANGLEPROC) (GLuint sampler, GLenum pname, GLsizei bufSize, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIUIVROBUSTANGLEPROC) (GLuint sampler, GLenum pname, GLsizei bufSize, const GLuint *param); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIIVROBUSTANGLEPROC) (GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVROBUSTANGLEPROC) (GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei *length, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTIVROBUSTANGLEPROC)(GLuint id, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTI64VROBUSTANGLEPROC)(GLuint id, GLenum pname, GLsizei bufSize, GLsizei *length, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUI64VROBUSTANGLEPROC)(GLuint id, GLenum pname, GLsizei bufSize, GLsizei *length, GLuint64 *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void 
GL_APIENTRY glGetBooleanvRobustANGLE (GLenum pname, GLsizei bufSize, GLsizei *length, GLboolean *data); +GL_APICALL void GL_APIENTRY glGetBufferParameterivRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetFloatvRobustANGLE (GLenum pname, GLsizei bufSize, GLsizei *length, GLfloat *data); +GL_APICALL void GL_APIENTRY glGetFramebufferAttachmentParameterivRobustANGLE (GLenum target, GLenum attachment, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetIntegervRobustANGLE (GLenum pname, GLsizei bufSize, GLsizei *length, GLint *data); +GL_APICALL void GL_APIENTRY glGetProgramivRobustANGLE (GLuint program, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetRenderbufferParameterivRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderivRobustANGLE (GLuint shader, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterfvRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetTexParameterivRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetUniformfvRobustANGLE (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetUniformivRobustANGLE (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribfvRobustANGLE (GLuint index, GLenum pname, GLsizei bufSize, GLsizei *length, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribivRobustANGLE (GLuint index, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribPointervRobustANGLE (GLuint index, GLenum pname, GLsizei bufSize, GLsizei *length, void **pointer); +GL_APICALL void GL_APIENTRY glReadPixelsRobustANGLE (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, GLsizei *length, GLsizei *columns, GLsizei *rows, void *pixels); +GL_APICALL void GL_APIENTRY glTexImage2DRobustANGLE (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, GLsizei bufSize, const void *pixels); +GL_APICALL void GL_APIENTRY glTexParameterfvRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, const GLfloat *params); +GL_APICALL void GL_APIENTRY glTexParameterivRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, const GLint *params); +GL_APICALL void GL_APIENTRY glTexSubImage2DRobustANGLE (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, const void *pixels); +GL_APICALL void GL_APIENTRY glTexImage3DRobustANGLE (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, GLsizei bufSize, const void *pixels); +GL_APICALL void GL_APIENTRY glTexSubImage3DRobustANGLE (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, GLsizei bufSize, const void *pixels); +GL_APICALL void GL_APIENTRY glCompressedTexImage2DRobustANGLE(GLenum target, GLint level, GLenum internalformat, GLsizei 
width, GLsizei height, GLint border, GLsizei imageSize, GLsizei bufSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage2DRobustANGLE(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, GLsizei bufSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexImage3DRobustANGLE(GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, GLsizei bufSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage3DRobustANGLE(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, GLsizei bufSize, const void *data); +GL_APICALL void GL_APIENTRY glGetQueryivRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectuivRobustANGLE (GLuint id, GLenum pname, GLsizei bufSize, GLsizei *length, GLuint *params); +GL_APICALL void GL_APIENTRY glGetBufferPointervRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, void **params); +GL_APICALL void GL_APIENTRY glGetIntegeri_vRobustANGLE (GLenum target, GLuint index, GLsizei bufSize, GLsizei *length, GLint *data); +GL_APICALL void GL_APIENTRY glGetInternalformativRobustANGLE (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribIivRobustANGLE (GLuint index, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribIuivRobustANGLE (GLuint index, GLenum pname, GLsizei bufSize, GLsizei *length, GLuint *params); +GL_APICALL void GL_APIENTRY glGetUniformuivRobustANGLE (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLuint *params); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockivRobustANGLE (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetInteger64vRobustANGLE (GLenum pname, GLsizei bufSize, GLsizei *length, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetInteger64i_vRobustANGLE (GLenum target, GLuint index, GLsizei bufSize, GLsizei *length, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetBufferParameteri64vRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint64 *params); +GL_APICALL void GL_APIENTRY glSamplerParameterivRobustANGLE (GLuint sampler, GLenum pname, GLsizei bufSize, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterfvRobustANGLE (GLuint sampler, GLenum pname, GLsizei bufSize, const GLfloat *param); +GL_APICALL void GL_APIENTRY glGetSamplerParameterivRobustANGLE (GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterfvRobustANGLE (GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei *length, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetFramebufferParameterivRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetProgramInterfaceivRobustANGLE (GLuint program, GLenum programInterface, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetBooleani_vRobustANGLE (GLenum target, GLuint index, GLsizei bufSize, GLsizei *length, GLboolean *data); +GL_APICALL void GL_APIENTRY 
glGetMultisamplefvRobustANGLE (GLenum pname, GLuint index, GLsizei bufSize, GLsizei *length, GLfloat *val); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterivRobustANGLE (GLenum target, GLint level, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterfvRobustANGLE (GLenum target, GLint level, GLenum pname, GLsizei bufSize, GLsizei *length, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetPointervRobustANGLERobustANGLE (GLenum pname, GLsizei bufSize, GLsizei *length, void **params); +GL_APICALL void GL_APIENTRY glReadnPixelsRobustANGLE (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, GLsizei *length, GLsizei *columns, GLsizei *rows, void *data); +GL_APICALL void GL_APIENTRY glGetnUniformfvRobustANGLE (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetnUniformivRobustANGLE (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetnUniformuivRobustANGLE (GLuint program, GLint location, GLsizei bufSize, GLsizei *length, GLuint *params); +GL_APICALL void GL_APIENTRY glTexParameterIivRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, const GLint *params); +GL_APICALL void GL_APIENTRY glTexParameterIuivRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, const GLuint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIivRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIuivRobustANGLE (GLenum target, GLenum pname, GLsizei bufSize, GLsizei *length, GLuint *params); +GL_APICALL void GL_APIENTRY glSamplerParameterIivRobustANGLE (GLuint sampler, GLenum pname, GLsizei bufSize, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterIuivRobustANGLE (GLuint sampler, GLenum pname, GLsizei bufSize, const GLuint *param); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIivRobustANGLE (GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIuivRobustANGLE (GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei *length, GLuint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectivRobustANGLE(GLuint id, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjecti64vRobustANGLE(GLuint id, GLenum pname, GLsizei bufSize, GLsizei *length, GLint64 *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectui64vRobustANGLE(GLuint id, GLenum pname, GLsizei bufSize, GLsizei *length, GLuint64 *params); +#endif +#endif /* GL_ANGLE_robust_client_memory */ + +#ifndef GL_ANGLE_program_cache_control +#define GL_ANGLE_program_cache_control 1 +#define GL_PROGRAM_CACHE_ENABLED_ANGLE 0x93AC +#endif /* GL_ANGLE_program_cache_control */ + +#ifndef GL_ANGLE_texture_rectangle +#define GL_ANGLE_texture_rectangle 1 +#define GL_MAX_RECTANGLE_TEXTURE_SIZE_ANGLE 0x84F8 +#define GL_TEXTURE_RECTANGLE_ANGLE 0x84F5 +#define GL_TEXTURE_BINDING_RECTANGLE_ANGLE 0x84F6 +#define GL_SAMPLER_2D_RECT_ANGLE 0x8B63 +#endif /* GL_ANGLE_texture_rectangle */ + +#ifndef GL_ANGLE_texture_multisample +#define GL_ANGLE_texture_multisample 1 +#define GL_SAMPLE_POSITION_ANGLE 0x8E50 +#define GL_SAMPLE_MASK_ANGLE 0x8E51 +#define GL_SAMPLE_MASK_VALUE_ANGLE 0x8E52 +#define GL_TEXTURE_2D_MULTISAMPLE_ANGLE 0x9100 +#define GL_MAX_SAMPLE_MASK_WORDS_ANGLE 0x8E59 +#define 
GL_MAX_COLOR_TEXTURE_SAMPLES_ANGLE 0x910E +#define GL_MAX_DEPTH_TEXTURE_SAMPLES_ANGLE 0x910F +#define GL_MAX_INTEGER_SAMPLES_ANGLE 0x9110 +#define GL_TEXTURE_BINDING_2D_MULTISAMPLE_ANGLE 0x9104 +#define GL_TEXTURE_SAMPLES_ANGLE 0x9106 +#define GL_TEXTURE_FIXED_SAMPLE_LOCATIONS_ANGLE 0x9107 +typedef void(GL_APIENTRYP PFNGLTEXSTORAGE2DMULTISAMPLEANGLEPROC)(GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void(GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERFVANGLEPROC)(GLenum target, GLint level, GLenum pname, GLfloat *params); +typedef void(GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERIVANGLEPROC)(GLenum target, GLint level, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETMULTISAMPLEFVANGLEPROC)(GLenum pname, GLuint index, GLfloat *val); +typedef void (GL_APIENTRYP PFNGLSAMPLEMASKIANGLEPROC)(GLuint maskNumber, GLbitfield mask); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexStorage2DMultisampleANGLE(GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterfvANGLE(GLenum target, GLint level, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterivANGLE(GLenum target, GLint level, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetMultisamplefvANGLE(GLenum pname, GLuint index, GLfloat *val); +GL_APICALL void GL_APIENTRY glSampleMaskiANGLE(GLuint maskNumber, GLbitfield mask); +#endif +#endif // !GL_ANGLE_texture_multisample + +#ifndef GL_ANGLE_get_tex_level_parameter +#define GL_ANGLE_get_tex_level_parameter 1 +typedef void(GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERFVANGLEPROC)(GLenum target, GLint level, GLenum pname, GLfloat *params); +typedef void(GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERIVANGLEPROC)(GLenum target, GLint level, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetTexLevelParameterfvANGLE(GLenum target, GLint level, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterivANGLE(GLenum target, GLint level, GLenum pname, GLint *params); +#endif +#endif /* GL_ANGLE_get_tex_level_parameter */ + +#ifndef GL_ANGLE_explicit_context +#define GL_ANGLE_explicit_context +typedef void *GLeglContext; +#include "gl2ext_explicit_context_autogen.inc" +#include "../GLES3/gl3ext_explicit_context_autogen.inc" +#include "../GLES3/gl31ext_explicit_context_autogen.inc" +#include "../GLES3/gl32.h" +#include "../GLES3/gl32ext_explicit_context_autogen.inc" +#endif /* GL_ANGLE_explicit_context */ + +#ifndef GL_ANGLE_multi_draw +#define GL_ANGLE_multi_draw 1 +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSANGLEPROC) (GLenum mode, const GLint *firsts, const GLsizei *counts, GLsizei drawcount); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSINSTANCEDANGLEPROC) (GLenum mode, const GLint *firsts, const GLsizei *counts, const GLsizei *instanceCounts, GLsizei drawcount); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSANGLEPROC) (GLenum mode, const GLsizei *counts, GLenum type, const GLvoid* const *indices, GLsizei drawcount); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSINSTANCEDANGLEPROC) (GLenum mode, const GLsizei *counts, GLenum type, const GLvoid* const *indices, const GLsizei *instanceCounts, GLsizei drawcount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glMultiDrawArraysANGLE (GLenum mode, const GLint *firsts, const GLsizei *counts, GLsizei drawcount); +GL_APICALL void 
GL_APIENTRY glMultiDrawArraysInstancedANGLE (GLenum mode, const GLint *firsts, const GLsizei *counts, const GLsizei *instanceCounts, GLsizei drawcount); +GL_APICALL void GL_APIENTRY glMultiDrawElementsANGLE (GLenum mode, const GLsizei *counts, GLenum type, const GLvoid* const *indices, GLsizei drawcount); +GL_APICALL void GL_APIENTRY glMultiDrawElementsInstancedANGLE (GLenum mode, const GLsizei *counts, GLenum type, const GLvoid* const *indices, const GLsizei *instanceCounts, GLsizei drawcount); +#endif +#endif /* GL_ANGLE_multi_draw */ + +#ifndef GL_ANGLE_base_vertex_base_instance +#define GL_ANGLE_base_vertex_base_instance 1 +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDBASEINSTANCEANGLEPROC) (GLenum mode, GLint first, GLsizei count, GLsizei instanceCount, GLuint baseInstance); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXBASEINSTANCEANGLEPROC) (GLenum mode, GLsizei count, GLenum type, const GLvoid *indices, GLsizei instanceCount, GLint baseVertex, GLuint baseInstance); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSINSTANCEDBASEINSTANCEANGLEPROC) (GLenum mode, const GLsizei *firsts, const GLsizei *counts, const GLsizei *instanceCounts, const GLuint *baseInstances, GLsizei drawCount); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSINSTANCEDBASEVERTEXBASEINSTANCEANGLEPROC) (GLenum mode, const GLsizei *counts, GLenum type, const GLvoid* const *indices, const GLsizei *instanceCounts, const GLint *baseVertices, const GLuint *baseInstances, GLsizei drawCount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawArraysInstancedBaseInstanceANGLE (GLenum mode, GLint first, GLsizei count, GLsizei instanceCount, GLuint baseInstance); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseVertexBaseInstanceANGLE (GLenum mode, GLsizei count, GLenum type, const GLvoid *indices, GLsizei instanceCount, GLint baseVertex, GLuint baseInstance); +GL_APICALL void GL_APIENTRY glMultiDrawArraysInstancedBaseInstanceANGLE (GLenum mode, const GLsizei *firsts, const GLsizei *counts, const GLsizei *instanceCounts, const GLuint *baseInstances, GLsizei drawCount); +GL_APICALL void GL_APIENTRY glMultiDrawElementsInstancedBaseVertexBaseInstanceANGLE (GLenum mode, const GLsizei *counts, GLenum type, const GLvoid* const *indices, const GLsizei *instanceCounts, const GLint *baseVertices, const GLuint *baseInstances, GLsizei drawCount); +#endif +#endif + +#ifndef GL_CHROMIUM_bind_uniform_location +#define GL_CHROMIUM_bind_uniform_location 1 +typedef void (GL_APIENTRYP PFNGLBINDUNIFORMLOCATIONCHROMIUMPROC)(GLuint program, GLint location, const GLchar *name); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBindUniformLocationCHROMIUM(GLuint program, GLint location, const GLchar *name); +#endif +#endif /* GL_CHROMIUM_bind_uniform_location */ + +/* GL_CHROMIUM_lose_context */ +#ifndef GL_CHROMIUM_lose_context +#define GL_CHROMIUM_lose_context 1 +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glLoseContextCHROMIUM(GLenum current, GLenum other); +#endif +typedef void (GL_APIENTRYP PFNGLLOSECONTEXTCHROMIUMPROC) (GLenum current, GLenum other); +#endif /* GL_CHROMIUM_lose_context */ + +#ifndef GL_ANGLE_texture_external_update +#define GL_ANGLE_texture_external_update 1 +#define GL_TEXTURE_NATIVE_ID_ANGLE 0x3481 +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DEXTERNALANGLEPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type); +typedef void (GL_APIENTRYP PFNGLINVALIDATETEXTUREANGLEPROC) (GLenum target); 
+#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexImage2DExternalANGLE (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type); +GL_APICALL void GL_APIENTRY glInvalidateTextureANGLE (GLenum target); +#endif +#endif /* GL_ANGLE_texture_external_update */ + +#ifndef GL_ANGLE_get_image +#define GL_ANGLE_get_image +typedef void (GL_APIENTRYP PFNGLGETTEXIMAGEANGLEPROC) (GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERIMAGEANGLEPROC) (GLenum target, GLenum format, GLenum type, void *pixels); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetTexImageANGLE (GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +GL_APICALL void GL_APIENTRY glGetRenderbufferImageANGLE (GLenum target, GLenum format, GLenum type, void *pixels); +#endif +#endif /* GL_ANGLE_texture_external_update */ + +#ifndef GL_WEBGL_video_texture +#define GL_WEBGL_video_texture 1 +#define GL_TEXTURE_VIDEO_IMAGE_WEBGL 0x9248 +#define GL_SAMPLER_VIDEO_IMAGE_WEBGL 0x9249 +#endif /* GL_WEBGL_video_texture */ + +#ifndef GL_ANGLE_memory_object_flags +#define GL_ANGLE_memory_object_flags 1 +#define GL_CREATE_SPARSE_BINDING_BIT_ANGLE 0x00000001 +#define GL_CREATE_SPARSE_RESIDENCY_BIT_ANGLE 0x00000002 +#define GL_CREATE_SPARSE_ALIASED_BIT_ANGLE 0x00000004 +#define GL_CREATE_MUTABLE_FORMAT_BIT_ANGLE 0x00000008 +#define GL_CREATE_CUBE_COMPATIBLE_BIT_ANGLE 0x00000010 +#define GL_CREATE_ALIAS_BIT_ANGLE 0x00000400 +#define GL_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_ANGLE 0x00000040 +#define GL_CREATE_2D_ARRAY_COMPATIBLE_BIT_ANGLE 0x00000020 +#define GL_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_ANGLE 0x00000080 +#define GL_CREATE_EXTENDED_USAGE_BIT_ANGLE 0x00000100 +#define GL_CREATE_PROTECTED_BIT_ANGLE 0x00000800 +#define GL_CREATE_DISJOINT_BIT_ANGLE 0x00000200 +#define GL_CREATE_CORNER_SAMPLED_BIT_ANGLE 0x00002000 +#define GL_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_ANGLE 0x00001000 +#define GL_CREATE_SUBSAMPLED_BIT_ANGLE 0x00004000 +#define GL_USAGE_TRANSFER_SRC_BIT_ANGLE 0x00000001 +#define GL_USAGE_TRANSFER_DST_BIT_ANGLE 0x00000002 +#define GL_USAGE_SAMPLED_BIT_ANGLE 0x00000004 +#define GL_USAGE_STORAGE_BIT_ANGLE 0x00000008 +#define GL_USAGE_COLOR_ATTACHMENT_BIT_ANGLE 0x00000010 +#define GL_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT_ANGLE 0x00000020 +#define GL_USAGE_TRANSIENT_ATTACHMENT_BIT_ANGLE 0x00000040 +#define GL_USAGE_INPUT_ATTACHMENT_BIT_ANGLE 0x00000080 +#define GL_USAGE_SHADING_RATE_IMAGE_BIT_ANGLE 0x00000100 +#define GL_USAGE_FRAGMENT_DENSITY_MAP_BIT_ANGLE 0x00000200 +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEMFLAGS2DANGLEPROC) (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEMFLAGS2DMULTISAMPLEANGLEPROC) (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEMFLAGS3DANGLEPROC) (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEMFLAGS3DMULTISAMPLEANGLEPROC) (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, 
GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexStorageMemFlags2DANGLE (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +GL_APICALL void GL_APIENTRY glTexStorageMemFlags2DMultisampleANGLE (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +GL_APICALL void GL_APIENTRY glTexStorageMemFlags3DANGLE (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +GL_APICALL void GL_APIENTRY glTexStorageMemFlags3DMultisampleANGLE (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +#endif +#endif /* GL_ANGLE_memory_object_flags */ + +#ifndef GL_ANGLE_memory_object_fuchsia +#define GL_ANGLE_memory_object_fuchsia 1 +#define GL_HANDLE_TYPE_ZIRCON_VMO_ANGLE 0x93AE +typedef void(GL_APIENTRYP PFNGLIMPORTMEMORYZIRCONHANDLEANGLEPROC)(GLuint memory, + GLuint64 size, + GLenum handleType, + GLuint handle); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glImportMemoryZirconHandleANGLE(GLuint memory, + GLuint64 size, + GLenum handleType, + GLuint handle); +#endif +#endif /* GL_ANGLE_memory_object_fuchsia */ + +#ifndef GL_ANGLE_semaphore_fuchsia +#define GL_ANGLE_semaphore_fuchsia 1 +#define GL_HANDLE_TYPE_ZIRCON_EVENT_ANGLE 0x93AF +typedef void(GL_APIENTRYP PFNGLIMPORTSEMAPHOREZIRCONHANDLEANGLEPROC)(GLuint semaphore, + GLenum handleType, + GLuint handle); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glImportSemaphoreZirconHandleANGLE(GLuint memory, + GLenum handleType, + GLuint handle); +#endif +#endif /* GL_ANGLE_semaphore_fuchsia */ + +#ifndef GL_CHROMIUM_texture_filtering_hint +#define GL_CHROMIUM_texture_filtering_hint +#define GL_TEXTURE_FILTERING_HINT_CHROMIUM 0x8AF0 +#endif /* GL_CHROMIUM_texture_filtering_hint */ + +#ifndef GL_NV_robustness_video_memory +#define GL_NV_robustness_video_memory +#define GL_PURGED_CONTEXT_RESET_NV 0x92BB +#endif /* GL_NV_robustness_video_memory */ + +// clang-format on + +#endif // INCLUDE_GLES2_GL2EXT_ANGLE_H_ diff --git a/vendor/angle/include/GLES2/gl2ext_explicit_context_autogen.inc b/vendor/angle/include/GLES2/gl2ext_explicit_context_autogen.inc new file mode 100644 index 00000000..11dd4d85 --- /dev/null +++ b/vendor/angle/include/GLES2/gl2ext_explicit_context_autogen.inc @@ -0,0 +1,876 @@ +// GENERATED FILE - DO NOT EDIT. +// Generated by generate_entry_points.py using data from gl.xml and gl_angle_ext.xml. +// +// Copyright 2020 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+// +// gl2ext_explicit_context_autogen.inc: +// Function declarations for the EGL_ANGLE_explicit_context extension + +typedef void (GL_APIENTRYP PFNGLACTIVETEXTURECONTEXTANGLEPROC)(GLeglContext ctx, GLenum texture); +typedef void (GL_APIENTRYP PFNGLATTACHSHADERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLBINDATTRIBLOCATIONCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint index, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLBINDFRAMEBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLBINDRENDERBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLBINDTEXTURECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint texture); +typedef void (GL_APIENTRYP PFNGLBLENDCOLORCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATECONTEXTANGLEPROC)(GLeglContext ctx, GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCCONTEXTANGLEPROC)(GLeglContext ctx, GLenum sfactor, GLenum dfactor); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATECONTEXTANGLEPROC)(GLeglContext ctx, GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +typedef void (GL_APIENTRYP PFNGLBUFFERDATACONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizeiptr size, const void *data, GLenum usage); +typedef void (GL_APIENTRYP PFNGLBUFFERSUBDATACONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +typedef GLenum (GL_APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target); +typedef void (GL_APIENTRYP PFNGLCLEARCONTEXTANGLEPROC)(GLeglContext ctx, GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHFCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat d); +typedef void (GL_APIENTRYP PFNGLCLEARSTENCILCONTEXTANGLEPROC)(GLeglContext ctx, GLint s); +typedef void (GL_APIENTRYP PFNGLCOLORMASKCONTEXTANGLEPROC)(GLeglContext ctx, GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +typedef void (GL_APIENTRYP PFNGLCOMPILESHADERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint shader); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOPYTEXIMAGE2DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE2DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef GLuint (GL_APIENTRYP PFNGLCREATEPROGRAMCONTEXTANGLEPROC)(GLeglContext 
ctx); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum type); +typedef void (GL_APIENTRYP PFNGLCULLFACECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode); +typedef void (GL_APIENTRYP PFNGLDELETEBUFFERSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLDELETEFRAMEBUFFERSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program); +typedef void (GL_APIENTRYP PFNGLDELETERENDERBUFFERSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLDELETESHADERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint shader); +typedef void (GL_APIENTRYP PFNGLDELETETEXTURESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *textures); +typedef void (GL_APIENTRYP PFNGLDEPTHFUNCCONTEXTANGLEPROC)(GLeglContext ctx, GLenum func); +typedef void (GL_APIENTRYP PFNGLDEPTHMASKCONTEXTANGLEPROC)(GLeglContext ctx, GLboolean flag); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEFCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLDETACHSHADERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLDISABLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum cap); +typedef void (GL_APIENTRYP PFNGLDISABLEVERTEXATTRIBARRAYCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLint first, GLsizei count); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLENABLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum cap); +typedef void (GL_APIENTRYP PFNGLENABLEVERTEXATTRIBARRAYCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index); +typedef void (GL_APIENTRYP PFNGLFINISHCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLFLUSHCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (GL_APIENTRYP PFNGLFRONTFACECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode); +typedef void (GL_APIENTRYP PFNGLGENBUFFERSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLGENFRAMEBUFFERSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLGENRENDERBUFFERSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLGENTEXTURESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *textures); +typedef void (GL_APIENTRYP PFNGLGENERATEMIPMAPCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target); +typedef void (GL_APIENTRYP PFNGLGETACTIVEATTRIBCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETATTACHEDSHADERSCONTEXTANGLEPROC)(GLeglContext ctx, 
GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +typedef GLint (GL_APIENTRYP PFNGLGETATTRIBLOCATIONCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +typedef GLenum (GL_APIENTRYP PFNGLGETERRORCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLGETFLOATVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLfloat *data); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLint *data); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINFOLOGCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERINFOLOGCONTEXTANGLEPROC)(GLeglContext ctx, GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETSHADERPRECISIONFORMATCONTEXTANGLEPROC)(GLeglContext ctx, GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +typedef void (GL_APIENTRYP PFNGLGETSHADERSOURCECONTEXTANGLEPROC)(GLeglContext ctx, GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +typedef void (GL_APIENTRYP PFNGLGETSHADERIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint shader, GLenum pname, GLint *params); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGCONTEXTANGLEPROC)(GLeglContext ctx, GLenum name); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +typedef GLint (GL_APIENTRYP PFNGLGETUNIFORMLOCATIONCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMFVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLenum pname, void **pointer); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBFVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLHINTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum mode); +typedef GLboolean (GL_APIENTRYP PFNGLISBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint buffer); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDCONTEXTANGLEPROC)(GLeglContext ctx, GLenum cap); +typedef GLboolean (GL_APIENTRYP PFNGLISFRAMEBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint framebuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMCONTEXTANGLEPROC)(GLeglContext ctx, 
GLuint program); +typedef GLboolean (GL_APIENTRYP PFNGLISRENDERBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint renderbuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISSHADERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint shader); +typedef GLboolean (GL_APIENTRYP PFNGLISTEXTURECONTEXTANGLEPROC)(GLeglContext ctx, GLuint texture); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat width); +typedef void (GL_APIENTRYP PFNGLLINKPROGRAMCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program); +typedef void (GL_APIENTRYP PFNGLPIXELSTOREICONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat factor, GLfloat units); +typedef void (GL_APIENTRYP PFNGLREADPIXELSCONTEXTANGLEPROC)(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +typedef void (GL_APIENTRYP PFNGLRELEASESHADERCOMPILERCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGECONTEXTANGLEPROC)(GLeglContext ctx, GLfloat value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLSCISSORCONTEXTANGLEPROC)(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSHADERBINARYCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLSHADERSOURCECONTEXTANGLEPROC)(GLeglContext ctx, GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCCONTEXTANGLEPROC)(GLeglContext ctx, GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCSEPARATECONTEXTANGLEPROC)(GLeglContext ctx, GLenum face, GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKCONTEXTANGLEPROC)(GLeglContext ctx, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKSEPARATECONTEXTANGLEPROC)(GLeglContext ctx, GLenum face, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILOPCONTEXTANGLEPROC)(GLeglContext ctx, GLenum fail, GLenum zfail, GLenum zpass); +typedef void (GL_APIENTRYP PFNGLSTENCILOPSEPARATECONTEXTANGLEPROC)(GLeglContext ctx, GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERICONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE2DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLfloat v0); +typedef void 
(GL_APIENTRYP PFNGLUNIFORM1FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM1ICONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2ICONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3ICONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4ICONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLfloat x); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLfloat x, GLfloat y); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, const 
GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBPOINTERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLVIEWPORTCONTEXTANGLEPROC)(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLACTIVESHADERPROGRAMEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline, GLuint program); +typedef void (GL_APIENTRYP PFNGLBEGINQUERYEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLBINDFRAGDATALOCATIONEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint color, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLBINDFRAGDATALOCATIONINDEXEDEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint colorNumber, GLuint index, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLBINDPROGRAMPIPELINEEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLBINDVERTEXARRAYOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint array); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEIOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONIOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEIOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum src, GLenum dst); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCIOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum src, GLenum dst); +typedef void (GL_APIENTRYP PFNGLBLITFRAMEBUFFERANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef void (GL_APIENTRYP PFNGLBLITFRAMEBUFFERNVCONTEXTANGLEPROC)(GLeglContext ctx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef void (GL_APIENTRYP PFNGLBUFFERSTORAGEEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizeiptr size, const void *data, GLbitfield flags); +typedef void (GL_APIENTRYP PFNGLBUFFERSTORAGEEXTERNALEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +typedef void (GL_APIENTRYP PFNGLBUFFERSTORAGEMEMEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizeiptr size, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLCLIPCONTROLEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum origin, GLenum depth); +typedef void (GL_APIENTRYP PFNGLCOLORMASKIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +typedef void (GL_APIENTRYP PFNGLCOLORMASKIOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +typedef void (GL_APIENTRYP 
PFNGLCOMPRESSEDTEXIMAGE3DOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOPYIMAGESUBDATAEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +typedef void (GL_APIENTRYP PFNGLCOPYIMAGESUBDATAOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE3DOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLCREATEMEMORYOBJECTSEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *memoryObjects); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROGRAMVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum type, GLsizei count, const GLchar **strings); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGECALLBACKKHRCONTEXTANGLEPROC)(GLeglContext ctx, GLDEBUGPROCKHR callback, const void *userParam); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGECONTROLKHRCONTEXTANGLEPROC)(GLeglContext ctx, GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGEINSERTKHRCONTEXTANGLEPROC)(GLeglContext ctx, GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +typedef void (GL_APIENTRYP PFNGLDELETEFENCESNVCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *fences); +typedef void (GL_APIENTRYP PFNGLDELETEMEMORYOBJECTSEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *memoryObjects); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPIPELINESEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *pipelines); +typedef void (GL_APIENTRYP PFNGLDELETEQUERIESEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *ids); +typedef void (GL_APIENTRYP PFNGLDELETESEMAPHORESEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *semaphores); +typedef void (GL_APIENTRYP PFNGLDELETEVERTEXARRAYSOESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLDISABLEIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLDISABLEIOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLDISCARDFRAMEBUFFEREXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei numAttachments, const GLenum *attachments); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLint first, GLsizei count, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLint start, GLsizei count, GLsizei primcount); +typedef void 
(GL_APIENTRYP PFNGLDRAWBUFFERSEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLenum *bufs); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSBASEVERTEXEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSBASEVERTEXOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSBASEVERTEXEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSBASEVERTEXOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETRENDERBUFFERSTORAGEOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLeglImageOES image); +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETTEXTURE2DOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLeglImageOES image); +typedef void (GL_APIENTRYP PFNGLENABLEIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLENABLEIOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLENDQUERYEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target); +typedef void (GL_APIENTRYP PFNGLFINISHFENCENVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint fence); +typedef void (GL_APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr length); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERFETCHBARRIEREXTCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE3DOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTUREEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLuint texture, GLint level); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLuint texture, GLint level, GLint baseViewIndex, GLsizei numViews); +typedef void (GL_APIENTRYP PFNGLGENFENCESNVCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *fences); +typedef void (GL_APIENTRYP PFNGLGENPROGRAMPIPELINESEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *pipelines); +typedef void 
(GL_APIENTRYP PFNGLGENQUERIESEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *ids); +typedef void (GL_APIENTRYP PFNGLGENSEMAPHORESEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *semaphores); +typedef void (GL_APIENTRYP PFNGLGENVERTEXARRAYSOESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, void **params); +typedef GLuint (GL_APIENTRYP PFNGLGETDEBUGMESSAGELOGKHRCONTEXTANGLEPROC)(GLeglContext ctx, GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +typedef void (GL_APIENTRYP PFNGLGETFENCEIVNVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint fence, GLenum pname, GLint *params); +typedef GLint (GL_APIENTRYP PFNGLGETFRAGDATAINDEXEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, const GLchar *name); +typedef GLenum (GL_APIENTRYP PFNGLGETGRAPHICSRESETSTATUSEXTCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETMEMORYOBJECTPARAMETERIVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint memoryObject, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETOBJECTLABELEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum type, GLuint object, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETOBJECTLABELKHRCONTEXTANGLEPROC)(GLeglContext ctx, GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETOBJECTPTRLABELKHRCONTEXTANGLEPROC)(GLeglContext ctx, const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETPOINTERVKHRCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, void **params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMBINARYOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMPIPELINEINFOLOGEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMPIPELINEIVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline, GLenum pname, GLint *params); +typedef GLint (GL_APIENTRYP PFNGLGETPROGRAMRESOURCELOCATIONINDEXEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum programInterface, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTI64VEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint id, GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTIVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint id, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUI64VEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint id, GLenum pname, GLuint64 *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUIVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint id, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYIVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP 
PFNGLGETSEMAPHOREPARAMETERUI64VEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint semaphore, GLenum pname, GLuint64 *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIUIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETTRANSLATEDSHADERSOURCEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint shader, GLsizei bufsize, GLsizei *length, GLchar *source); +typedef void (GL_APIENTRYP PFNGLGETUNSIGNEDBYTEVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLubyte *data); +typedef void (GL_APIENTRYP PFNGLGETUNSIGNEDBYTEI_VEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index, GLubyte *data); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMFVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMIVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLint *params); +typedef void (GL_APIENTRYP PFNGLIMPORTMEMORYFDEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint memory, GLuint64 size, GLenum handleType, GLint fd); +typedef void (GL_APIENTRYP PFNGLIMPORTSEMAPHOREFDEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint semaphore, GLenum handleType, GLint fd); +typedef void (GL_APIENTRYP PFNGLINSERTEVENTMARKEREXTCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei length, const GLchar *marker); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDIOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index); +typedef GLboolean (GL_APIENTRYP PFNGLISFENCENVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint fence); +typedef GLboolean (GL_APIENTRYP PFNGLISMEMORYOBJECTEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint memoryObject); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPIPELINEEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline); +typedef GLboolean (GL_APIENTRYP PFNGLISQUERYEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint id); +typedef GLboolean (GL_APIENTRYP PFNGLISSEMAPHOREEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint semaphore); +typedef GLboolean (GL_APIENTRYP PFNGLISVERTEXARRAYOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint array); +typedef void (GL_APIENTRYP PFNGLLABELOBJECTEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum type, GLuint object, GLsizei length, const GLchar *label); +typedef void *(GL_APIENTRYP PFNGLMAPBUFFEROESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum access); +typedef void *(GL_APIENTRYP PFNGLMAPBUFFERRANGEEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (GL_APIENTRYP PFNGLMAXSHADERCOMPILERTHREADSKHRCONTEXTANGLEPROC)(GLeglContext ctx, GLuint count); +typedef void (GL_APIENTRYP PFNGLMEMORYOBJECTPARAMETERIVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint memoryObject, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLMINSAMPLESHADINGOESCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat value); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSBASEVERTEXEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount, const GLint *basevertex); +typedef void (GL_APIENTRYP PFNGLNAMEDBUFFERSTORAGEEXTERNALEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint buffer, GLintptr offset, GLsizeiptr size, 
GLeglClientBufferEXT clientBuffer, GLbitfield flags); +typedef void (GL_APIENTRYP PFNGLOBJECTLABELKHRCONTEXTANGLEPROC)(GLeglContext ctx, GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLOBJECTPTRLABELKHRCONTEXTANGLEPROC)(GLeglContext ctx, const void *ptr, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLPATCHPARAMETERIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLPOPDEBUGGROUPKHRCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLPOPGROUPMARKEREXTCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLPROGRAMBINARYOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum binaryFormat, const void *binary, GLint length); +typedef void (GL_APIENTRYP PFNGLPROGRAMPARAMETERIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1IEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1IVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLuint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UIVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2IEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2IVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UIVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3IEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3IVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (GL_APIENTRYP 
PFNGLPROGRAMUNIFORM3UIVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4IEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4IVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UIEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UIVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3FVEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPUSHDEBUGGROUPKHRCONTEXTANGLEPROC)(GLeglContext ctx, GLenum source, GLuint id, GLsizei length, const GLchar *message); +typedef void (GL_APIENTRYP PFNGLPUSHGROUPMARKEREXTCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei length, const GLchar *marker); +typedef void (GL_APIENTRYP PFNGLQUERYCOUNTEREXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint id, GLenum target); +typedef void (GL_APIENTRYP PFNGLREADNPIXELSEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei 
height); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIUIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, const GLuint *param); +typedef void (GL_APIENTRYP PFNGLSEMAPHOREPARAMETERUI64VEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint semaphore, GLenum pname, const GLuint64 *params); +typedef void (GL_APIENTRYP PFNGLSETFENCENVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint fence, GLenum condition); +typedef void (GL_APIENTRYP PFNGLSIGNALSEMAPHOREEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *dstLayouts); +typedef GLboolean (GL_APIENTRYP PFNGLTESTFENCENVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint fence); +typedef void (GL_APIENTRYP PFNGLTEXBUFFEREXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTEXBUFFEROESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTEXBUFFERRANGEEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLTEXBUFFERRANGEOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE3DOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIUIVOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, const GLuint *params); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE1DEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DMULTISAMPLEOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM2DEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM2DMULTISAMPLEEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM3DEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalFormat, 
GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM3DMULTISAMPLEEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE3DOESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef GLboolean (GL_APIENTRYP PFNGLUNMAPBUFFEROESCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMSTAGESEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline, GLbitfield stages, GLuint program); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPIPELINEEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISORANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLuint divisor); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISOREXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLuint divisor); +typedef void (GL_APIENTRYP PFNGLWAITSEMAPHOREEXTCONTEXTANGLEPROC)(GLeglContext ctx, GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *srcLayouts); +typedef void (GL_APIENTRYP PFNGLBINDUNIFORMLOCATIONCHROMIUMCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, const GLchar* name); +typedef void (GL_APIENTRYP PFNGLCOVERAGEMODULATIONCHROMIUMCONTEXTANGLEPROC)(GLeglContext ctx, GLenum components); +typedef void (GL_APIENTRYP PFNGLCOPYTEXTURECHROMIUMCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sourceId, GLint sourceLevel, GLenum destTarget, GLuint destId, GLint destLevel, GLint internalFormat, GLenum destType, GLboolean unpackFlipY, GLboolean unpackPremultiplyAlpha, GLboolean unpackUnmultiplyAlpha); +typedef void (GL_APIENTRYP PFNGLCOPYSUBTEXTURECHROMIUMCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sourceId, GLint sourceLevel, GLenum destTarget, GLuint destId, GLint destLevel, GLint xoffset, GLint yoffset, GLint x, GLint y, GLint width, GLint height, GLboolean unpackFlipY, GLboolean unpackPremultiplyAlpha, GLboolean unpackUnmultiplyAlpha); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDCOPYTEXTURECHROMIUMCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sourceId, GLuint destId); +typedef void (GL_APIENTRYP PFNGLREQUESTEXTENSIONANGLECONTEXTANGLEPROC)(GLeglContext ctx, const GLchar * name); +typedef void (GL_APIENTRYP PFNGLDISABLEEXTENSIONANGLECONTEXTANGLEPROC)(GLeglContext ctx, const GLchar * name); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLsizei bufSize, GLsizei * length, GLboolean * params); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETFLOATVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLsizei bufSize, GLsizei * length, GLfloat * params); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLsizei bufSize, GLsizei * length, 
GLint * data); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETSHADERIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint shader, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERFVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLfloat * params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMFVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLfloat * params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBFVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLenum pname, GLsizei bufSize, GLsizei * length, GLfloat * params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLenum pname, GLsizei bufSize, GLsizei * length, void ** pointer); +typedef void (GL_APIENTRYP PFNGLREADPIXELSROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, GLsizei * length, GLsizei * columns, GLsizei * rows, void * pixels); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, GLsizei bufSize, const void * pixels); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, const GLfloat * params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, const GLint * params); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE2DROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, const void * pixels); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE3DROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, GLsizei bufSize, const void * pixels); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE3DROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, GLsizei bufSize, const void * pixels); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint 
level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, GLsizei dataSize, const GLvoid * data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLsizei xoffset, GLsizei yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, GLsizei dataSize, const GLvoid * data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, GLsizei dataSize, const GLvoid * data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, GLsizei dataSize, const GLvoid * data); +typedef void (GL_APIENTRYP PFNGLGETQUERYIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint id, GLenum pname, GLsizei bufSize, GLsizei * length, GLuint * params); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, void ** params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERI_VROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index, GLsizei bufSize, GLsizei * length, GLint * data); +typedef void (GL_APIENTRYP PFNGLGETINTERNALFORMATIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIUIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLenum pname, GLsizei bufSize, GLsizei * length, GLuint * params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMUIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLuint * params); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint uniformBlockIndex, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLsizei bufSize, GLsizei * length, GLint64 * data); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64I_VROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index, GLsizei bufSize, GLsizei * length, GLint64 * data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERI64VROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint64 * params); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLuint pname, GLsizei bufSize, const GLint * param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, const GLfloat * param); +typedef void (GL_APIENTRYP 
PFNGLGETSAMPLERPARAMETERIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERFVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei * length, GLfloat * params); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERPARAMETERIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINTERFACEIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum programInterface, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANI_VROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index, GLsizei bufSize, GLsizei * length, GLboolean * data); +typedef void (GL_APIENTRYP PFNGLGETMULTISAMPLEFVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLuint index, GLsizei bufSize, GLsizei * length, GLfloat * val); +typedef void (GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERFVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLsizei bufSize, GLsizei * length, GLfloat * params); +typedef void (GL_APIENTRYP PFNGLGETPOINTERVROBUSTANGLEROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLsizei bufSize, GLsizei * length, void ** params); +typedef void (GL_APIENTRYP PFNGLREADNPIXELSROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, GLsizei * length, GLsizei * columns, GLsizei * rows, void * data); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMFVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLfloat * params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMUIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLuint * params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, const GLint * params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIUIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, const GLuint * params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIUIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLuint * params); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, const GLint * param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIUIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, const GLuint * param); +typedef void (GL_APIENTRYP 
PFNGLGETSAMPLERPARAMETERIIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei * length, GLuint * params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTIVROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint id, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTI64VROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint id, GLenum pname, GLsizei bufSize, GLsizei * length, GLint64 * params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUI64VROBUSTANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint id, GLenum pname, GLsizei bufSize, GLsizei * length, GLuint64 * params); +typedef void (GL_APIENTRYP PFNGLCOPYTEXTURE3DANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint sourceId, GLint sourceLevel, GLenum destTarget, GLuint destId, GLint destLevel, GLint internalFormat, GLenum destType, GLboolean unpackFlipY, GLboolean unpackPremultiplyAlpha, GLboolean unpackUnmultiplyAlpha); +typedef void (GL_APIENTRYP PFNGLCOPYSUBTEXTURE3DANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint sourceId, GLint sourceLevel, GLenum destTarget, GLuint destId, GLint destLevel, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLint z, GLint width, GLint height, GLint depth, GLboolean unpackFlipY, GLboolean unpackPremultiplyAlpha, GLboolean unpackUnmultiplyAlpha); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DMULTISAMPLEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERIVANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERFVANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLfloat * params); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, const GLint *firsts, const GLsizei *counts, GLsizei drawcount); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSINSTANCEDANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, const GLint *firsts, const GLsizei *counts, const GLsizei *instanceCounts, GLsizei drawcount); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, const GLsizei *counts, GLenum type, const GLvoid *const*indices, GLsizei drawcount); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSINSTANCEDANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, const GLsizei *counts, GLenum type, const GLvoid *const*indices, const GLsizei*instanceCounts, GLsizei drawcount); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDBASEINSTANCEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLint first, GLsizei count, GLsizei instanceCount, GLuint baseInstance); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXBASEINSTANCEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const GLvoid *indices, GLsizei instanceCounts, GLint baseVertex, GLuint baseInstance); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSINSTANCEDBASEINSTANCEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, const GLint *firsts, const GLsizei *counts, const GLsizei *instanceCounts, const GLuint *baseInstances, GLsizei drawcount); 
+typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSINSTANCEDBASEVERTEXBASEINSTANCEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, const GLsizei *counts, GLenum type, const GLvoid *const*indices, const GLsizei *instanceCounts, const GLint *baseVertices, const GLuint *baseInstances, GLsizei drawcount); +typedef void (GL_APIENTRYP PFNGLGETMULTISAMPLEFVANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLuint index, GLfloat * val); +typedef void (GL_APIENTRYP PFNGLSAMPLEMASKIANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint maskNumber, GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLPROVOKINGVERTEXANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode); +typedef void (GL_APIENTRYP PFNGLLOSECONTEXTCHROMIUMCONTEXTANGLEPROC)(GLeglContext ctx, GLenum current, GLenum other); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DEXTERNALANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type); +typedef void (GL_APIENTRYP PFNGLINVALIDATETEXTUREANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target); +typedef void (GL_APIENTRYP PFNGLGETTEXIMAGEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERIMAGEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum format, GLenum type, void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEMFLAGS2DANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEMFLAGS2DMULTISAMPLEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEMFLAGS3DANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEMFLAGS3DMULTISAMPLEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +typedef void (GL_APIENTRYP PFNGLIMPORTMEMORYZIRCONHANDLEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint memory, GLuint64 size, GLenum handleType, GLuint handle); +typedef void (GL_APIENTRYP PFNGLIMPORTSEMAPHOREZIRCONHANDLEANGLECONTEXTANGLEPROC)(GLeglContext ctx, GLuint semaphore, GLenum handleType, GLuint handle); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glActiveTextureContextANGLE(GLeglContext ctx, GLenum texture); +GL_APICALL void GL_APIENTRY glAttachShaderContextANGLE(GLeglContext ctx, GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glBindAttribLocationContextANGLE(GLeglContext ctx, GLuint program, GLuint index, const GLchar *name); +GL_APICALL void GL_APIENTRY glBindBufferContextANGLE(GLeglContext ctx, GLenum target, GLuint buffer); +GL_APICALL void GL_APIENTRY glBindFramebufferContextANGLE(GLeglContext ctx, GLenum target, GLuint framebuffer); +GL_APICALL void GL_APIENTRY glBindRenderbufferContextANGLE(GLeglContext ctx, GLenum target, 
GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glBindTextureContextANGLE(GLeglContext ctx, GLenum target, GLuint texture); +GL_APICALL void GL_APIENTRY glBlendColorContextANGLE(GLeglContext ctx, GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glBlendEquationContextANGLE(GLeglContext ctx, GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparateContextANGLE(GLeglContext ctx, GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFuncContextANGLE(GLeglContext ctx, GLenum sfactor, GLenum dfactor); +GL_APICALL void GL_APIENTRY glBlendFuncSeparateContextANGLE(GLeglContext ctx, GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +GL_APICALL void GL_APIENTRY glBufferDataContextANGLE(GLeglContext ctx, GLenum target, GLsizeiptr size, const void *data, GLenum usage); +GL_APICALL void GL_APIENTRY glBufferSubDataContextANGLE(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +GL_APICALL GLenum GL_APIENTRY glCheckFramebufferStatusContextANGLE(GLeglContext ctx, GLenum target); +GL_APICALL void GL_APIENTRY glClearContextANGLE(GLeglContext ctx, GLbitfield mask); +GL_APICALL void GL_APIENTRY glClearColorContextANGLE(GLeglContext ctx, GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glClearDepthfContextANGLE(GLeglContext ctx, GLfloat d); +GL_APICALL void GL_APIENTRY glClearStencilContextANGLE(GLeglContext ctx, GLint s); +GL_APICALL void GL_APIENTRY glColorMaskContextANGLE(GLeglContext ctx, GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +GL_APICALL void GL_APIENTRY glCompileShaderContextANGLE(GLeglContext ctx, GLuint shader); +GL_APICALL void GL_APIENTRY glCompressedTexImage2DContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage2DContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCopyTexImage2DContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GL_APICALL void GL_APIENTRY glCopyTexSubImage2DContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL GLuint GL_APIENTRY glCreateProgramContextANGLE(GLeglContext ctx); +GL_APICALL GLuint GL_APIENTRY glCreateShaderContextANGLE(GLeglContext ctx, GLenum type); +GL_APICALL void GL_APIENTRY glCullFaceContextANGLE(GLeglContext ctx, GLenum mode); +GL_APICALL void GL_APIENTRY glDeleteBuffersContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *buffers); +GL_APICALL void GL_APIENTRY glDeleteFramebuffersContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *framebuffers); +GL_APICALL void GL_APIENTRY glDeleteProgramContextANGLE(GLeglContext ctx, GLuint program); +GL_APICALL void GL_APIENTRY glDeleteRenderbuffersContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glDeleteShaderContextANGLE(GLeglContext ctx, GLuint shader); +GL_APICALL void GL_APIENTRY glDeleteTexturesContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *textures); +GL_APICALL void GL_APIENTRY glDepthFuncContextANGLE(GLeglContext ctx, GLenum func); +GL_APICALL void GL_APIENTRY 
glDepthMaskContextANGLE(GLeglContext ctx, GLboolean flag); +GL_APICALL void GL_APIENTRY glDepthRangefContextANGLE(GLeglContext ctx, GLfloat n, GLfloat f); +GL_APICALL void GL_APIENTRY glDetachShaderContextANGLE(GLeglContext ctx, GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glDisableContextANGLE(GLeglContext ctx, GLenum cap); +GL_APICALL void GL_APIENTRY glDisableVertexAttribArrayContextANGLE(GLeglContext ctx, GLuint index); +GL_APICALL void GL_APIENTRY glDrawArraysContextANGLE(GLeglContext ctx, GLenum mode, GLint first, GLsizei count); +GL_APICALL void GL_APIENTRY glDrawElementsContextANGLE(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glEnableContextANGLE(GLeglContext ctx, GLenum cap); +GL_APICALL void GL_APIENTRY glEnableVertexAttribArrayContextANGLE(GLeglContext ctx, GLuint index); +GL_APICALL void GL_APIENTRY glFinishContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glFlushContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glFramebufferRenderbufferContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glFramebufferTexture2DContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GL_APICALL void GL_APIENTRY glFrontFaceContextANGLE(GLeglContext ctx, GLenum mode); +GL_APICALL void GL_APIENTRY glGenBuffersContextANGLE(GLeglContext ctx, GLsizei n, GLuint *buffers); +GL_APICALL void GL_APIENTRY glGenFramebuffersContextANGLE(GLeglContext ctx, GLsizei n, GLuint *framebuffers); +GL_APICALL void GL_APIENTRY glGenRenderbuffersContextANGLE(GLeglContext ctx, GLsizei n, GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glGenTexturesContextANGLE(GLeglContext ctx, GLsizei n, GLuint *textures); +GL_APICALL void GL_APIENTRY glGenerateMipmapContextANGLE(GLeglContext ctx, GLenum target); +GL_APICALL void GL_APIENTRY glGetActiveAttribContextANGLE(GLeglContext ctx, GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetActiveUniformContextANGLE(GLeglContext ctx, GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetAttachedShadersContextANGLE(GLeglContext ctx, GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +GL_APICALL GLint GL_APIENTRY glGetAttribLocationContextANGLE(GLeglContext ctx, GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetBooleanvContextANGLE(GLeglContext ctx, GLenum pname, GLboolean *data); +GL_APICALL void GL_APIENTRY glGetBufferParameterivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +GL_APICALL GLenum GL_APIENTRY glGetErrorContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glGetFloatvContextANGLE(GLeglContext ctx, GLenum pname, GLfloat *data); +GL_APICALL void GL_APIENTRY glGetFramebufferAttachmentParameterivContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetIntegervContextANGLE(GLeglContext ctx, GLenum pname, GLint *data); +GL_APICALL void GL_APIENTRY glGetProgramInfoLogContextANGLE(GLeglContext ctx, GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetProgramivContextANGLE(GLeglContext ctx, GLuint program, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY 
glGetRenderbufferParameterivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderInfoLogContextANGLE(GLeglContext ctx, GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetShaderPrecisionFormatContextANGLE(GLeglContext ctx, GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +GL_APICALL void GL_APIENTRY glGetShaderSourceContextANGLE(GLeglContext ctx, GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +GL_APICALL void GL_APIENTRY glGetShaderivContextANGLE(GLeglContext ctx, GLuint shader, GLenum pname, GLint *params); +GL_APICALL const GLubyte *GL_APIENTRY glGetStringContextANGLE(GLeglContext ctx, GLenum name); +GL_APICALL void GL_APIENTRY glGetTexParameterfvContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetTexParameterivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +GL_APICALL GLint GL_APIENTRY glGetUniformLocationContextANGLE(GLeglContext ctx, GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetUniformfvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetUniformivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribPointervContextANGLE(GLeglContext ctx, GLuint index, GLenum pname, void **pointer); +GL_APICALL void GL_APIENTRY glGetVertexAttribfvContextANGLE(GLeglContext ctx, GLuint index, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribivContextANGLE(GLeglContext ctx, GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glHintContextANGLE(GLeglContext ctx, GLenum target, GLenum mode); +GL_APICALL GLboolean GL_APIENTRY glIsBufferContextANGLE(GLeglContext ctx, GLuint buffer); +GL_APICALL GLboolean GL_APIENTRY glIsEnabledContextANGLE(GLeglContext ctx, GLenum cap); +GL_APICALL GLboolean GL_APIENTRY glIsFramebufferContextANGLE(GLeglContext ctx, GLuint framebuffer); +GL_APICALL GLboolean GL_APIENTRY glIsProgramContextANGLE(GLeglContext ctx, GLuint program); +GL_APICALL GLboolean GL_APIENTRY glIsRenderbufferContextANGLE(GLeglContext ctx, GLuint renderbuffer); +GL_APICALL GLboolean GL_APIENTRY glIsShaderContextANGLE(GLeglContext ctx, GLuint shader); +GL_APICALL GLboolean GL_APIENTRY glIsTextureContextANGLE(GLeglContext ctx, GLuint texture); +GL_APICALL void GL_APIENTRY glLineWidthContextANGLE(GLeglContext ctx, GLfloat width); +GL_APICALL void GL_APIENTRY glLinkProgramContextANGLE(GLeglContext ctx, GLuint program); +GL_APICALL void GL_APIENTRY glPixelStoreiContextANGLE(GLeglContext ctx, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glPolygonOffsetContextANGLE(GLeglContext ctx, GLfloat factor, GLfloat units); +GL_APICALL void GL_APIENTRY glReadPixelsContextANGLE(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +GL_APICALL void GL_APIENTRY glReleaseShaderCompilerContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glRenderbufferStorageContextANGLE(GLeglContext ctx, GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glSampleCoverageContextANGLE(GLeglContext ctx, GLfloat value, GLboolean invert); +GL_APICALL void GL_APIENTRY glScissorContextANGLE(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY 
glShaderBinaryContextANGLE(GLeglContext ctx, GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glShaderSourceContextANGLE(GLeglContext ctx, GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +GL_APICALL void GL_APIENTRY glStencilFuncContextANGLE(GLeglContext ctx, GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilFuncSeparateContextANGLE(GLeglContext ctx, GLenum face, GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMaskContextANGLE(GLeglContext ctx, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMaskSeparateContextANGLE(GLeglContext ctx, GLenum face, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilOpContextANGLE(GLeglContext ctx, GLenum fail, GLenum zfail, GLenum zpass); +GL_APICALL void GL_APIENTRY glStencilOpSeparateContextANGLE(GLeglContext ctx, GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +GL_APICALL void GL_APIENTRY glTexImage2DContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexParameterfContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glTexParameterfvContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, const GLfloat *params); +GL_APICALL void GL_APIENTRY glTexParameteriContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glTexParameterivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexSubImage2DContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glUniform1fContextANGLE(GLeglContext ctx, GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glUniform1fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform1iContextANGLE(GLeglContext ctx, GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glUniform1ivContextANGLE(GLeglContext ctx, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform2fContextANGLE(GLeglContext ctx, GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glUniform2fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform2iContextANGLE(GLeglContext ctx, GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glUniform2ivContextANGLE(GLeglContext ctx, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform3fContextANGLE(GLeglContext ctx, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glUniform3fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform3iContextANGLE(GLeglContext ctx, GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glUniform3ivContextANGLE(GLeglContext ctx, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform4fContextANGLE(GLeglContext ctx, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glUniform4fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, 
const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform4iContextANGLE(GLeglContext ctx, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glUniform4ivContextANGLE(GLeglContext ctx, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUseProgramContextANGLE(GLeglContext ctx, GLuint program); +GL_APICALL void GL_APIENTRY glValidateProgramContextANGLE(GLeglContext ctx, GLuint program); +GL_APICALL void GL_APIENTRY glVertexAttrib1fContextANGLE(GLeglContext ctx, GLuint index, GLfloat x); +GL_APICALL void GL_APIENTRY glVertexAttrib1fvContextANGLE(GLeglContext ctx, GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib2fContextANGLE(GLeglContext ctx, GLuint index, GLfloat x, GLfloat y); +GL_APICALL void GL_APIENTRY glVertexAttrib2fvContextANGLE(GLeglContext ctx, GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib3fContextANGLE(GLeglContext ctx, GLuint index, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glVertexAttrib3fvContextANGLE(GLeglContext ctx, GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib4fContextANGLE(GLeglContext ctx, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GL_APICALL void GL_APIENTRY glVertexAttrib4fvContextANGLE(GLeglContext ctx, GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttribPointerContextANGLE(GLeglContext ctx, GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +GL_APICALL void GL_APIENTRY glViewportContextANGLE(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glActiveShaderProgramEXTContextANGLE(GLeglContext ctx, GLuint pipeline, GLuint program); +GL_APICALL void GL_APIENTRY glBeginQueryEXTContextANGLE(GLeglContext ctx, GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glBindFragDataLocationEXTContextANGLE(GLeglContext ctx, GLuint program, GLuint color, const GLchar *name); +GL_APICALL void GL_APIENTRY glBindFragDataLocationIndexedEXTContextANGLE(GLeglContext ctx, GLuint program, GLuint colorNumber, GLuint index, const GLchar *name); +GL_APICALL void GL_APIENTRY glBindProgramPipelineEXTContextANGLE(GLeglContext ctx, GLuint pipeline); +GL_APICALL void GL_APIENTRY glBindVertexArrayOESContextANGLE(GLeglContext ctx, GLuint array); +GL_APICALL void GL_APIENTRY glBlendEquationSeparateiEXTContextANGLE(GLeglContext ctx, GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendEquationSeparateiOESContextANGLE(GLeglContext ctx, GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendEquationiEXTContextANGLE(GLeglContext ctx, GLuint buf, GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationiOESContextANGLE(GLeglContext ctx, GLuint buf, GLenum mode); +GL_APICALL void GL_APIENTRY glBlendFuncSeparateiEXTContextANGLE(GLeglContext ctx, GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +GL_APICALL void GL_APIENTRY 
glBlendFuncSeparateiOESContextANGLE(GLeglContext ctx, GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +GL_APICALL void GL_APIENTRY glBlendFunciEXTContextANGLE(GLeglContext ctx, GLuint buf, GLenum src, GLenum dst); +GL_APICALL void GL_APIENTRY glBlendFunciOESContextANGLE(GLeglContext ctx, GLuint buf, GLenum src, GLenum dst); +GL_APICALL void GL_APIENTRY glBlitFramebufferANGLEContextANGLE(GLeglContext ctx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GL_APICALL void GL_APIENTRY glBlitFramebufferNVContextANGLE(GLeglContext ctx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GL_APICALL void GL_APIENTRY glBufferStorageEXTContextANGLE(GLeglContext ctx, GLenum target, GLsizeiptr size, const void *data, GLbitfield flags); +GL_APICALL void GL_APIENTRY glBufferStorageExternalEXTContextANGLE(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +GL_APICALL void GL_APIENTRY glBufferStorageMemEXTContextANGLE(GLeglContext ctx, GLenum target, GLsizeiptr size, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glClipControlEXTContextANGLE(GLeglContext ctx, GLenum origin, GLenum depth); +GL_APICALL void GL_APIENTRY glColorMaskiEXTContextANGLE(GLeglContext ctx, GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +GL_APICALL void GL_APIENTRY glColorMaskiOESContextANGLE(GLeglContext ctx, GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +GL_APICALL void GL_APIENTRY glCompressedTexImage3DOESContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage3DOESContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCopyImageSubDataEXTContextANGLE(GLeglContext ctx, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +GL_APICALL void GL_APIENTRY glCopyImageSubDataOESContextANGLE(GLeglContext ctx, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +GL_APICALL void GL_APIENTRY glCopyTexSubImage3DOESContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glCreateMemoryObjectsEXTContextANGLE(GLeglContext ctx, GLsizei n, GLuint *memoryObjects); +GL_APICALL GLuint GL_APIENTRY glCreateShaderProgramvEXTContextANGLE(GLeglContext ctx, GLenum type, GLsizei count, const GLchar **strings); +GL_APICALL void GL_APIENTRY glDebugMessageCallbackKHRContextANGLE(GLeglContext ctx, GLDEBUGPROCKHR callback, const void *userParam); +GL_APICALL void GL_APIENTRY glDebugMessageControlKHRContextANGLE(GLeglContext ctx, GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); 
+GL_APICALL void GL_APIENTRY glDebugMessageInsertKHRContextANGLE(GLeglContext ctx, GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +GL_APICALL void GL_APIENTRY glDeleteFencesNVContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *fences); +GL_APICALL void GL_APIENTRY glDeleteMemoryObjectsEXTContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *memoryObjects); +GL_APICALL void GL_APIENTRY glDeleteProgramPipelinesEXTContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *pipelines); +GL_APICALL void GL_APIENTRY glDeleteQueriesEXTContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *ids); +GL_APICALL void GL_APIENTRY glDeleteSemaphoresEXTContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *semaphores); +GL_APICALL void GL_APIENTRY glDeleteVertexArraysOESContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *arrays); +GL_APICALL void GL_APIENTRY glDisableiEXTContextANGLE(GLeglContext ctx, GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glDisableiOESContextANGLE(GLeglContext ctx, GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glDiscardFramebufferEXTContextANGLE(GLeglContext ctx, GLenum target, GLsizei numAttachments, const GLenum *attachments); +GL_APICALL void GL_APIENTRY glDrawArraysInstancedANGLEContextANGLE(GLeglContext ctx, GLenum mode, GLint first, GLsizei count, GLsizei primcount); +GL_APICALL void GL_APIENTRY glDrawArraysInstancedEXTContextANGLE(GLeglContext ctx, GLenum mode, GLint start, GLsizei count, GLsizei primcount); +GL_APICALL void GL_APIENTRY glDrawBuffersEXTContextANGLE(GLeglContext ctx, GLsizei n, const GLenum *bufs); +GL_APICALL void GL_APIENTRY glDrawElementsBaseVertexEXTContextANGLE(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawElementsBaseVertexOESContextANGLE(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedANGLEContextANGLE(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseVertexEXTContextANGLE(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseVertexOESContextANGLE(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedEXTContextANGLE(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +GL_APICALL void GL_APIENTRY glDrawRangeElementsBaseVertexEXTContextANGLE(GLeglContext ctx, GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawRangeElementsBaseVertexOESContextANGLE(GLeglContext ctx, GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glEGLImageTargetRenderbufferStorageOESContextANGLE(GLeglContext ctx, GLenum target, GLeglImageOES image); +GL_APICALL void GL_APIENTRY glEGLImageTargetTexture2DOESContextANGLE(GLeglContext ctx, GLenum target, GLeglImageOES image); +GL_APICALL void GL_APIENTRY glEnableiEXTContextANGLE(GLeglContext ctx, GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glEnableiOESContextANGLE(GLeglContext ctx, GLenum 
target, GLuint index); +GL_APICALL void GL_APIENTRY glEndQueryEXTContextANGLE(GLeglContext ctx, GLenum target); +GL_APICALL void GL_APIENTRY glFinishFenceNVContextANGLE(GLeglContext ctx, GLuint fence); +GL_APICALL void GL_APIENTRY glFlushMappedBufferRangeEXTContextANGLE(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr length); +GL_APICALL void GL_APIENTRY glFramebufferFetchBarrierEXTContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glFramebufferTexture2DMultisampleEXTContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +GL_APICALL void GL_APIENTRY glFramebufferTexture3DOESContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +GL_APICALL void GL_APIENTRY glFramebufferTextureEXTContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLuint texture, GLint level); +GL_APICALL void GL_APIENTRY glFramebufferTextureMultiviewOVRContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLuint texture, GLint level, GLint baseViewIndex, GLsizei numViews); +GL_APICALL void GL_APIENTRY glGenFencesNVContextANGLE(GLeglContext ctx, GLsizei n, GLuint *fences); +GL_APICALL void GL_APIENTRY glGenProgramPipelinesEXTContextANGLE(GLeglContext ctx, GLsizei n, GLuint *pipelines); +GL_APICALL void GL_APIENTRY glGenQueriesEXTContextANGLE(GLeglContext ctx, GLsizei n, GLuint *ids); +GL_APICALL void GL_APIENTRY glGenSemaphoresEXTContextANGLE(GLeglContext ctx, GLsizei n, GLuint *semaphores); +GL_APICALL void GL_APIENTRY glGenVertexArraysOESContextANGLE(GLeglContext ctx, GLsizei n, GLuint *arrays); +GL_APICALL void GL_APIENTRY glGetBufferPointervOESContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, void **params); +GL_APICALL GLuint GL_APIENTRY glGetDebugMessageLogKHRContextANGLE(GLeglContext ctx, GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +GL_APICALL void GL_APIENTRY glGetFenceivNVContextANGLE(GLeglContext ctx, GLuint fence, GLenum pname, GLint *params); +GL_APICALL GLint GL_APIENTRY glGetFragDataIndexEXTContextANGLE(GLeglContext ctx, GLuint program, const GLchar *name); +GL_APICALL GLenum GL_APIENTRY glGetGraphicsResetStatusEXTContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glGetInteger64vEXTContextANGLE(GLeglContext ctx, GLenum pname, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetMemoryObjectParameterivEXTContextANGLE(GLeglContext ctx, GLuint memoryObject, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetObjectLabelEXTContextANGLE(GLeglContext ctx, GLenum type, GLuint object, GLsizei bufSize, GLsizei *length, GLchar *label); +GL_APICALL void GL_APIENTRY glGetObjectLabelKHRContextANGLE(GLeglContext ctx, GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +GL_APICALL void GL_APIENTRY glGetObjectPtrLabelKHRContextANGLE(GLeglContext ctx, const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +GL_APICALL void GL_APIENTRY glGetPointervKHRContextANGLE(GLeglContext ctx, GLenum pname, void **params); +GL_APICALL void GL_APIENTRY glGetProgramBinaryOESContextANGLE(GLeglContext ctx, GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +GL_APICALL void GL_APIENTRY glGetProgramPipelineInfoLogEXTContextANGLE(GLeglContext ctx, GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY 
glGetProgramPipelineivEXTContextANGLE(GLeglContext ctx, GLuint pipeline, GLenum pname, GLint *params); +GL_APICALL GLint GL_APIENTRY glGetProgramResourceLocationIndexEXTContextANGLE(GLeglContext ctx, GLuint program, GLenum programInterface, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetQueryObjecti64vEXTContextANGLE(GLeglContext ctx, GLuint id, GLenum pname, GLint64 *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectivEXTContextANGLE(GLeglContext ctx, GLuint id, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectui64vEXTContextANGLE(GLeglContext ctx, GLuint id, GLenum pname, GLuint64 *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectuivEXTContextANGLE(GLeglContext ctx, GLuint id, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glGetQueryivEXTContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIivOESContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIuivOESContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glGetSemaphoreParameterui64vEXTContextANGLE(GLeglContext ctx, GLuint semaphore, GLenum pname, GLuint64 *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIivOESContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIuivOESContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glGetTranslatedShaderSourceANGLEContextANGLE(GLeglContext ctx, GLuint shader, GLsizei bufsize, GLsizei *length, GLchar *source); +GL_APICALL void GL_APIENTRY glGetUnsignedBytevEXTContextANGLE(GLeglContext ctx, GLenum pname, GLubyte *data); +GL_APICALL void GL_APIENTRY glGetUnsignedBytei_vEXTContextANGLE(GLeglContext ctx, GLenum target, GLuint index, GLubyte *data); +GL_APICALL void GL_APIENTRY glGetnUniformfvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetnUniformivEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLint *params); +GL_APICALL void GL_APIENTRY glImportMemoryFdEXTContextANGLE(GLeglContext ctx, GLuint memory, GLuint64 size, GLenum handleType, GLint fd); +GL_APICALL void GL_APIENTRY glImportSemaphoreFdEXTContextANGLE(GLeglContext ctx, GLuint semaphore, GLenum handleType, GLint fd); +GL_APICALL void GL_APIENTRY glInsertEventMarkerEXTContextANGLE(GLeglContext ctx, GLsizei length, const GLchar *marker); +GL_APICALL GLboolean GL_APIENTRY glIsEnablediEXTContextANGLE(GLeglContext ctx, GLenum target, GLuint index); +GL_APICALL GLboolean GL_APIENTRY glIsEnablediOESContextANGLE(GLeglContext ctx, GLenum target, GLuint index); +GL_APICALL GLboolean GL_APIENTRY glIsFenceNVContextANGLE(GLeglContext ctx, GLuint fence); +GL_APICALL GLboolean GL_APIENTRY glIsMemoryObjectEXTContextANGLE(GLeglContext ctx, GLuint memoryObject); +GL_APICALL GLboolean GL_APIENTRY glIsProgramPipelineEXTContextANGLE(GLeglContext ctx, GLuint pipeline); +GL_APICALL GLboolean GL_APIENTRY glIsQueryEXTContextANGLE(GLeglContext ctx, GLuint id); +GL_APICALL GLboolean GL_APIENTRY glIsSemaphoreEXTContextANGLE(GLeglContext ctx, GLuint semaphore); +GL_APICALL GLboolean GL_APIENTRY glIsVertexArrayOESContextANGLE(GLeglContext ctx, GLuint array); +GL_APICALL void GL_APIENTRY glLabelObjectEXTContextANGLE(GLeglContext ctx, GLenum type, GLuint object, GLsizei length, 
const GLchar *label); +GL_APICALL void *GL_APIENTRY glMapBufferOESContextANGLE(GLeglContext ctx, GLenum target, GLenum access); +GL_APICALL void *GL_APIENTRY glMapBufferRangeEXTContextANGLE(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GL_APICALL void GL_APIENTRY glMaxShaderCompilerThreadsKHRContextANGLE(GLeglContext ctx, GLuint count); +GL_APICALL void GL_APIENTRY glMemoryObjectParameterivEXTContextANGLE(GLeglContext ctx, GLuint memoryObject, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glMinSampleShadingOESContextANGLE(GLeglContext ctx, GLfloat value); +GL_APICALL void GL_APIENTRY glMultiDrawElementsBaseVertexEXTContextANGLE(GLeglContext ctx, GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount, const GLint *basevertex); +GL_APICALL void GL_APIENTRY glNamedBufferStorageExternalEXTContextANGLE(GLeglContext ctx, GLuint buffer, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +GL_APICALL void GL_APIENTRY glObjectLabelKHRContextANGLE(GLeglContext ctx, GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glObjectPtrLabelKHRContextANGLE(GLeglContext ctx, const void *ptr, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glPatchParameteriEXTContextANGLE(GLeglContext ctx, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glPopDebugGroupKHRContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glPopGroupMarkerEXTContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glProgramBinaryOESContextANGLE(GLeglContext ctx, GLuint program, GLenum binaryFormat, const void *binary, GLint length); +GL_APICALL void GL_APIENTRY glProgramParameteriEXTContextANGLE(GLeglContext ctx, GLuint program, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glProgramUniform1fEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glProgramUniform1fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform1iEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glProgramUniform1ivEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform1uiEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLuint v0); +GL_APICALL void GL_APIENTRY glProgramUniform1uivEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2fEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glProgramUniform2fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform2iEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glProgramUniform2ivEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2uiEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1); +GL_APICALL void GL_APIENTRY glProgramUniform2uivEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); 
+GL_APICALL void GL_APIENTRY glProgramUniform3fEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glProgramUniform3fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform3iEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glProgramUniform3ivEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform3uiEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +GL_APICALL void GL_APIENTRY glProgramUniform3uivEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4fEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glProgramUniform4fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform4iEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glProgramUniform4ivEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4uiEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GL_APICALL void GL_APIENTRY glProgramUniform4uivEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2x3fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2x4fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x2fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x4fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x2fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x3fvEXTContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glPushDebugGroupKHRContextANGLE(GLeglContext ctx, GLenum source, GLuint id, 
GLsizei length, const GLchar *message); +GL_APICALL void GL_APIENTRY glPushGroupMarkerEXTContextANGLE(GLeglContext ctx, GLsizei length, const GLchar *marker); +GL_APICALL void GL_APIENTRY glQueryCounterEXTContextANGLE(GLeglContext ctx, GLuint id, GLenum target); +GL_APICALL void GL_APIENTRY glReadnPixelsEXTContextANGLE(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleANGLEContextANGLE(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleEXTContextANGLE(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glSamplerParameterIivOESContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterIuivOESContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, const GLuint *param); +GL_APICALL void GL_APIENTRY glSemaphoreParameterui64vEXTContextANGLE(GLeglContext ctx, GLuint semaphore, GLenum pname, const GLuint64 *params); +GL_APICALL void GL_APIENTRY glSetFenceNVContextANGLE(GLeglContext ctx, GLuint fence, GLenum condition); +GL_APICALL void GL_APIENTRY glSignalSemaphoreEXTContextANGLE(GLeglContext ctx, GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *dstLayouts); +GL_APICALL GLboolean GL_APIENTRY glTestFenceNVContextANGLE(GLeglContext ctx, GLuint fence); +GL_APICALL void GL_APIENTRY glTexBufferEXTContextANGLE(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer); +GL_APICALL void GL_APIENTRY glTexBufferOESContextANGLE(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer); +GL_APICALL void GL_APIENTRY glTexBufferRangeEXTContextANGLE(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glTexBufferRangeOESContextANGLE(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glTexImage3DOESContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexParameterIivOESContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexParameterIuivOESContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, const GLuint *params); +GL_APICALL void GL_APIENTRY glTexStorage1DEXTContextANGLE(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GL_APICALL void GL_APIENTRY glTexStorage2DEXTContextANGLE(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage3DEXTContextANGLE(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GL_APICALL void GL_APIENTRY glTexStorage3DMultisampleOESContextANGLE(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +GL_APICALL void GL_APIENTRY glTexStorageMem2DEXTContextANGLE(GLeglContext ctx, GLenum target, GLsizei 
levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTexStorageMem2DMultisampleEXTContextANGLE(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTexStorageMem3DEXTContextANGLE(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTexStorageMem3DMultisampleEXTContextANGLE(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTexSubImage3DOESContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GL_APICALL GLboolean GL_APIENTRY glUnmapBufferOESContextANGLE(GLeglContext ctx, GLenum target); +GL_APICALL void GL_APIENTRY glUseProgramStagesEXTContextANGLE(GLeglContext ctx, GLuint pipeline, GLbitfield stages, GLuint program); +GL_APICALL void GL_APIENTRY glValidateProgramPipelineEXTContextANGLE(GLeglContext ctx, GLuint pipeline); +GL_APICALL void GL_APIENTRY glVertexAttribDivisorANGLEContextANGLE(GLeglContext ctx, GLuint index, GLuint divisor); +GL_APICALL void GL_APIENTRY glVertexAttribDivisorEXTContextANGLE(GLeglContext ctx, GLuint index, GLuint divisor); +GL_APICALL void GL_APIENTRY glWaitSemaphoreEXTContextANGLE(GLeglContext ctx, GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *srcLayouts); +GL_APICALL void GL_APIENTRY glBindUniformLocationCHROMIUMContextANGLE(GLeglContext ctx, GLuint program, GLint location, const GLchar* name); +GL_APICALL void GL_APIENTRY glCoverageModulationCHROMIUMContextANGLE(GLeglContext ctx, GLenum components); +GL_APICALL void GL_APIENTRY glCopyTextureCHROMIUMContextANGLE(GLeglContext ctx, GLuint sourceId, GLint sourceLevel, GLenum destTarget, GLuint destId, GLint destLevel, GLint internalFormat, GLenum destType, GLboolean unpackFlipY, GLboolean unpackPremultiplyAlpha, GLboolean unpackUnmultiplyAlpha); +GL_APICALL void GL_APIENTRY glCopySubTextureCHROMIUMContextANGLE(GLeglContext ctx, GLuint sourceId, GLint sourceLevel, GLenum destTarget, GLuint destId, GLint destLevel, GLint xoffset, GLint yoffset, GLint x, GLint y, GLint width, GLint height, GLboolean unpackFlipY, GLboolean unpackPremultiplyAlpha, GLboolean unpackUnmultiplyAlpha); +GL_APICALL void GL_APIENTRY glCompressedCopyTextureCHROMIUMContextANGLE(GLeglContext ctx, GLuint sourceId, GLuint destId); +GL_APICALL void GL_APIENTRY glRequestExtensionANGLEContextANGLE(GLeglContext ctx, const GLchar * name); +GL_APICALL void GL_APIENTRY glDisableExtensionANGLEContextANGLE(GLeglContext ctx, const GLchar * name); +GL_APICALL void GL_APIENTRY glGetBooleanvRobustANGLEContextANGLE(GLeglContext ctx, GLenum pname, GLsizei bufSize, GLsizei * length, GLboolean * params); +GL_APICALL void GL_APIENTRY glGetBufferParameterivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetFloatvRobustANGLEContextANGLE(GLeglContext ctx, GLenum pname, GLsizei bufSize, GLsizei * length, GLfloat * params); +GL_APICALL 
void GL_APIENTRY glGetFramebufferAttachmentParameterivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetIntegervRobustANGLEContextANGLE(GLeglContext ctx, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * data); +GL_APICALL void GL_APIENTRY glGetProgramivRobustANGLEContextANGLE(GLeglContext ctx, GLuint program, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetRenderbufferParameterivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetShaderivRobustANGLEContextANGLE(GLeglContext ctx, GLuint shader, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetTexParameterfvRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLfloat * params); +GL_APICALL void GL_APIENTRY glGetTexParameterivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetUniformfvRobustANGLEContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLfloat * params); +GL_APICALL void GL_APIENTRY glGetUniformivRobustANGLEContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetVertexAttribfvRobustANGLEContextANGLE(GLeglContext ctx, GLuint index, GLenum pname, GLsizei bufSize, GLsizei * length, GLfloat * params); +GL_APICALL void GL_APIENTRY glGetVertexAttribivRobustANGLEContextANGLE(GLeglContext ctx, GLuint index, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetVertexAttribPointervRobustANGLEContextANGLE(GLeglContext ctx, GLuint index, GLenum pname, GLsizei bufSize, GLsizei * length, void ** pointer); +GL_APICALL void GL_APIENTRY glReadPixelsRobustANGLEContextANGLE(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, GLsizei * length, GLsizei * columns, GLsizei * rows, void * pixels); +GL_APICALL void GL_APIENTRY glTexImage2DRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, GLsizei bufSize, const void * pixels); +GL_APICALL void GL_APIENTRY glTexParameterfvRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, const GLfloat * params); +GL_APICALL void GL_APIENTRY glTexParameterivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, const GLint * params); +GL_APICALL void GL_APIENTRY glTexSubImage2DRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, const void * pixels); +GL_APICALL void GL_APIENTRY glTexImage3DRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, GLsizei bufSize, const void * pixels); +GL_APICALL void GL_APIENTRY glTexSubImage3DRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei 
height, GLsizei depth, GLenum format, GLenum type, GLsizei bufSize, const void * pixels); +GL_APICALL void GL_APIENTRY glCompressedTexImage2DRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, GLsizei dataSize, const GLvoid * data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage2DRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLsizei xoffset, GLsizei yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, GLsizei dataSize, const GLvoid * data); +GL_APICALL void GL_APIENTRY glCompressedTexImage3DRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, GLsizei dataSize, const GLvoid * data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage3DRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, GLsizei dataSize, const GLvoid * data); +GL_APICALL void GL_APIENTRY glGetQueryivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetQueryObjectuivRobustANGLEContextANGLE(GLeglContext ctx, GLuint id, GLenum pname, GLsizei bufSize, GLsizei * length, GLuint * params); +GL_APICALL void GL_APIENTRY glGetBufferPointervRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, void ** params); +GL_APICALL void GL_APIENTRY glGetIntegeri_vRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLuint index, GLsizei bufSize, GLsizei * length, GLint * data); +GL_APICALL void GL_APIENTRY glGetInternalformativRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetVertexAttribIivRobustANGLEContextANGLE(GLeglContext ctx, GLuint index, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetVertexAttribIuivRobustANGLEContextANGLE(GLeglContext ctx, GLuint index, GLenum pname, GLsizei bufSize, GLsizei * length, GLuint * params); +GL_APICALL void GL_APIENTRY glGetUniformuivRobustANGLEContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLuint * params); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockivRobustANGLEContextANGLE(GLeglContext ctx, GLuint program, GLuint uniformBlockIndex, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetInteger64vRobustANGLEContextANGLE(GLeglContext ctx, GLenum pname, GLsizei bufSize, GLsizei * length, GLint64 * data); +GL_APICALL void GL_APIENTRY glGetInteger64i_vRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLuint index, GLsizei bufSize, GLsizei * length, GLint64 * data); +GL_APICALL void GL_APIENTRY glGetBufferParameteri64vRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint64 * params); +GL_APICALL void GL_APIENTRY glSamplerParameterivRobustANGLEContextANGLE(GLeglContext ctx, GLuint sampler, GLuint pname, GLsizei bufSize, const GLint * param); +GL_APICALL void GL_APIENTRY glSamplerParameterfvRobustANGLEContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, const GLfloat * 
param); +GL_APICALL void GL_APIENTRY glGetSamplerParameterivRobustANGLEContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterfvRobustANGLEContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei * length, GLfloat * params); +GL_APICALL void GL_APIENTRY glGetFramebufferParameterivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetProgramInterfaceivRobustANGLEContextANGLE(GLeglContext ctx, GLuint program, GLenum programInterface, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetBooleani_vRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLuint index, GLsizei bufSize, GLsizei * length, GLboolean * data); +GL_APICALL void GL_APIENTRY glGetMultisamplefvRobustANGLEContextANGLE(GLeglContext ctx, GLenum pname, GLuint index, GLsizei bufSize, GLsizei * length, GLfloat * val); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterfvRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLsizei bufSize, GLsizei * length, GLfloat * params); +GL_APICALL void GL_APIENTRY glGetPointervRobustANGLERobustANGLEContextANGLE(GLeglContext ctx, GLenum pname, GLsizei bufSize, GLsizei * length, void ** params); +GL_APICALL void GL_APIENTRY glReadnPixelsRobustANGLEContextANGLE(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, GLsizei * length, GLsizei * columns, GLsizei * rows, void * data); +GL_APICALL void GL_APIENTRY glGetnUniformfvRobustANGLEContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLfloat * params); +GL_APICALL void GL_APIENTRY glGetnUniformivRobustANGLEContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetnUniformuivRobustANGLEContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLsizei * length, GLuint * params); +GL_APICALL void GL_APIENTRY glTexParameterIivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, const GLint * params); +GL_APICALL void GL_APIENTRY glTexParameterIuivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, const GLuint * params); +GL_APICALL void GL_APIENTRY glGetTexParameterIivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetTexParameterIuivRobustANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLsizei bufSize, GLsizei * length, GLuint * params); +GL_APICALL void GL_APIENTRY glSamplerParameterIivRobustANGLEContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, const GLint * param); +GL_APICALL void GL_APIENTRY glSamplerParameterIuivRobustANGLEContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, const GLuint * param); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIivRobustANGLEContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL 
void GL_APIENTRY glGetSamplerParameterIuivRobustANGLEContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLsizei bufSize, GLsizei * length, GLuint * params); +GL_APICALL void GL_APIENTRY glGetQueryObjectivRobustANGLEContextANGLE(GLeglContext ctx, GLuint id, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * params); +GL_APICALL void GL_APIENTRY glGetQueryObjecti64vRobustANGLEContextANGLE(GLeglContext ctx, GLuint id, GLenum pname, GLsizei bufSize, GLsizei * length, GLint64 * params); +GL_APICALL void GL_APIENTRY glGetQueryObjectui64vRobustANGLEContextANGLE(GLeglContext ctx, GLuint id, GLenum pname, GLsizei bufSize, GLsizei * length, GLuint64 * params); +GL_APICALL void GL_APIENTRY glCopyTexture3DANGLEContextANGLE(GLeglContext ctx, GLuint sourceId, GLint sourceLevel, GLenum destTarget, GLuint destId, GLint destLevel, GLint internalFormat, GLenum destType, GLboolean unpackFlipY, GLboolean unpackPremultiplyAlpha, GLboolean unpackUnmultiplyAlpha); +GL_APICALL void GL_APIENTRY glCopySubTexture3DANGLEContextANGLE(GLeglContext ctx, GLuint sourceId, GLint sourceLevel, GLenum destTarget, GLuint destId, GLint destLevel, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLint z, GLint width, GLint height, GLint depth, GLboolean unpackFlipY, GLboolean unpackPremultiplyAlpha, GLboolean unpackUnmultiplyAlpha); +GL_APICALL void GL_APIENTRY glTexStorage2DMultisampleANGLEContextANGLE(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterivANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterfvANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLfloat * params); +GL_APICALL void GL_APIENTRY glMultiDrawArraysANGLEContextANGLE(GLeglContext ctx, GLenum mode, const GLint *firsts, const GLsizei *counts, GLsizei drawcount); +GL_APICALL void GL_APIENTRY glMultiDrawArraysInstancedANGLEContextANGLE(GLeglContext ctx, GLenum mode, const GLint *firsts, const GLsizei *counts, const GLsizei *instanceCounts, GLsizei drawcount); +GL_APICALL void GL_APIENTRY glMultiDrawElementsANGLEContextANGLE(GLeglContext ctx, GLenum mode, const GLsizei *counts, GLenum type, const GLvoid *const*indices, GLsizei drawcount); +GL_APICALL void GL_APIENTRY glMultiDrawElementsInstancedANGLEContextANGLE(GLeglContext ctx, GLenum mode, const GLsizei *counts, GLenum type, const GLvoid *const*indices, const GLsizei*instanceCounts, GLsizei drawcount); +GL_APICALL void GL_APIENTRY glDrawArraysInstancedBaseInstanceANGLEContextANGLE(GLeglContext ctx, GLenum mode, GLint first, GLsizei count, GLsizei instanceCount, GLuint baseInstance); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseVertexBaseInstanceANGLEContextANGLE(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const GLvoid *indices, GLsizei instanceCounts, GLint baseVertex, GLuint baseInstance); +GL_APICALL void GL_APIENTRY glMultiDrawArraysInstancedBaseInstanceANGLEContextANGLE(GLeglContext ctx, GLenum mode, const GLint *firsts, const GLsizei *counts, const GLsizei *instanceCounts, const GLuint *baseInstances, GLsizei drawcount); +GL_APICALL void GL_APIENTRY glMultiDrawElementsInstancedBaseVertexBaseInstanceANGLEContextANGLE(GLeglContext ctx, GLenum mode, const GLsizei *counts, GLenum type, const GLvoid *const*indices, const GLsizei *instanceCounts, const GLint *baseVertices, const GLuint *baseInstances, 
GLsizei drawcount); +GL_APICALL void GL_APIENTRY glGetMultisamplefvANGLEContextANGLE(GLeglContext ctx, GLenum pname, GLuint index, GLfloat * val); +GL_APICALL void GL_APIENTRY glSampleMaskiANGLEContextANGLE(GLeglContext ctx, GLuint maskNumber, GLbitfield mask); +GL_APICALL void GL_APIENTRY glProvokingVertexANGLEContextANGLE(GLeglContext ctx, GLenum mode); +GL_APICALL void GL_APIENTRY glLoseContextCHROMIUMContextANGLE(GLeglContext ctx, GLenum current, GLenum other); +GL_APICALL void GL_APIENTRY glTexImage2DExternalANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type); +GL_APICALL void GL_APIENTRY glInvalidateTextureANGLEContextANGLE(GLeglContext ctx, GLenum target); +GL_APICALL void GL_APIENTRY glGetTexImageANGLEContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +GL_APICALL void GL_APIENTRY glGetRenderbufferImageANGLEContextANGLE(GLeglContext ctx, GLenum target, GLenum format, GLenum type, void *pixels); +GL_APICALL void GL_APIENTRY glTexStorageMemFlags2DANGLEContextANGLE(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +GL_APICALL void GL_APIENTRY glTexStorageMemFlags2DMultisampleANGLEContextANGLE(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +GL_APICALL void GL_APIENTRY glTexStorageMemFlags3DANGLEContextANGLE(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +GL_APICALL void GL_APIENTRY glTexStorageMemFlags3DMultisampleANGLEContextANGLE(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset, GLbitfield createFlags, GLbitfield usageFlags); +GL_APICALL void GL_APIENTRY glImportMemoryZirconHandleANGLEContextANGLE(GLeglContext ctx, GLuint memory, GLuint64 size, GLenum handleType, GLuint handle); +GL_APICALL void GL_APIENTRY glImportSemaphoreZirconHandleANGLEContextANGLE(GLeglContext ctx, GLuint semaphore, GLenum handleType, GLuint handle); +#endif diff --git a/vendor/angle/include/GLES2/gl2platform.h b/vendor/angle/include/GLES2/gl2platform.h new file mode 100644 index 00000000..5bcce6d8 --- /dev/null +++ b/vendor/angle/include/GLES2/gl2platform.h @@ -0,0 +1,27 @@ +#ifndef __gl2platform_h_ +#define __gl2platform_h_ + +/* +** Copyright 2017-2020 The Khronos Group Inc. +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* Platform-specific types and definitions for OpenGL ES 2.X gl2.h + * + * Adopters may modify khrplatform.h and this file to suit their platform. 
+ * Please contribute modifications back to Khronos as pull requests on the + * public github repository: + * https://github.com/KhronosGroup/OpenGL-Registry + */ + +#include <KHR/khrplatform.h> + +#ifndef GL_APICALL +#define GL_APICALL KHRONOS_APICALL +#endif + +#ifndef GL_APIENTRY +#define GL_APIENTRY KHRONOS_APIENTRY +#endif + +#endif /* __gl2platform_h_ */ diff --git a/vendor/angle/include/GLES3/.clang-format b/vendor/angle/include/GLES3/.clang-format new file mode 100644 index 00000000..06147100 --- /dev/null +++ b/vendor/angle/include/GLES3/.clang-format @@ -0,0 +1,3 @@ + + +DisableFormat: true diff --git a/vendor/angle/include/GLES3/gl3.h b/vendor/angle/include/GLES3/gl3.h new file mode 100644 index 00000000..6bb4d8f9 --- /dev/null +++ b/vendor/angle/include/GLES3/gl3.h @@ -0,0 +1,1192 @@ +#ifndef __gles2_gl3_h_ +#define __gles2_gl3_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright 2013-2020 The Khronos Group Inc. +** SPDX-License-Identifier: MIT +** +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#include <GLES3/gl3platform.h> + +#ifndef GL_APIENTRYP +#define GL_APIENTRYP GL_APIENTRY* +#endif + +#ifndef GL_GLES_PROTOTYPES +#define GL_GLES_PROTOTYPES 1 +#endif + +/* Generated on date 20210107 */ + +/* Generated C header for: + * API: gles2 + * Profile: common + * Versions considered: 2\.[0-9]|3\.0 + * Versions emitted: .* + * Default extensions included: None + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_ES_VERSION_2_0 +#define GL_ES_VERSION_2_0 1 +#include <KHR/khrplatform.h> +typedef khronos_int8_t GLbyte; +typedef khronos_float_t GLclampf; +typedef khronos_int32_t GLfixed; +typedef khronos_int16_t GLshort; +typedef khronos_uint16_t GLushort; +typedef void GLvoid; +typedef struct __GLsync *GLsync; +typedef khronos_int64_t GLint64; +typedef khronos_uint64_t GLuint64; +typedef unsigned int GLenum; +typedef unsigned int GLuint; +typedef char GLchar; +typedef khronos_float_t GLfloat; +typedef khronos_ssize_t GLsizeiptr; +typedef khronos_intptr_t GLintptr; +typedef unsigned int GLbitfield; +typedef int GLint; +typedef unsigned char GLboolean; +typedef int GLsizei; +typedef khronos_uint8_t GLubyte; +#define GL_DEPTH_BUFFER_BIT 0x00000100 +#define GL_STENCIL_BUFFER_BIT 0x00000400 +#define GL_COLOR_BUFFER_BIT 0x00004000 +#define GL_FALSE 0 +#define GL_TRUE 1 +#define GL_POINTS 0x0000 +#define GL_LINES 0x0001 +#define GL_LINE_LOOP 0x0002 +#define GL_LINE_STRIP 0x0003 +#define GL_TRIANGLES 0x0004 +#define GL_TRIANGLE_STRIP 0x0005 +#define GL_TRIANGLE_FAN 0x0006 +#define GL_ZERO 0 +#define GL_ONE 1 +#define GL_SRC_COLOR 0x0300 +#define GL_ONE_MINUS_SRC_COLOR 0x0301 +#define GL_SRC_ALPHA 0x0302 +#define GL_ONE_MINUS_SRC_ALPHA 0x0303 +#define GL_DST_ALPHA 0x0304 +#define GL_ONE_MINUS_DST_ALPHA 0x0305 +#define GL_DST_COLOR 0x0306 +#define GL_ONE_MINUS_DST_COLOR 0x0307 +#define GL_SRC_ALPHA_SATURATE 0x0308 +#define GL_FUNC_ADD 0x8006 +#define GL_BLEND_EQUATION 0x8009 +#define GL_BLEND_EQUATION_RGB 0x8009 +#define GL_BLEND_EQUATION_ALPHA 0x883D +#define GL_FUNC_SUBTRACT 0x800A +#define GL_FUNC_REVERSE_SUBTRACT 0x800B +#define GL_BLEND_DST_RGB 0x80C8 +#define GL_BLEND_SRC_RGB 0x80C9 +#define GL_BLEND_DST_ALPHA 0x80CA +#define GL_BLEND_SRC_ALPHA 0x80CB +#define GL_CONSTANT_COLOR 0x8001 +#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002 +#define GL_CONSTANT_ALPHA 0x8003 +#define
GL_ONE_MINUS_CONSTANT_ALPHA 0x8004 +#define GL_BLEND_COLOR 0x8005 +#define GL_ARRAY_BUFFER 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define GL_ARRAY_BUFFER_BINDING 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895 +#define GL_STREAM_DRAW 0x88E0 +#define GL_STATIC_DRAW 0x88E4 +#define GL_DYNAMIC_DRAW 0x88E8 +#define GL_BUFFER_SIZE 0x8764 +#define GL_BUFFER_USAGE 0x8765 +#define GL_CURRENT_VERTEX_ATTRIB 0x8626 +#define GL_FRONT 0x0404 +#define GL_BACK 0x0405 +#define GL_FRONT_AND_BACK 0x0408 +#define GL_TEXTURE_2D 0x0DE1 +#define GL_CULL_FACE 0x0B44 +#define GL_BLEND 0x0BE2 +#define GL_DITHER 0x0BD0 +#define GL_STENCIL_TEST 0x0B90 +#define GL_DEPTH_TEST 0x0B71 +#define GL_SCISSOR_TEST 0x0C11 +#define GL_POLYGON_OFFSET_FILL 0x8037 +#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E +#define GL_SAMPLE_COVERAGE 0x80A0 +#define GL_NO_ERROR 0 +#define GL_INVALID_ENUM 0x0500 +#define GL_INVALID_VALUE 0x0501 +#define GL_INVALID_OPERATION 0x0502 +#define GL_OUT_OF_MEMORY 0x0505 +#define GL_CW 0x0900 +#define GL_CCW 0x0901 +#define GL_LINE_WIDTH 0x0B21 +#define GL_ALIASED_POINT_SIZE_RANGE 0x846D +#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E +#define GL_CULL_FACE_MODE 0x0B45 +#define GL_FRONT_FACE 0x0B46 +#define GL_DEPTH_RANGE 0x0B70 +#define GL_DEPTH_WRITEMASK 0x0B72 +#define GL_DEPTH_CLEAR_VALUE 0x0B73 +#define GL_DEPTH_FUNC 0x0B74 +#define GL_STENCIL_CLEAR_VALUE 0x0B91 +#define GL_STENCIL_FUNC 0x0B92 +#define GL_STENCIL_FAIL 0x0B94 +#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95 +#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96 +#define GL_STENCIL_REF 0x0B97 +#define GL_STENCIL_VALUE_MASK 0x0B93 +#define GL_STENCIL_WRITEMASK 0x0B98 +#define GL_STENCIL_BACK_FUNC 0x8800 +#define GL_STENCIL_BACK_FAIL 0x8801 +#define GL_STENCIL_BACK_PASS_DEPTH_FAIL 0x8802 +#define GL_STENCIL_BACK_PASS_DEPTH_PASS 0x8803 +#define GL_STENCIL_BACK_REF 0x8CA3 +#define GL_STENCIL_BACK_VALUE_MASK 0x8CA4 +#define GL_STENCIL_BACK_WRITEMASK 0x8CA5 +#define GL_VIEWPORT 0x0BA2 +#define GL_SCISSOR_BOX 0x0C10 +#define GL_COLOR_CLEAR_VALUE 0x0C22 +#define GL_COLOR_WRITEMASK 0x0C23 +#define GL_UNPACK_ALIGNMENT 0x0CF5 +#define GL_PACK_ALIGNMENT 0x0D05 +#define GL_MAX_TEXTURE_SIZE 0x0D33 +#define GL_MAX_VIEWPORT_DIMS 0x0D3A +#define GL_SUBPIXEL_BITS 0x0D50 +#define GL_RED_BITS 0x0D52 +#define GL_GREEN_BITS 0x0D53 +#define GL_BLUE_BITS 0x0D54 +#define GL_ALPHA_BITS 0x0D55 +#define GL_DEPTH_BITS 0x0D56 +#define GL_STENCIL_BITS 0x0D57 +#define GL_POLYGON_OFFSET_UNITS 0x2A00 +#define GL_POLYGON_OFFSET_FACTOR 0x8038 +#define GL_TEXTURE_BINDING_2D 0x8069 +#define GL_SAMPLE_BUFFERS 0x80A8 +#define GL_SAMPLES 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT 0x80AB +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3 +#define GL_DONT_CARE 0x1100 +#define GL_FASTEST 0x1101 +#define GL_NICEST 0x1102 +#define GL_GENERATE_MIPMAP_HINT 0x8192 +#define GL_BYTE 0x1400 +#define GL_UNSIGNED_BYTE 0x1401 +#define GL_SHORT 0x1402 +#define GL_UNSIGNED_SHORT 0x1403 +#define GL_INT 0x1404 +#define GL_UNSIGNED_INT 0x1405 +#define GL_FLOAT 0x1406 +#define GL_FIXED 0x140C +#define GL_DEPTH_COMPONENT 0x1902 +#define GL_ALPHA 0x1906 +#define GL_RGB 0x1907 +#define GL_RGBA 0x1908 +#define GL_LUMINANCE 0x1909 +#define GL_LUMINANCE_ALPHA 0x190A +#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034 +#define GL_UNSIGNED_SHORT_5_6_5 0x8363 +#define GL_FRAGMENT_SHADER 0x8B30 +#define GL_VERTEX_SHADER 0x8B31 +#define GL_MAX_VERTEX_ATTRIBS 0x8869 +#define 
GL_MAX_VERTEX_UNIFORM_VECTORS 0x8DFB +#define GL_MAX_VARYING_VECTORS 0x8DFC +#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D +#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS 0x8B4C +#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872 +#define GL_MAX_FRAGMENT_UNIFORM_VECTORS 0x8DFD +#define GL_SHADER_TYPE 0x8B4F +#define GL_DELETE_STATUS 0x8B80 +#define GL_LINK_STATUS 0x8B82 +#define GL_VALIDATE_STATUS 0x8B83 +#define GL_ATTACHED_SHADERS 0x8B85 +#define GL_ACTIVE_UNIFORMS 0x8B86 +#define GL_ACTIVE_UNIFORM_MAX_LENGTH 0x8B87 +#define GL_ACTIVE_ATTRIBUTES 0x8B89 +#define GL_ACTIVE_ATTRIBUTE_MAX_LENGTH 0x8B8A +#define GL_SHADING_LANGUAGE_VERSION 0x8B8C +#define GL_CURRENT_PROGRAM 0x8B8D +#define GL_NEVER 0x0200 +#define GL_LESS 0x0201 +#define GL_EQUAL 0x0202 +#define GL_LEQUAL 0x0203 +#define GL_GREATER 0x0204 +#define GL_NOTEQUAL 0x0205 +#define GL_GEQUAL 0x0206 +#define GL_ALWAYS 0x0207 +#define GL_KEEP 0x1E00 +#define GL_REPLACE 0x1E01 +#define GL_INCR 0x1E02 +#define GL_DECR 0x1E03 +#define GL_INVERT 0x150A +#define GL_INCR_WRAP 0x8507 +#define GL_DECR_WRAP 0x8508 +#define GL_VENDOR 0x1F00 +#define GL_RENDERER 0x1F01 +#define GL_VERSION 0x1F02 +#define GL_EXTENSIONS 0x1F03 +#define GL_NEAREST 0x2600 +#define GL_LINEAR 0x2601 +#define GL_NEAREST_MIPMAP_NEAREST 0x2700 +#define GL_LINEAR_MIPMAP_NEAREST 0x2701 +#define GL_NEAREST_MIPMAP_LINEAR 0x2702 +#define GL_LINEAR_MIPMAP_LINEAR 0x2703 +#define GL_TEXTURE_MAG_FILTER 0x2800 +#define GL_TEXTURE_MIN_FILTER 0x2801 +#define GL_TEXTURE_WRAP_S 0x2802 +#define GL_TEXTURE_WRAP_T 0x2803 +#define GL_TEXTURE 0x1702 +#define GL_TEXTURE_CUBE_MAP 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 +#define GL_TEXTURE3 0x84C3 +#define GL_TEXTURE4 0x84C4 +#define GL_TEXTURE5 0x84C5 +#define GL_TEXTURE6 0x84C6 +#define GL_TEXTURE7 0x84C7 +#define GL_TEXTURE8 0x84C8 +#define GL_TEXTURE9 0x84C9 +#define GL_TEXTURE10 0x84CA +#define GL_TEXTURE11 0x84CB +#define GL_TEXTURE12 0x84CC +#define GL_TEXTURE13 0x84CD +#define GL_TEXTURE14 0x84CE +#define GL_TEXTURE15 0x84CF +#define GL_TEXTURE16 0x84D0 +#define GL_TEXTURE17 0x84D1 +#define GL_TEXTURE18 0x84D2 +#define GL_TEXTURE19 0x84D3 +#define GL_TEXTURE20 0x84D4 +#define GL_TEXTURE21 0x84D5 +#define GL_TEXTURE22 0x84D6 +#define GL_TEXTURE23 0x84D7 +#define GL_TEXTURE24 0x84D8 +#define GL_TEXTURE25 0x84D9 +#define GL_TEXTURE26 0x84DA +#define GL_TEXTURE27 0x84DB +#define GL_TEXTURE28 0x84DC +#define GL_TEXTURE29 0x84DD +#define GL_TEXTURE30 0x84DE +#define GL_TEXTURE31 0x84DF +#define GL_ACTIVE_TEXTURE 0x84E0 +#define GL_REPEAT 0x2901 +#define GL_CLAMP_TO_EDGE 0x812F +#define GL_MIRRORED_REPEAT 0x8370 +#define GL_FLOAT_VEC2 0x8B50 +#define GL_FLOAT_VEC3 0x8B51 +#define GL_FLOAT_VEC4 0x8B52 +#define GL_INT_VEC2 0x8B53 +#define GL_INT_VEC3 0x8B54 +#define GL_INT_VEC4 0x8B55 +#define GL_BOOL 0x8B56 +#define GL_BOOL_VEC2 0x8B57 +#define GL_BOOL_VEC3 0x8B58 +#define GL_BOOL_VEC4 0x8B59 +#define GL_FLOAT_MAT2 0x8B5A +#define GL_FLOAT_MAT3 0x8B5B +#define GL_FLOAT_MAT4 0x8B5C +#define GL_SAMPLER_2D 0x8B5E +#define GL_SAMPLER_CUBE 0x8B60 +#define GL_VERTEX_ATTRIB_ARRAY_ENABLED 0x8622 +#define GL_VERTEX_ATTRIB_ARRAY_SIZE 0x8623 
+#define GL_VERTEX_ATTRIB_ARRAY_STRIDE 0x8624 +#define GL_VERTEX_ATTRIB_ARRAY_TYPE 0x8625 +#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED 0x886A +#define GL_VERTEX_ATTRIB_ARRAY_POINTER 0x8645 +#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING 0x889F +#define GL_IMPLEMENTATION_COLOR_READ_TYPE 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT 0x8B9B +#define GL_COMPILE_STATUS 0x8B81 +#define GL_INFO_LOG_LENGTH 0x8B84 +#define GL_SHADER_SOURCE_LENGTH 0x8B88 +#define GL_SHADER_COMPILER 0x8DFA +#define GL_SHADER_BINARY_FORMATS 0x8DF8 +#define GL_NUM_SHADER_BINARY_FORMATS 0x8DF9 +#define GL_LOW_FLOAT 0x8DF0 +#define GL_MEDIUM_FLOAT 0x8DF1 +#define GL_HIGH_FLOAT 0x8DF2 +#define GL_LOW_INT 0x8DF3 +#define GL_MEDIUM_INT 0x8DF4 +#define GL_HIGH_INT 0x8DF5 +#define GL_FRAMEBUFFER 0x8D40 +#define GL_RENDERBUFFER 0x8D41 +#define GL_RGBA4 0x8056 +#define GL_RGB5_A1 0x8057 +#define GL_RGB565 0x8D62 +#define GL_DEPTH_COMPONENT16 0x81A5 +#define GL_STENCIL_INDEX8 0x8D48 +#define GL_RENDERBUFFER_WIDTH 0x8D42 +#define GL_RENDERBUFFER_HEIGHT 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT 0x8D44 +#define GL_RENDERBUFFER_RED_SIZE 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE 0x8D53 +#define GL_RENDERBUFFER_DEPTH_SIZE 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE 0x8D55 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE 0x8CD3 +#define GL_COLOR_ATTACHMENT0 0x8CE0 +#define GL_DEPTH_ATTACHMENT 0x8D00 +#define GL_STENCIL_ATTACHMENT 0x8D20 +#define GL_NONE 0 +#define GL_FRAMEBUFFER_COMPLETE 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS 0x8CD9 +#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD +#define GL_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_RENDERBUFFER_BINDING 0x8CA7 +#define GL_MAX_RENDERBUFFER_SIZE 0x84E8 +#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506 +typedef void (GL_APIENTRYP PFNGLACTIVETEXTUREPROC) (GLenum texture); +typedef void (GL_APIENTRYP PFNGLATTACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLBINDATTRIBLOCATIONPROC) (GLuint program, GLuint index, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLBINDFRAMEBUFFERPROC) (GLenum target, GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLBINDRENDERBUFFERPROC) (GLenum target, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLBINDTEXTUREPROC) (GLenum target, GLuint texture); +typedef void (GL_APIENTRYP PFNGLBLENDCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEPROC) (GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCPROC) (GLenum sfactor, GLenum dfactor); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEPROC) (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +typedef void (GL_APIENTRYP PFNGLBUFFERDATAPROC) (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +typedef void (GL_APIENTRYP PFNGLBUFFERSUBDATAPROC) (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +typedef GLenum (GL_APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSPROC) (GLenum target); +typedef void 
(GL_APIENTRYP PFNGLCLEARPROC) (GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHFPROC) (GLfloat d); +typedef void (GL_APIENTRYP PFNGLCLEARSTENCILPROC) (GLint s); +typedef void (GL_APIENTRYP PFNGLCOLORMASKPROC) (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +typedef void (GL_APIENTRYP PFNGLCOMPILESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOPYTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef GLuint (GL_APIENTRYP PFNGLCREATEPROGRAMPROC) (void); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROC) (GLenum type); +typedef void (GL_APIENTRYP PFNGLCULLFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLDELETEBUFFERSPROC) (GLsizei n, const GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLDELETEFRAMEBUFFERSPROC) (GLsizei n, const GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLDELETERENDERBUFFERSPROC) (GLsizei n, const GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLDELETESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLDELETETEXTURESPROC) (GLsizei n, const GLuint *textures); +typedef void (GL_APIENTRYP PFNGLDEPTHFUNCPROC) (GLenum func); +typedef void (GL_APIENTRYP PFNGLDEPTHMASKPROC) (GLboolean flag); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEFPROC) (GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLDETACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLDISABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLDISABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSPROC) (GLenum mode, GLint first, GLsizei count); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLENABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLENABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLFINISHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFLUSHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFERPROC) (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (GL_APIENTRYP PFNGLFRONTFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLGENBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLGENERATEMIPMAPPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGENFRAMEBUFFERSPROC) (GLsizei n, GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLGENRENDERBUFFERSPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLGENTEXTURESPROC) (GLsizei n, GLuint *textures); +typedef void 
(GL_APIENTRYP PFNGLGETACTIVEATTRIBPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETATTACHEDSHADERSPROC) (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +typedef GLint (GL_APIENTRYP PFNGLGETATTRIBLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANVPROC) (GLenum pname, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef GLenum (GL_APIENTRYP PFNGLGETERRORPROC) (void); +typedef void (GL_APIENTRYP PFNGLGETFLOATVPROC) (GLenum pname, GLfloat *data); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC) (GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERVPROC) (GLenum pname, GLint *data); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMIVPROC) (GLuint program, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINFOLOGPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERIVPROC) (GLuint shader, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERINFOLOGPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETSHADERPRECISIONFORMATPROC) (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +typedef void (GL_APIENTRYP PFNGLGETSHADERSOURCEPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGPROC) (GLenum name); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMFVPROC) (GLuint program, GLint location, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMIVPROC) (GLuint program, GLint location, GLint *params); +typedef GLint (GL_APIENTRYP PFNGLGETUNIFORMLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBFVPROC) (GLuint index, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVPROC) (GLuint index, GLenum pname, void **pointer); +typedef void (GL_APIENTRYP PFNGLHINTPROC) (GLenum target, GLenum mode); +typedef GLboolean (GL_APIENTRYP PFNGLISBUFFERPROC) (GLuint buffer); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDPROC) (GLenum cap); +typedef GLboolean (GL_APIENTRYP PFNGLISFRAMEBUFFERPROC) (GLuint framebuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPROC) (GLuint program); +typedef GLboolean (GL_APIENTRYP PFNGLISRENDERBUFFERPROC) (GLuint renderbuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISSHADERPROC) (GLuint shader); +typedef GLboolean (GL_APIENTRYP PFNGLISTEXTUREPROC) (GLuint texture); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHPROC) (GLfloat width); +typedef void (GL_APIENTRYP PFNGLLINKPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLPIXELSTOREIPROC) (GLenum pname, 
GLint param); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETPROC) (GLfloat factor, GLfloat units); +typedef void (GL_APIENTRYP PFNGLREADPIXELSPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +typedef void (GL_APIENTRYP PFNGLRELEASESHADERCOMPILERPROC) (void); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGEPROC) (GLfloat value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLSCISSORPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSHADERBINARYPROC) (GLsizei count, const GLuint *shaders, GLenum binaryFormat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLSHADERSOURCEPROC) (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCSEPARATEPROC) (GLenum face, GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKPROC) (GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKSEPARATEPROC) (GLenum face, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILOPPROC) (GLenum fail, GLenum zfail, GLenum zpass); +typedef void (GL_APIENTRYP PFNGLSTENCILOPSEPARATEPROC) (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFPROC) (GLenum target, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FPROC) (GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IPROC) (GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FPROC) (GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IPROC) (GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IPROC) (GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP 
PFNGLUNIFORM4FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IPROC) (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FPROC) (GLuint index, GLfloat x); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FPROC) (GLuint index, GLfloat x, GLfloat y); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBPOINTERPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLVIEWPORTPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glActiveTexture (GLenum texture); +GL_APICALL void GL_APIENTRY glAttachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glBindAttribLocation (GLuint program, GLuint index, const GLchar *name); +GL_APICALL void GL_APIENTRY glBindBuffer (GLenum target, GLuint buffer); +GL_APICALL void GL_APIENTRY glBindFramebuffer (GLenum target, GLuint framebuffer); +GL_APICALL void GL_APIENTRY glBindRenderbuffer (GLenum target, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glBindTexture (GLenum target, GLuint texture); +GL_APICALL void GL_APIENTRY glBlendColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glBlendEquation (GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparate (GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFunc (GLenum sfactor, GLenum dfactor); +GL_APICALL void GL_APIENTRY glBlendFuncSeparate (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +GL_APICALL void GL_APIENTRY glBufferData (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +GL_APICALL void GL_APIENTRY glBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +GL_APICALL GLenum GL_APIENTRY glCheckFramebufferStatus (GLenum target); +GL_APICALL void GL_APIENTRY glClear (GLbitfield mask); +GL_APICALL void GL_APIENTRY glClearColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glClearDepthf (GLfloat d); +GL_APICALL void GL_APIENTRY glClearStencil (GLint s); +GL_APICALL void GL_APIENTRY glColorMask (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +GL_APICALL void GL_APIENTRY 
glCompileShader (GLuint shader); +GL_APICALL void GL_APIENTRY glCompressedTexImage2D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCopyTexImage2D (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GL_APICALL void GL_APIENTRY glCopyTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL GLuint GL_APIENTRY glCreateProgram (void); +GL_APICALL GLuint GL_APIENTRY glCreateShader (GLenum type); +GL_APICALL void GL_APIENTRY glCullFace (GLenum mode); +GL_APICALL void GL_APIENTRY glDeleteBuffers (GLsizei n, const GLuint *buffers); +GL_APICALL void GL_APIENTRY glDeleteFramebuffers (GLsizei n, const GLuint *framebuffers); +GL_APICALL void GL_APIENTRY glDeleteProgram (GLuint program); +GL_APICALL void GL_APIENTRY glDeleteRenderbuffers (GLsizei n, const GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glDeleteShader (GLuint shader); +GL_APICALL void GL_APIENTRY glDeleteTextures (GLsizei n, const GLuint *textures); +GL_APICALL void GL_APIENTRY glDepthFunc (GLenum func); +GL_APICALL void GL_APIENTRY glDepthMask (GLboolean flag); +GL_APICALL void GL_APIENTRY glDepthRangef (GLfloat n, GLfloat f); +GL_APICALL void GL_APIENTRY glDetachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glDisable (GLenum cap); +GL_APICALL void GL_APIENTRY glDisableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glDrawArrays (GLenum mode, GLint first, GLsizei count); +GL_APICALL void GL_APIENTRY glDrawElements (GLenum mode, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glEnable (GLenum cap); +GL_APICALL void GL_APIENTRY glEnableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glFinish (void); +GL_APICALL void GL_APIENTRY glFlush (void); +GL_APICALL void GL_APIENTRY glFramebufferRenderbuffer (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glFramebufferTexture2D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GL_APICALL void GL_APIENTRY glFrontFace (GLenum mode); +GL_APICALL void GL_APIENTRY glGenBuffers (GLsizei n, GLuint *buffers); +GL_APICALL void GL_APIENTRY glGenerateMipmap (GLenum target); +GL_APICALL void GL_APIENTRY glGenFramebuffers (GLsizei n, GLuint *framebuffers); +GL_APICALL void GL_APIENTRY glGenRenderbuffers (GLsizei n, GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glGenTextures (GLsizei n, GLuint *textures); +GL_APICALL void GL_APIENTRY glGetActiveAttrib (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetActiveUniform (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetAttachedShaders (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +GL_APICALL GLint GL_APIENTRY glGetAttribLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetBooleanv (GLenum pname, GLboolean *data); +GL_APICALL void GL_APIENTRY glGetBufferParameteriv (GLenum target, GLenum pname, 
GLint *params); +GL_APICALL GLenum GL_APIENTRY glGetError (void); +GL_APICALL void GL_APIENTRY glGetFloatv (GLenum pname, GLfloat *data); +GL_APICALL void GL_APIENTRY glGetFramebufferAttachmentParameteriv (GLenum target, GLenum attachment, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetIntegerv (GLenum pname, GLint *data); +GL_APICALL void GL_APIENTRY glGetProgramiv (GLuint program, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetProgramInfoLog (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetRenderbufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderiv (GLuint shader, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderInfoLog (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetShaderPrecisionFormat (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +GL_APICALL void GL_APIENTRY glGetShaderSource (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +GL_APICALL const GLubyte *GL_APIENTRY glGetString (GLenum name); +GL_APICALL void GL_APIENTRY glGetTexParameterfv (GLenum target, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetTexParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetUniformfv (GLuint program, GLint location, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetUniformiv (GLuint program, GLint location, GLint *params); +GL_APICALL GLint GL_APIENTRY glGetUniformLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetVertexAttribfv (GLuint index, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribiv (GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribPointerv (GLuint index, GLenum pname, void **pointer); +GL_APICALL void GL_APIENTRY glHint (GLenum target, GLenum mode); +GL_APICALL GLboolean GL_APIENTRY glIsBuffer (GLuint buffer); +GL_APICALL GLboolean GL_APIENTRY glIsEnabled (GLenum cap); +GL_APICALL GLboolean GL_APIENTRY glIsFramebuffer (GLuint framebuffer); +GL_APICALL GLboolean GL_APIENTRY glIsProgram (GLuint program); +GL_APICALL GLboolean GL_APIENTRY glIsRenderbuffer (GLuint renderbuffer); +GL_APICALL GLboolean GL_APIENTRY glIsShader (GLuint shader); +GL_APICALL GLboolean GL_APIENTRY glIsTexture (GLuint texture); +GL_APICALL void GL_APIENTRY glLineWidth (GLfloat width); +GL_APICALL void GL_APIENTRY glLinkProgram (GLuint program); +GL_APICALL void GL_APIENTRY glPixelStorei (GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glPolygonOffset (GLfloat factor, GLfloat units); +GL_APICALL void GL_APIENTRY glReadPixels (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +GL_APICALL void GL_APIENTRY glReleaseShaderCompiler (void); +GL_APICALL void GL_APIENTRY glRenderbufferStorage (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glSampleCoverage (GLfloat value, GLboolean invert); +GL_APICALL void GL_APIENTRY glScissor (GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glShaderBinary (GLsizei count, const GLuint *shaders, GLenum binaryFormat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glShaderSource (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +GL_APICALL void GL_APIENTRY glStencilFunc (GLenum func, GLint ref, GLuint 
mask); +GL_APICALL void GL_APIENTRY glStencilFuncSeparate (GLenum face, GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMask (GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMaskSeparate (GLenum face, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilOp (GLenum fail, GLenum zfail, GLenum zpass); +GL_APICALL void GL_APIENTRY glStencilOpSeparate (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +GL_APICALL void GL_APIENTRY glTexImage2D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexParameterf (GLenum target, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glTexParameterfv (GLenum target, GLenum pname, const GLfloat *params); +GL_APICALL void GL_APIENTRY glTexParameteri (GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glTexParameteriv (GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glUniform1f (GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glUniform1fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform1i (GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glUniform1iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform2f (GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glUniform2fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform2i (GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glUniform2iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform3f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glUniform3fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform3i (GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glUniform3iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform4f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glUniform4fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform4i (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glUniform4iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUseProgram (GLuint program); +GL_APICALL void GL_APIENTRY glValidateProgram (GLuint program); +GL_APICALL void GL_APIENTRY glVertexAttrib1f (GLuint index, GLfloat x); +GL_APICALL void GL_APIENTRY glVertexAttrib1fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib2f (GLuint index, GLfloat x, GLfloat y); +GL_APICALL void GL_APIENTRY glVertexAttrib2fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib3f (GLuint index, GLfloat x, GLfloat y, GLfloat 
z); +GL_APICALL void GL_APIENTRY glVertexAttrib3fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib4f (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GL_APICALL void GL_APIENTRY glVertexAttrib4fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttribPointer (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +GL_APICALL void GL_APIENTRY glViewport (GLint x, GLint y, GLsizei width, GLsizei height); +#endif +#endif /* GL_ES_VERSION_2_0 */ + +#ifndef GL_ES_VERSION_3_0 +#define GL_ES_VERSION_3_0 1 +typedef khronos_uint16_t GLhalf; +#define GL_READ_BUFFER 0x0C02 +#define GL_UNPACK_ROW_LENGTH 0x0CF2 +#define GL_UNPACK_SKIP_ROWS 0x0CF3 +#define GL_UNPACK_SKIP_PIXELS 0x0CF4 +#define GL_PACK_ROW_LENGTH 0x0D02 +#define GL_PACK_SKIP_ROWS 0x0D03 +#define GL_PACK_SKIP_PIXELS 0x0D04 +#define GL_COLOR 0x1800 +#define GL_DEPTH 0x1801 +#define GL_STENCIL 0x1802 +#define GL_RED 0x1903 +#define GL_RGB8 0x8051 +#define GL_RGBA8 0x8058 +#define GL_RGB10_A2 0x8059 +#define GL_TEXTURE_BINDING_3D 0x806A +#define GL_UNPACK_SKIP_IMAGES 0x806D +#define GL_UNPACK_IMAGE_HEIGHT 0x806E +#define GL_TEXTURE_3D 0x806F +#define GL_TEXTURE_WRAP_R 0x8072 +#define GL_MAX_3D_TEXTURE_SIZE 0x8073 +#define GL_UNSIGNED_INT_2_10_10_10_REV 0x8368 +#define GL_MAX_ELEMENTS_VERTICES 0x80E8 +#define GL_MAX_ELEMENTS_INDICES 0x80E9 +#define GL_TEXTURE_MIN_LOD 0x813A +#define GL_TEXTURE_MAX_LOD 0x813B +#define GL_TEXTURE_BASE_LEVEL 0x813C +#define GL_TEXTURE_MAX_LEVEL 0x813D +#define GL_MIN 0x8007 +#define GL_MAX 0x8008 +#define GL_DEPTH_COMPONENT24 0x81A6 +#define GL_MAX_TEXTURE_LOD_BIAS 0x84FD +#define GL_TEXTURE_COMPARE_MODE 0x884C +#define GL_TEXTURE_COMPARE_FUNC 0x884D +#define GL_CURRENT_QUERY 0x8865 +#define GL_QUERY_RESULT 0x8866 +#define GL_QUERY_RESULT_AVAILABLE 0x8867 +#define GL_BUFFER_MAPPED 0x88BC +#define GL_BUFFER_MAP_POINTER 0x88BD +#define GL_STREAM_READ 0x88E1 +#define GL_STREAM_COPY 0x88E2 +#define GL_STATIC_READ 0x88E5 +#define GL_STATIC_COPY 0x88E6 +#define GL_DYNAMIC_READ 0x88E9 +#define GL_DYNAMIC_COPY 0x88EA +#define GL_MAX_DRAW_BUFFERS 0x8824 +#define GL_DRAW_BUFFER0 0x8825 +#define GL_DRAW_BUFFER1 0x8826 +#define GL_DRAW_BUFFER2 0x8827 +#define GL_DRAW_BUFFER3 0x8828 +#define GL_DRAW_BUFFER4 0x8829 +#define GL_DRAW_BUFFER5 0x882A +#define GL_DRAW_BUFFER6 0x882B +#define GL_DRAW_BUFFER7 0x882C +#define GL_DRAW_BUFFER8 0x882D +#define GL_DRAW_BUFFER9 0x882E +#define GL_DRAW_BUFFER10 0x882F +#define GL_DRAW_BUFFER11 0x8830 +#define GL_DRAW_BUFFER12 0x8831 +#define GL_DRAW_BUFFER13 0x8832 +#define GL_DRAW_BUFFER14 0x8833 +#define GL_DRAW_BUFFER15 0x8834 +#define GL_MAX_FRAGMENT_UNIFORM_COMPONENTS 0x8B49 +#define GL_MAX_VERTEX_UNIFORM_COMPONENTS 0x8B4A +#define GL_SAMPLER_3D 0x8B5F +#define GL_SAMPLER_2D_SHADOW 0x8B62 +#define GL_FRAGMENT_SHADER_DERIVATIVE_HINT 0x8B8B +#define GL_PIXEL_PACK_BUFFER 0x88EB +#define GL_PIXEL_UNPACK_BUFFER 0x88EC +#define GL_PIXEL_PACK_BUFFER_BINDING 0x88ED +#define GL_PIXEL_UNPACK_BUFFER_BINDING 0x88EF +#define GL_FLOAT_MAT2x3 0x8B65 +#define GL_FLOAT_MAT2x4 0x8B66 +#define GL_FLOAT_MAT3x2 0x8B67 +#define GL_FLOAT_MAT3x4 0x8B68 +#define GL_FLOAT_MAT4x2 0x8B69 +#define GL_FLOAT_MAT4x3 0x8B6A +#define GL_SRGB 0x8C40 +#define GL_SRGB8 0x8C41 +#define GL_SRGB8_ALPHA8 0x8C43 +#define GL_COMPARE_REF_TO_TEXTURE 0x884E +#define GL_MAJOR_VERSION 0x821B +#define GL_MINOR_VERSION 0x821C +#define GL_NUM_EXTENSIONS 0x821D +#define GL_RGBA32F 0x8814 +#define GL_RGB32F 0x8815 +#define 
GL_RGBA16F 0x881A +#define GL_RGB16F 0x881B +#define GL_VERTEX_ATTRIB_ARRAY_INTEGER 0x88FD +#define GL_MAX_ARRAY_TEXTURE_LAYERS 0x88FF +#define GL_MIN_PROGRAM_TEXEL_OFFSET 0x8904 +#define GL_MAX_PROGRAM_TEXEL_OFFSET 0x8905 +#define GL_MAX_VARYING_COMPONENTS 0x8B4B +#define GL_TEXTURE_2D_ARRAY 0x8C1A +#define GL_TEXTURE_BINDING_2D_ARRAY 0x8C1D +#define GL_R11F_G11F_B10F 0x8C3A +#define GL_UNSIGNED_INT_10F_11F_11F_REV 0x8C3B +#define GL_RGB9_E5 0x8C3D +#define GL_UNSIGNED_INT_5_9_9_9_REV 0x8C3E +#define GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH 0x8C76 +#define GL_TRANSFORM_FEEDBACK_BUFFER_MODE 0x8C7F +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS 0x8C80 +#define GL_TRANSFORM_FEEDBACK_VARYINGS 0x8C83 +#define GL_TRANSFORM_FEEDBACK_BUFFER_START 0x8C84 +#define GL_TRANSFORM_FEEDBACK_BUFFER_SIZE 0x8C85 +#define GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN 0x8C88 +#define GL_RASTERIZER_DISCARD 0x8C89 +#define GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS 0x8C8A +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS 0x8C8B +#define GL_INTERLEAVED_ATTRIBS 0x8C8C +#define GL_SEPARATE_ATTRIBS 0x8C8D +#define GL_TRANSFORM_FEEDBACK_BUFFER 0x8C8E +#define GL_TRANSFORM_FEEDBACK_BUFFER_BINDING 0x8C8F +#define GL_RGBA32UI 0x8D70 +#define GL_RGB32UI 0x8D71 +#define GL_RGBA16UI 0x8D76 +#define GL_RGB16UI 0x8D77 +#define GL_RGBA8UI 0x8D7C +#define GL_RGB8UI 0x8D7D +#define GL_RGBA32I 0x8D82 +#define GL_RGB32I 0x8D83 +#define GL_RGBA16I 0x8D88 +#define GL_RGB16I 0x8D89 +#define GL_RGBA8I 0x8D8E +#define GL_RGB8I 0x8D8F +#define GL_RED_INTEGER 0x8D94 +#define GL_RGB_INTEGER 0x8D98 +#define GL_RGBA_INTEGER 0x8D99 +#define GL_SAMPLER_2D_ARRAY 0x8DC1 +#define GL_SAMPLER_2D_ARRAY_SHADOW 0x8DC4 +#define GL_SAMPLER_CUBE_SHADOW 0x8DC5 +#define GL_UNSIGNED_INT_VEC2 0x8DC6 +#define GL_UNSIGNED_INT_VEC3 0x8DC7 +#define GL_UNSIGNED_INT_VEC4 0x8DC8 +#define GL_INT_SAMPLER_2D 0x8DCA +#define GL_INT_SAMPLER_3D 0x8DCB +#define GL_INT_SAMPLER_CUBE 0x8DCC +#define GL_INT_SAMPLER_2D_ARRAY 0x8DCF +#define GL_UNSIGNED_INT_SAMPLER_2D 0x8DD2 +#define GL_UNSIGNED_INT_SAMPLER_3D 0x8DD3 +#define GL_UNSIGNED_INT_SAMPLER_CUBE 0x8DD4 +#define GL_UNSIGNED_INT_SAMPLER_2D_ARRAY 0x8DD7 +#define GL_BUFFER_ACCESS_FLAGS 0x911F +#define GL_BUFFER_MAP_LENGTH 0x9120 +#define GL_BUFFER_MAP_OFFSET 0x9121 +#define GL_DEPTH_COMPONENT32F 0x8CAC +#define GL_DEPTH32F_STENCIL8 0x8CAD +#define GL_FLOAT_32_UNSIGNED_INT_24_8_REV 0x8DAD +#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING 0x8210 +#define GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE 0x8211 +#define GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE 0x8212 +#define GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE 0x8213 +#define GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE 0x8214 +#define GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE 0x8215 +#define GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE 0x8216 +#define GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE 0x8217 +#define GL_FRAMEBUFFER_DEFAULT 0x8218 +#define GL_FRAMEBUFFER_UNDEFINED 0x8219 +#define GL_DEPTH_STENCIL_ATTACHMENT 0x821A +#define GL_DEPTH_STENCIL 0x84F9 +#define GL_UNSIGNED_INT_24_8 0x84FA +#define GL_DEPTH24_STENCIL8 0x88F0 +#define GL_UNSIGNED_NORMALIZED 0x8C17 +#define GL_DRAW_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_READ_FRAMEBUFFER 0x8CA8 +#define GL_DRAW_FRAMEBUFFER 0x8CA9 +#define GL_READ_FRAMEBUFFER_BINDING 0x8CAA +#define GL_RENDERBUFFER_SAMPLES 0x8CAB +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER 0x8CD4 +#define GL_MAX_COLOR_ATTACHMENTS 0x8CDF +#define GL_COLOR_ATTACHMENT1 0x8CE1 +#define GL_COLOR_ATTACHMENT2 0x8CE2 +#define GL_COLOR_ATTACHMENT3 0x8CE3 +#define GL_COLOR_ATTACHMENT4 
0x8CE4 +#define GL_COLOR_ATTACHMENT5 0x8CE5 +#define GL_COLOR_ATTACHMENT6 0x8CE6 +#define GL_COLOR_ATTACHMENT7 0x8CE7 +#define GL_COLOR_ATTACHMENT8 0x8CE8 +#define GL_COLOR_ATTACHMENT9 0x8CE9 +#define GL_COLOR_ATTACHMENT10 0x8CEA +#define GL_COLOR_ATTACHMENT11 0x8CEB +#define GL_COLOR_ATTACHMENT12 0x8CEC +#define GL_COLOR_ATTACHMENT13 0x8CED +#define GL_COLOR_ATTACHMENT14 0x8CEE +#define GL_COLOR_ATTACHMENT15 0x8CEF +#define GL_COLOR_ATTACHMENT16 0x8CF0 +#define GL_COLOR_ATTACHMENT17 0x8CF1 +#define GL_COLOR_ATTACHMENT18 0x8CF2 +#define GL_COLOR_ATTACHMENT19 0x8CF3 +#define GL_COLOR_ATTACHMENT20 0x8CF4 +#define GL_COLOR_ATTACHMENT21 0x8CF5 +#define GL_COLOR_ATTACHMENT22 0x8CF6 +#define GL_COLOR_ATTACHMENT23 0x8CF7 +#define GL_COLOR_ATTACHMENT24 0x8CF8 +#define GL_COLOR_ATTACHMENT25 0x8CF9 +#define GL_COLOR_ATTACHMENT26 0x8CFA +#define GL_COLOR_ATTACHMENT27 0x8CFB +#define GL_COLOR_ATTACHMENT28 0x8CFC +#define GL_COLOR_ATTACHMENT29 0x8CFD +#define GL_COLOR_ATTACHMENT30 0x8CFE +#define GL_COLOR_ATTACHMENT31 0x8CFF +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE 0x8D56 +#define GL_MAX_SAMPLES 0x8D57 +#define GL_HALF_FLOAT 0x140B +#define GL_MAP_READ_BIT 0x0001 +#define GL_MAP_WRITE_BIT 0x0002 +#define GL_MAP_INVALIDATE_RANGE_BIT 0x0004 +#define GL_MAP_INVALIDATE_BUFFER_BIT 0x0008 +#define GL_MAP_FLUSH_EXPLICIT_BIT 0x0010 +#define GL_MAP_UNSYNCHRONIZED_BIT 0x0020 +#define GL_RG 0x8227 +#define GL_RG_INTEGER 0x8228 +#define GL_R8 0x8229 +#define GL_RG8 0x822B +#define GL_R16F 0x822D +#define GL_R32F 0x822E +#define GL_RG16F 0x822F +#define GL_RG32F 0x8230 +#define GL_R8I 0x8231 +#define GL_R8UI 0x8232 +#define GL_R16I 0x8233 +#define GL_R16UI 0x8234 +#define GL_R32I 0x8235 +#define GL_R32UI 0x8236 +#define GL_RG8I 0x8237 +#define GL_RG8UI 0x8238 +#define GL_RG16I 0x8239 +#define GL_RG16UI 0x823A +#define GL_RG32I 0x823B +#define GL_RG32UI 0x823C +#define GL_VERTEX_ARRAY_BINDING 0x85B5 +#define GL_R8_SNORM 0x8F94 +#define GL_RG8_SNORM 0x8F95 +#define GL_RGB8_SNORM 0x8F96 +#define GL_RGBA8_SNORM 0x8F97 +#define GL_SIGNED_NORMALIZED 0x8F9C +#define GL_PRIMITIVE_RESTART_FIXED_INDEX 0x8D69 +#define GL_COPY_READ_BUFFER 0x8F36 +#define GL_COPY_WRITE_BUFFER 0x8F37 +#define GL_COPY_READ_BUFFER_BINDING 0x8F36 +#define GL_COPY_WRITE_BUFFER_BINDING 0x8F37 +#define GL_UNIFORM_BUFFER 0x8A11 +#define GL_UNIFORM_BUFFER_BINDING 0x8A28 +#define GL_UNIFORM_BUFFER_START 0x8A29 +#define GL_UNIFORM_BUFFER_SIZE 0x8A2A +#define GL_MAX_VERTEX_UNIFORM_BLOCKS 0x8A2B +#define GL_MAX_FRAGMENT_UNIFORM_BLOCKS 0x8A2D +#define GL_MAX_COMBINED_UNIFORM_BLOCKS 0x8A2E +#define GL_MAX_UNIFORM_BUFFER_BINDINGS 0x8A2F +#define GL_MAX_UNIFORM_BLOCK_SIZE 0x8A30 +#define GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS 0x8A31 +#define GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS 0x8A33 +#define GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT 0x8A34 +#define GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH 0x8A35 +#define GL_ACTIVE_UNIFORM_BLOCKS 0x8A36 +#define GL_UNIFORM_TYPE 0x8A37 +#define GL_UNIFORM_SIZE 0x8A38 +#define GL_UNIFORM_NAME_LENGTH 0x8A39 +#define GL_UNIFORM_BLOCK_INDEX 0x8A3A +#define GL_UNIFORM_OFFSET 0x8A3B +#define GL_UNIFORM_ARRAY_STRIDE 0x8A3C +#define GL_UNIFORM_MATRIX_STRIDE 0x8A3D +#define GL_UNIFORM_IS_ROW_MAJOR 0x8A3E +#define GL_UNIFORM_BLOCK_BINDING 0x8A3F +#define GL_UNIFORM_BLOCK_DATA_SIZE 0x8A40 +#define GL_UNIFORM_BLOCK_NAME_LENGTH 0x8A41 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS 0x8A42 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES 0x8A43 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER 0x8A44 +#define 
GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER 0x8A46 +#define GL_INVALID_INDEX 0xFFFFFFFFu +#define GL_MAX_VERTEX_OUTPUT_COMPONENTS 0x9122 +#define GL_MAX_FRAGMENT_INPUT_COMPONENTS 0x9125 +#define GL_MAX_SERVER_WAIT_TIMEOUT 0x9111 +#define GL_OBJECT_TYPE 0x9112 +#define GL_SYNC_CONDITION 0x9113 +#define GL_SYNC_STATUS 0x9114 +#define GL_SYNC_FLAGS 0x9115 +#define GL_SYNC_FENCE 0x9116 +#define GL_SYNC_GPU_COMMANDS_COMPLETE 0x9117 +#define GL_UNSIGNALED 0x9118 +#define GL_SIGNALED 0x9119 +#define GL_ALREADY_SIGNALED 0x911A +#define GL_TIMEOUT_EXPIRED 0x911B +#define GL_CONDITION_SATISFIED 0x911C +#define GL_WAIT_FAILED 0x911D +#define GL_SYNC_FLUSH_COMMANDS_BIT 0x00000001 +#define GL_TIMEOUT_IGNORED 0xFFFFFFFFFFFFFFFFull +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR 0x88FE +#define GL_ANY_SAMPLES_PASSED 0x8C2F +#define GL_ANY_SAMPLES_PASSED_CONSERVATIVE 0x8D6A +#define GL_SAMPLER_BINDING 0x8919 +#define GL_RGB10_A2UI 0x906F +#define GL_TEXTURE_SWIZZLE_R 0x8E42 +#define GL_TEXTURE_SWIZZLE_G 0x8E43 +#define GL_TEXTURE_SWIZZLE_B 0x8E44 +#define GL_TEXTURE_SWIZZLE_A 0x8E45 +#define GL_GREEN 0x1904 +#define GL_BLUE 0x1905 +#define GL_INT_2_10_10_10_REV 0x8D9F +#define GL_TRANSFORM_FEEDBACK 0x8E22 +#define GL_TRANSFORM_FEEDBACK_PAUSED 0x8E23 +#define GL_TRANSFORM_FEEDBACK_ACTIVE 0x8E24 +#define GL_TRANSFORM_FEEDBACK_BINDING 0x8E25 +#define GL_PROGRAM_BINARY_RETRIEVABLE_HINT 0x8257 +#define GL_PROGRAM_BINARY_LENGTH 0x8741 +#define GL_NUM_PROGRAM_BINARY_FORMATS 0x87FE +#define GL_PROGRAM_BINARY_FORMATS 0x87FF +#define GL_COMPRESSED_R11_EAC 0x9270 +#define GL_COMPRESSED_SIGNED_R11_EAC 0x9271 +#define GL_COMPRESSED_RG11_EAC 0x9272 +#define GL_COMPRESSED_SIGNED_RG11_EAC 0x9273 +#define GL_COMPRESSED_RGB8_ETC2 0x9274 +#define GL_COMPRESSED_SRGB8_ETC2 0x9275 +#define GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9276 +#define GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9277 +#define GL_COMPRESSED_RGBA8_ETC2_EAC 0x9278 +#define GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC 0x9279 +#define GL_TEXTURE_IMMUTABLE_FORMAT 0x912F +#define GL_MAX_ELEMENT_INDEX 0x8D6B +#define GL_NUM_SAMPLE_COUNTS 0x9380 +#define GL_TEXTURE_IMMUTABLE_LEVELS 0x82DF +typedef void (GL_APIENTRYP PFNGLREADBUFFERPROC) (GLenum src); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE3DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLGENQUERIESPROC) (GLsizei n, GLuint *ids); +typedef void (GL_APIENTRYP PFNGLDELETEQUERIESPROC) (GLsizei n, 
const GLuint *ids); +typedef GLboolean (GL_APIENTRYP PFNGLISQUERYPROC) (GLuint id); +typedef void (GL_APIENTRYP PFNGLBEGINQUERYPROC) (GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLENDQUERYPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETQUERYIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUIVPROC) (GLuint id, GLenum pname, GLuint *params); +typedef GLboolean (GL_APIENTRYP PFNGLUNMAPBUFFERPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVPROC) (GLenum target, GLenum pname, void **params); +typedef void (GL_APIENTRYP PFNGLDRAWBUFFERSPROC) (GLsizei n, const GLenum *bufs); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLBLITFRAMEBUFFERPROC) (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void *(GL_APIENTRYP PFNGLMAPBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (GL_APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length); +typedef void (GL_APIENTRYP PFNGLBINDVERTEXARRAYPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLDELETEVERTEXARRAYSPROC) (GLsizei n, const GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLGENVERTEXARRAYSPROC) (GLsizei n, GLuint *arrays); +typedef GLboolean (GL_APIENTRYP PFNGLISVERTEXARRAYPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLGETINTEGERI_VPROC) (GLenum target, GLuint index, GLint *data); +typedef void (GL_APIENTRYP PFNGLBEGINTRANSFORMFEEDBACKPROC) (GLenum primitiveMode); +typedef void (GL_APIENTRYP PFNGLENDTRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERRANGEPROC) (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERBASEPROC) (GLenum target, GLuint index, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTRANSFORMFEEDBACKVARYINGSPROC) (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +typedef void (GL_APIENTRYP PFNGLGETTRANSFORMFEEDBACKVARYINGPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBIPOINTERPROC) (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIIVPROC) (GLuint index, GLenum pname, GLint 
*params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIUIVPROC) (GLuint index, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4IPROC) (GLuint index, GLint x, GLint y, GLint z, GLint w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4UIPROC) (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4IVPROC) (GLuint index, const GLint *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4UIVPROC) (GLuint index, const GLuint *v); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMUIVPROC) (GLuint program, GLint location, GLuint *params); +typedef GLint (GL_APIENTRYP PFNGLGETFRAGDATALOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UIPROC) (GLint location, GLuint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UIPROC) (GLint location, GLuint v0, GLuint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UIPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UIPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERIVPROC) (GLenum buffer, GLint drawbuffer, const GLint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERUIVPROC) (GLenum buffer, GLint drawbuffer, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERFVPROC) (GLenum buffer, GLint drawbuffer, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERFIPROC) (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGIPROC) (GLenum name, GLuint index); +typedef void (GL_APIENTRYP PFNGLCOPYBUFFERSUBDATAPROC) (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMINDICESPROC) (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMSIVPROC) (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +typedef GLuint (GL_APIENTRYP PFNGLGETUNIFORMBLOCKINDEXPROC) (GLuint program, const GLchar *uniformBlockName); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKIVPROC) (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC) (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +typedef void (GL_APIENTRYP PFNGLUNIFORMBLOCKBINDINGPROC) (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDPROC) (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +typedef GLsync (GL_APIENTRYP PFNGLFENCESYNCPROC) (GLenum condition, GLbitfield flags); +typedef GLboolean (GL_APIENTRYP PFNGLISSYNCPROC) (GLsync sync); +typedef void (GL_APIENTRYP PFNGLDELETESYNCPROC) (GLsync sync); +typedef GLenum 
(GL_APIENTRYP PFNGLCLIENTWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VPROC) (GLenum pname, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETSYNCIVPROC) (GLsync sync, GLenum pname, GLsizei count, GLsizei *length, GLint *values); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64I_VPROC) (GLenum target, GLuint index, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERI64VPROC) (GLenum target, GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGENSAMPLERSPROC) (GLsizei count, GLuint *samplers); +typedef void (GL_APIENTRYP PFNGLDELETESAMPLERSPROC) (GLsizei count, const GLuint *samplers); +typedef GLboolean (GL_APIENTRYP PFNGLISSAMPLERPROC) (GLuint sampler); +typedef void (GL_APIENTRYP PFNGLBINDSAMPLERPROC) (GLuint unit, GLuint sampler); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIPROC) (GLuint sampler, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIVPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFPROC) (GLuint sampler, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, const GLfloat *param); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIVPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISORPROC) (GLuint index, GLuint divisor); +typedef void (GL_APIENTRYP PFNGLBINDTRANSFORMFEEDBACKPROC) (GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLDELETETRANSFORMFEEDBACKSPROC) (GLsizei n, const GLuint *ids); +typedef void (GL_APIENTRYP PFNGLGENTRANSFORMFEEDBACKSPROC) (GLsizei n, GLuint *ids); +typedef GLboolean (GL_APIENTRYP PFNGLISTRANSFORMFEEDBACKPROC) (GLuint id); +typedef void (GL_APIENTRYP PFNGLPAUSETRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLRESUMETRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMBINARYPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +typedef void (GL_APIENTRYP PFNGLPROGRAMBINARYPROC) (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLPROGRAMPARAMETERIPROC) (GLuint program, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLINVALIDATEFRAMEBUFFERPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments); +typedef void (GL_APIENTRYP PFNGLINVALIDATESUBFRAMEBUFFERPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (GL_APIENTRYP PFNGLGETINTERNALFORMATIVPROC) (GLenum target, GLenum internalformat, GLenum pname, GLsizei count, GLint *params); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glReadBuffer (GLenum src); +GL_APICALL void GL_APIENTRY glDrawRangeElements (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glTexImage3D (GLenum target, GLint level, 
GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glCopyTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glCompressedTexImage3D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glGenQueries (GLsizei n, GLuint *ids); +GL_APICALL void GL_APIENTRY glDeleteQueries (GLsizei n, const GLuint *ids); +GL_APICALL GLboolean GL_APIENTRY glIsQuery (GLuint id); +GL_APICALL void GL_APIENTRY glBeginQuery (GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glEndQuery (GLenum target); +GL_APICALL void GL_APIENTRY glGetQueryiv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectuiv (GLuint id, GLenum pname, GLuint *params); +GL_APICALL GLboolean GL_APIENTRY glUnmapBuffer (GLenum target); +GL_APICALL void GL_APIENTRY glGetBufferPointerv (GLenum target, GLenum pname, void **params); +GL_APICALL void GL_APIENTRY glDrawBuffers (GLsizei n, const GLenum *bufs); +GL_APICALL void GL_APIENTRY glUniformMatrix2x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glBlitFramebuffer (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glFramebufferTextureLayer (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +GL_APICALL void *GL_APIENTRY glMapBufferRange (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GL_APICALL void GL_APIENTRY glFlushMappedBufferRange (GLenum target, GLintptr offset, GLsizeiptr length); +GL_APICALL void GL_APIENTRY glBindVertexArray (GLuint array); +GL_APICALL void GL_APIENTRY glDeleteVertexArrays (GLsizei n, const GLuint *arrays); +GL_APICALL void GL_APIENTRY glGenVertexArrays (GLsizei n, GLuint *arrays); +GL_APICALL GLboolean GL_APIENTRY glIsVertexArray (GLuint array); +GL_APICALL void GL_APIENTRY glGetIntegeri_v (GLenum target, GLuint index, GLint *data); +GL_APICALL void GL_APIENTRY 
glBeginTransformFeedback (GLenum primitiveMode); +GL_APICALL void GL_APIENTRY glEndTransformFeedback (void); +GL_APICALL void GL_APIENTRY glBindBufferRange (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glBindBufferBase (GLenum target, GLuint index, GLuint buffer); +GL_APICALL void GL_APIENTRY glTransformFeedbackVaryings (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +GL_APICALL void GL_APIENTRY glGetTransformFeedbackVarying (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glVertexAttribIPointer (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_APICALL void GL_APIENTRY glGetVertexAttribIiv (GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribIuiv (GLuint index, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glVertexAttribI4i (GLuint index, GLint x, GLint y, GLint z, GLint w); +GL_APICALL void GL_APIENTRY glVertexAttribI4ui (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GL_APICALL void GL_APIENTRY glVertexAttribI4iv (GLuint index, const GLint *v); +GL_APICALL void GL_APIENTRY glVertexAttribI4uiv (GLuint index, const GLuint *v); +GL_APICALL void GL_APIENTRY glGetUniformuiv (GLuint program, GLint location, GLuint *params); +GL_APICALL GLint GL_APIENTRY glGetFragDataLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glUniform1ui (GLint location, GLuint v0); +GL_APICALL void GL_APIENTRY glUniform2ui (GLint location, GLuint v0, GLuint v1); +GL_APICALL void GL_APIENTRY glUniform3ui (GLint location, GLuint v0, GLuint v1, GLuint v2); +GL_APICALL void GL_APIENTRY glUniform4ui (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GL_APICALL void GL_APIENTRY glUniform1uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform2uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform3uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform4uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glClearBufferiv (GLenum buffer, GLint drawbuffer, const GLint *value); +GL_APICALL void GL_APIENTRY glClearBufferuiv (GLenum buffer, GLint drawbuffer, const GLuint *value); +GL_APICALL void GL_APIENTRY glClearBufferfv (GLenum buffer, GLint drawbuffer, const GLfloat *value); +GL_APICALL void GL_APIENTRY glClearBufferfi (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +GL_APICALL const GLubyte *GL_APIENTRY glGetStringi (GLenum name, GLuint index); +GL_APICALL void GL_APIENTRY glCopyBufferSubData (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glGetUniformIndices (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +GL_APICALL void GL_APIENTRY glGetActiveUniformsiv (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +GL_APICALL GLuint GL_APIENTRY glGetUniformBlockIndex (GLuint program, const GLchar *uniformBlockName); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockiv (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockName (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, 
GLchar *uniformBlockName); +GL_APICALL void GL_APIENTRY glUniformBlockBinding (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +GL_APICALL void GL_APIENTRY glDrawArraysInstanced (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +GL_APICALL void GL_APIENTRY glDrawElementsInstanced (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +GL_APICALL GLsync GL_APIENTRY glFenceSync (GLenum condition, GLbitfield flags); +GL_APICALL GLboolean GL_APIENTRY glIsSync (GLsync sync); +GL_APICALL void GL_APIENTRY glDeleteSync (GLsync sync); +GL_APICALL GLenum GL_APIENTRY glClientWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glGetInteger64v (GLenum pname, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetSynciv (GLsync sync, GLenum pname, GLsizei count, GLsizei *length, GLint *values); +GL_APICALL void GL_APIENTRY glGetInteger64i_v (GLenum target, GLuint index, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetBufferParameteri64v (GLenum target, GLenum pname, GLint64 *params); +GL_APICALL void GL_APIENTRY glGenSamplers (GLsizei count, GLuint *samplers); +GL_APICALL void GL_APIENTRY glDeleteSamplers (GLsizei count, const GLuint *samplers); +GL_APICALL GLboolean GL_APIENTRY glIsSampler (GLuint sampler); +GL_APICALL void GL_APIENTRY glBindSampler (GLuint unit, GLuint sampler); +GL_APICALL void GL_APIENTRY glSamplerParameteri (GLuint sampler, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glSamplerParameteriv (GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterf (GLuint sampler, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glSamplerParameterfv (GLuint sampler, GLenum pname, const GLfloat *param); +GL_APICALL void GL_APIENTRY glGetSamplerParameteriv (GLuint sampler, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterfv (GLuint sampler, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glVertexAttribDivisor (GLuint index, GLuint divisor); +GL_APICALL void GL_APIENTRY glBindTransformFeedback (GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glDeleteTransformFeedbacks (GLsizei n, const GLuint *ids); +GL_APICALL void GL_APIENTRY glGenTransformFeedbacks (GLsizei n, GLuint *ids); +GL_APICALL GLboolean GL_APIENTRY glIsTransformFeedback (GLuint id); +GL_APICALL void GL_APIENTRY glPauseTransformFeedback (void); +GL_APICALL void GL_APIENTRY glResumeTransformFeedback (void); +GL_APICALL void GL_APIENTRY glGetProgramBinary (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +GL_APICALL void GL_APIENTRY glProgramBinary (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glProgramParameteri (GLuint program, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glInvalidateFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments); +GL_APICALL void GL_APIENTRY glInvalidateSubFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage2D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage3D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GL_APICALL void 
GL_APIENTRY glGetInternalformativ (GLenum target, GLenum internalformat, GLenum pname, GLsizei count, GLint *params); +#endif +#endif /* GL_ES_VERSION_3_0 */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/angle/include/GLES3/gl31.h b/vendor/angle/include/GLES3/gl31.h new file mode 100644 index 00000000..502d6fee --- /dev/null +++ b/vendor/angle/include/GLES3/gl31.h @@ -0,0 +1,1507 @@ +#ifndef __gles2_gl31_h_ +#define __gles2_gl31_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright 2013-2020 The Khronos Group Inc. +** SPDX-License-Identifier: MIT +** +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#include <GLES3/gl3platform.h> + +#ifndef GL_APIENTRYP +#define GL_APIENTRYP GL_APIENTRY* +#endif + +#ifndef GL_GLES_PROTOTYPES +#define GL_GLES_PROTOTYPES 1 +#endif + +/* Generated on date 20191013 */ + +/* Generated C header for: + * API: gles2 + * Profile: common + * Versions considered: 2\.[0-9]|3\.[01] + * Versions emitted: .* + * Default extensions included: None + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_ES_VERSION_2_0 +#define GL_ES_VERSION_2_0 1 +#include <KHR/khrplatform.h> +typedef khronos_int8_t GLbyte; +typedef khronos_float_t GLclampf; +typedef khronos_int32_t GLfixed; +typedef khronos_int16_t GLshort; +typedef khronos_uint16_t GLushort; +typedef void GLvoid; +typedef struct __GLsync *GLsync; +typedef khronos_int64_t GLint64; +typedef khronos_uint64_t GLuint64; +typedef unsigned int GLenum; +typedef unsigned int GLuint; +typedef char GLchar; +typedef khronos_float_t GLfloat; +typedef khronos_ssize_t GLsizeiptr; +typedef khronos_intptr_t GLintptr; +typedef unsigned int GLbitfield; +typedef int GLint; +typedef unsigned char GLboolean; +typedef int GLsizei; +typedef khronos_uint8_t GLubyte; +#define GL_DEPTH_BUFFER_BIT 0x00000100 +#define GL_STENCIL_BUFFER_BIT 0x00000400 +#define GL_COLOR_BUFFER_BIT 0x00004000 +#define GL_FALSE 0 +#define GL_TRUE 1 +#define GL_POINTS 0x0000 +#define GL_LINES 0x0001 +#define GL_LINE_LOOP 0x0002 +#define GL_LINE_STRIP 0x0003 +#define GL_TRIANGLES 0x0004 +#define GL_TRIANGLE_STRIP 0x0005 +#define GL_TRIANGLE_FAN 0x0006 +#define GL_ZERO 0 +#define GL_ONE 1 +#define GL_SRC_COLOR 0x0300 +#define GL_ONE_MINUS_SRC_COLOR 0x0301 +#define GL_SRC_ALPHA 0x0302 +#define GL_ONE_MINUS_SRC_ALPHA 0x0303 +#define GL_DST_ALPHA 0x0304 +#define GL_ONE_MINUS_DST_ALPHA 0x0305 +#define GL_DST_COLOR 0x0306 +#define GL_ONE_MINUS_DST_COLOR 0x0307 +#define GL_SRC_ALPHA_SATURATE 0x0308 +#define GL_FUNC_ADD 0x8006 +#define GL_BLEND_EQUATION 0x8009 +#define GL_BLEND_EQUATION_RGB 0x8009 +#define GL_BLEND_EQUATION_ALPHA 0x883D +#define GL_FUNC_SUBTRACT 0x800A +#define GL_FUNC_REVERSE_SUBTRACT 0x800B +#define GL_BLEND_DST_RGB 0x80C8 +#define GL_BLEND_SRC_RGB 0x80C9 +#define GL_BLEND_DST_ALPHA 0x80CA +#define GL_BLEND_SRC_ALPHA 0x80CB +#define GL_CONSTANT_COLOR 0x8001 +#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002 +#define GL_CONSTANT_ALPHA 0x8003 +#define GL_ONE_MINUS_CONSTANT_ALPHA 0x8004 +#define GL_BLEND_COLOR 0x8005 +#define GL_ARRAY_BUFFER 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define GL_ARRAY_BUFFER_BINDING 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895 +#define GL_STREAM_DRAW 0x88E0 +#define GL_STATIC_DRAW 0x88E4 +#define GL_DYNAMIC_DRAW 0x88E8 +#define GL_BUFFER_SIZE 0x8764 +#define GL_BUFFER_USAGE 0x8765
+#define GL_CURRENT_VERTEX_ATTRIB 0x8626 +#define GL_FRONT 0x0404 +#define GL_BACK 0x0405 +#define GL_FRONT_AND_BACK 0x0408 +#define GL_TEXTURE_2D 0x0DE1 +#define GL_CULL_FACE 0x0B44 +#define GL_BLEND 0x0BE2 +#define GL_DITHER 0x0BD0 +#define GL_STENCIL_TEST 0x0B90 +#define GL_DEPTH_TEST 0x0B71 +#define GL_SCISSOR_TEST 0x0C11 +#define GL_POLYGON_OFFSET_FILL 0x8037 +#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E +#define GL_SAMPLE_COVERAGE 0x80A0 +#define GL_NO_ERROR 0 +#define GL_INVALID_ENUM 0x0500 +#define GL_INVALID_VALUE 0x0501 +#define GL_INVALID_OPERATION 0x0502 +#define GL_OUT_OF_MEMORY 0x0505 +#define GL_CW 0x0900 +#define GL_CCW 0x0901 +#define GL_LINE_WIDTH 0x0B21 +#define GL_ALIASED_POINT_SIZE_RANGE 0x846D +#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E +#define GL_CULL_FACE_MODE 0x0B45 +#define GL_FRONT_FACE 0x0B46 +#define GL_DEPTH_RANGE 0x0B70 +#define GL_DEPTH_WRITEMASK 0x0B72 +#define GL_DEPTH_CLEAR_VALUE 0x0B73 +#define GL_DEPTH_FUNC 0x0B74 +#define GL_STENCIL_CLEAR_VALUE 0x0B91 +#define GL_STENCIL_FUNC 0x0B92 +#define GL_STENCIL_FAIL 0x0B94 +#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95 +#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96 +#define GL_STENCIL_REF 0x0B97 +#define GL_STENCIL_VALUE_MASK 0x0B93 +#define GL_STENCIL_WRITEMASK 0x0B98 +#define GL_STENCIL_BACK_FUNC 0x8800 +#define GL_STENCIL_BACK_FAIL 0x8801 +#define GL_STENCIL_BACK_PASS_DEPTH_FAIL 0x8802 +#define GL_STENCIL_BACK_PASS_DEPTH_PASS 0x8803 +#define GL_STENCIL_BACK_REF 0x8CA3 +#define GL_STENCIL_BACK_VALUE_MASK 0x8CA4 +#define GL_STENCIL_BACK_WRITEMASK 0x8CA5 +#define GL_VIEWPORT 0x0BA2 +#define GL_SCISSOR_BOX 0x0C10 +#define GL_COLOR_CLEAR_VALUE 0x0C22 +#define GL_COLOR_WRITEMASK 0x0C23 +#define GL_UNPACK_ALIGNMENT 0x0CF5 +#define GL_PACK_ALIGNMENT 0x0D05 +#define GL_MAX_TEXTURE_SIZE 0x0D33 +#define GL_MAX_VIEWPORT_DIMS 0x0D3A +#define GL_SUBPIXEL_BITS 0x0D50 +#define GL_RED_BITS 0x0D52 +#define GL_GREEN_BITS 0x0D53 +#define GL_BLUE_BITS 0x0D54 +#define GL_ALPHA_BITS 0x0D55 +#define GL_DEPTH_BITS 0x0D56 +#define GL_STENCIL_BITS 0x0D57 +#define GL_POLYGON_OFFSET_UNITS 0x2A00 +#define GL_POLYGON_OFFSET_FACTOR 0x8038 +#define GL_TEXTURE_BINDING_2D 0x8069 +#define GL_SAMPLE_BUFFERS 0x80A8 +#define GL_SAMPLES 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT 0x80AB +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3 +#define GL_DONT_CARE 0x1100 +#define GL_FASTEST 0x1101 +#define GL_NICEST 0x1102 +#define GL_GENERATE_MIPMAP_HINT 0x8192 +#define GL_BYTE 0x1400 +#define GL_UNSIGNED_BYTE 0x1401 +#define GL_SHORT 0x1402 +#define GL_UNSIGNED_SHORT 0x1403 +#define GL_INT 0x1404 +#define GL_UNSIGNED_INT 0x1405 +#define GL_FLOAT 0x1406 +#define GL_FIXED 0x140C +#define GL_DEPTH_COMPONENT 0x1902 +#define GL_ALPHA 0x1906 +#define GL_RGB 0x1907 +#define GL_RGBA 0x1908 +#define GL_LUMINANCE 0x1909 +#define GL_LUMINANCE_ALPHA 0x190A +#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034 +#define GL_UNSIGNED_SHORT_5_6_5 0x8363 +#define GL_FRAGMENT_SHADER 0x8B30 +#define GL_VERTEX_SHADER 0x8B31 +#define GL_MAX_VERTEX_ATTRIBS 0x8869 +#define GL_MAX_VERTEX_UNIFORM_VECTORS 0x8DFB +#define GL_MAX_VARYING_VECTORS 0x8DFC +#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D +#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS 0x8B4C +#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872 +#define GL_MAX_FRAGMENT_UNIFORM_VECTORS 0x8DFD +#define GL_SHADER_TYPE 0x8B4F +#define GL_DELETE_STATUS 0x8B80 +#define GL_LINK_STATUS 0x8B82 +#define GL_VALIDATE_STATUS 0x8B83 
+#define GL_ATTACHED_SHADERS 0x8B85 +#define GL_ACTIVE_UNIFORMS 0x8B86 +#define GL_ACTIVE_UNIFORM_MAX_LENGTH 0x8B87 +#define GL_ACTIVE_ATTRIBUTES 0x8B89 +#define GL_ACTIVE_ATTRIBUTE_MAX_LENGTH 0x8B8A +#define GL_SHADING_LANGUAGE_VERSION 0x8B8C +#define GL_CURRENT_PROGRAM 0x8B8D +#define GL_NEVER 0x0200 +#define GL_LESS 0x0201 +#define GL_EQUAL 0x0202 +#define GL_LEQUAL 0x0203 +#define GL_GREATER 0x0204 +#define GL_NOTEQUAL 0x0205 +#define GL_GEQUAL 0x0206 +#define GL_ALWAYS 0x0207 +#define GL_KEEP 0x1E00 +#define GL_REPLACE 0x1E01 +#define GL_INCR 0x1E02 +#define GL_DECR 0x1E03 +#define GL_INVERT 0x150A +#define GL_INCR_WRAP 0x8507 +#define GL_DECR_WRAP 0x8508 +#define GL_VENDOR 0x1F00 +#define GL_RENDERER 0x1F01 +#define GL_VERSION 0x1F02 +#define GL_EXTENSIONS 0x1F03 +#define GL_NEAREST 0x2600 +#define GL_LINEAR 0x2601 +#define GL_NEAREST_MIPMAP_NEAREST 0x2700 +#define GL_LINEAR_MIPMAP_NEAREST 0x2701 +#define GL_NEAREST_MIPMAP_LINEAR 0x2702 +#define GL_LINEAR_MIPMAP_LINEAR 0x2703 +#define GL_TEXTURE_MAG_FILTER 0x2800 +#define GL_TEXTURE_MIN_FILTER 0x2801 +#define GL_TEXTURE_WRAP_S 0x2802 +#define GL_TEXTURE_WRAP_T 0x2803 +#define GL_TEXTURE 0x1702 +#define GL_TEXTURE_CUBE_MAP 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 +#define GL_TEXTURE3 0x84C3 +#define GL_TEXTURE4 0x84C4 +#define GL_TEXTURE5 0x84C5 +#define GL_TEXTURE6 0x84C6 +#define GL_TEXTURE7 0x84C7 +#define GL_TEXTURE8 0x84C8 +#define GL_TEXTURE9 0x84C9 +#define GL_TEXTURE10 0x84CA +#define GL_TEXTURE11 0x84CB +#define GL_TEXTURE12 0x84CC +#define GL_TEXTURE13 0x84CD +#define GL_TEXTURE14 0x84CE +#define GL_TEXTURE15 0x84CF +#define GL_TEXTURE16 0x84D0 +#define GL_TEXTURE17 0x84D1 +#define GL_TEXTURE18 0x84D2 +#define GL_TEXTURE19 0x84D3 +#define GL_TEXTURE20 0x84D4 +#define GL_TEXTURE21 0x84D5 +#define GL_TEXTURE22 0x84D6 +#define GL_TEXTURE23 0x84D7 +#define GL_TEXTURE24 0x84D8 +#define GL_TEXTURE25 0x84D9 +#define GL_TEXTURE26 0x84DA +#define GL_TEXTURE27 0x84DB +#define GL_TEXTURE28 0x84DC +#define GL_TEXTURE29 0x84DD +#define GL_TEXTURE30 0x84DE +#define GL_TEXTURE31 0x84DF +#define GL_ACTIVE_TEXTURE 0x84E0 +#define GL_REPEAT 0x2901 +#define GL_CLAMP_TO_EDGE 0x812F +#define GL_MIRRORED_REPEAT 0x8370 +#define GL_FLOAT_VEC2 0x8B50 +#define GL_FLOAT_VEC3 0x8B51 +#define GL_FLOAT_VEC4 0x8B52 +#define GL_INT_VEC2 0x8B53 +#define GL_INT_VEC3 0x8B54 +#define GL_INT_VEC4 0x8B55 +#define GL_BOOL 0x8B56 +#define GL_BOOL_VEC2 0x8B57 +#define GL_BOOL_VEC3 0x8B58 +#define GL_BOOL_VEC4 0x8B59 +#define GL_FLOAT_MAT2 0x8B5A +#define GL_FLOAT_MAT3 0x8B5B +#define GL_FLOAT_MAT4 0x8B5C +#define GL_SAMPLER_2D 0x8B5E +#define GL_SAMPLER_CUBE 0x8B60 +#define GL_VERTEX_ATTRIB_ARRAY_ENABLED 0x8622 +#define GL_VERTEX_ATTRIB_ARRAY_SIZE 0x8623 +#define GL_VERTEX_ATTRIB_ARRAY_STRIDE 0x8624 +#define GL_VERTEX_ATTRIB_ARRAY_TYPE 0x8625 +#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED 0x886A +#define GL_VERTEX_ATTRIB_ARRAY_POINTER 0x8645 +#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING 0x889F +#define GL_IMPLEMENTATION_COLOR_READ_TYPE 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT 0x8B9B +#define GL_COMPILE_STATUS 0x8B81 +#define GL_INFO_LOG_LENGTH 
0x8B84 +#define GL_SHADER_SOURCE_LENGTH 0x8B88 +#define GL_SHADER_COMPILER 0x8DFA +#define GL_SHADER_BINARY_FORMATS 0x8DF8 +#define GL_NUM_SHADER_BINARY_FORMATS 0x8DF9 +#define GL_LOW_FLOAT 0x8DF0 +#define GL_MEDIUM_FLOAT 0x8DF1 +#define GL_HIGH_FLOAT 0x8DF2 +#define GL_LOW_INT 0x8DF3 +#define GL_MEDIUM_INT 0x8DF4 +#define GL_HIGH_INT 0x8DF5 +#define GL_FRAMEBUFFER 0x8D40 +#define GL_RENDERBUFFER 0x8D41 +#define GL_RGBA4 0x8056 +#define GL_RGB5_A1 0x8057 +#define GL_RGB565 0x8D62 +#define GL_DEPTH_COMPONENT16 0x81A5 +#define GL_STENCIL_INDEX8 0x8D48 +#define GL_RENDERBUFFER_WIDTH 0x8D42 +#define GL_RENDERBUFFER_HEIGHT 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT 0x8D44 +#define GL_RENDERBUFFER_RED_SIZE 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE 0x8D53 +#define GL_RENDERBUFFER_DEPTH_SIZE 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE 0x8D55 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE 0x8CD3 +#define GL_COLOR_ATTACHMENT0 0x8CE0 +#define GL_DEPTH_ATTACHMENT 0x8D00 +#define GL_STENCIL_ATTACHMENT 0x8D20 +#define GL_NONE 0 +#define GL_FRAMEBUFFER_COMPLETE 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS 0x8CD9 +#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD +#define GL_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_RENDERBUFFER_BINDING 0x8CA7 +#define GL_MAX_RENDERBUFFER_SIZE 0x84E8 +#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506 +typedef void (GL_APIENTRYP PFNGLACTIVETEXTUREPROC) (GLenum texture); +typedef void (GL_APIENTRYP PFNGLATTACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLBINDATTRIBLOCATIONPROC) (GLuint program, GLuint index, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLBINDFRAMEBUFFERPROC) (GLenum target, GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLBINDRENDERBUFFERPROC) (GLenum target, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLBINDTEXTUREPROC) (GLenum target, GLuint texture); +typedef void (GL_APIENTRYP PFNGLBLENDCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEPROC) (GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCPROC) (GLenum sfactor, GLenum dfactor); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEPROC) (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +typedef void (GL_APIENTRYP PFNGLBUFFERDATAPROC) (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +typedef void (GL_APIENTRYP PFNGLBUFFERSUBDATAPROC) (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +typedef GLenum (GL_APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLCLEARPROC) (GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHFPROC) (GLfloat d); +typedef void (GL_APIENTRYP PFNGLCLEARSTENCILPROC) (GLint s); +typedef void (GL_APIENTRYP PFNGLCOLORMASKPROC) (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +typedef 
void (GL_APIENTRYP PFNGLCOMPILESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOPYTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef GLuint (GL_APIENTRYP PFNGLCREATEPROGRAMPROC) (void); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROC) (GLenum type); +typedef void (GL_APIENTRYP PFNGLCULLFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLDELETEBUFFERSPROC) (GLsizei n, const GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLDELETEFRAMEBUFFERSPROC) (GLsizei n, const GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLDELETERENDERBUFFERSPROC) (GLsizei n, const GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLDELETESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLDELETETEXTURESPROC) (GLsizei n, const GLuint *textures); +typedef void (GL_APIENTRYP PFNGLDEPTHFUNCPROC) (GLenum func); +typedef void (GL_APIENTRYP PFNGLDEPTHMASKPROC) (GLboolean flag); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEFPROC) (GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLDETACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLDISABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLDISABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSPROC) (GLenum mode, GLint first, GLsizei count); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLENABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLENABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLFINISHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFLUSHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFERPROC) (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (GL_APIENTRYP PFNGLFRONTFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLGENBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLGENERATEMIPMAPPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGENFRAMEBUFFERSPROC) (GLsizei n, GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLGENRENDERBUFFERSPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLGENTEXTURESPROC) (GLsizei n, GLuint *textures); +typedef void (GL_APIENTRYP PFNGLGETACTIVEATTRIBPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETATTACHEDSHADERSPROC) (GLuint program, GLsizei maxCount, 
GLsizei *count, GLuint *shaders); +typedef GLint (GL_APIENTRYP PFNGLGETATTRIBLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANVPROC) (GLenum pname, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef GLenum (GL_APIENTRYP PFNGLGETERRORPROC) (void); +typedef void (GL_APIENTRYP PFNGLGETFLOATVPROC) (GLenum pname, GLfloat *data); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC) (GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERVPROC) (GLenum pname, GLint *data); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMIVPROC) (GLuint program, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINFOLOGPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERIVPROC) (GLuint shader, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERINFOLOGPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETSHADERPRECISIONFORMATPROC) (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +typedef void (GL_APIENTRYP PFNGLGETSHADERSOURCEPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGPROC) (GLenum name); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMFVPROC) (GLuint program, GLint location, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMIVPROC) (GLuint program, GLint location, GLint *params); +typedef GLint (GL_APIENTRYP PFNGLGETUNIFORMLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBFVPROC) (GLuint index, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVPROC) (GLuint index, GLenum pname, void **pointer); +typedef void (GL_APIENTRYP PFNGLHINTPROC) (GLenum target, GLenum mode); +typedef GLboolean (GL_APIENTRYP PFNGLISBUFFERPROC) (GLuint buffer); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDPROC) (GLenum cap); +typedef GLboolean (GL_APIENTRYP PFNGLISFRAMEBUFFERPROC) (GLuint framebuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPROC) (GLuint program); +typedef GLboolean (GL_APIENTRYP PFNGLISRENDERBUFFERPROC) (GLuint renderbuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISSHADERPROC) (GLuint shader); +typedef GLboolean (GL_APIENTRYP PFNGLISTEXTUREPROC) (GLuint texture); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHPROC) (GLfloat width); +typedef void (GL_APIENTRYP PFNGLLINKPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLPIXELSTOREIPROC) (GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETPROC) (GLfloat factor, GLfloat units); +typedef void (GL_APIENTRYP PFNGLREADPIXELSPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +typedef void (GL_APIENTRYP PFNGLRELEASESHADERCOMPILERPROC) (void); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEPROC) (GLenum target, GLenum 
internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGEPROC) (GLfloat value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLSCISSORPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSHADERBINARYPROC) (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLSHADERSOURCEPROC) (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCSEPARATEPROC) (GLenum face, GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKPROC) (GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKSEPARATEPROC) (GLenum face, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILOPPROC) (GLenum fail, GLenum zfail, GLenum zpass); +typedef void (GL_APIENTRYP PFNGLSTENCILOPSEPARATEPROC) (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFPROC) (GLenum target, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FPROC) (GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IPROC) (GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FPROC) (GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IPROC) (GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IPROC) (GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IPROC) (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat 
*value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FPROC) (GLuint index, GLfloat x); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FPROC) (GLuint index, GLfloat x, GLfloat y); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBPOINTERPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLVIEWPORTPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glActiveTexture (GLenum texture); +GL_APICALL void GL_APIENTRY glAttachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glBindAttribLocation (GLuint program, GLuint index, const GLchar *name); +GL_APICALL void GL_APIENTRY glBindBuffer (GLenum target, GLuint buffer); +GL_APICALL void GL_APIENTRY glBindFramebuffer (GLenum target, GLuint framebuffer); +GL_APICALL void GL_APIENTRY glBindRenderbuffer (GLenum target, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glBindTexture (GLenum target, GLuint texture); +GL_APICALL void GL_APIENTRY glBlendColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glBlendEquation (GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparate (GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFunc (GLenum sfactor, GLenum dfactor); +GL_APICALL void GL_APIENTRY glBlendFuncSeparate (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +GL_APICALL void GL_APIENTRY glBufferData (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +GL_APICALL void GL_APIENTRY glBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +GL_APICALL GLenum GL_APIENTRY glCheckFramebufferStatus (GLenum target); +GL_APICALL void GL_APIENTRY glClear (GLbitfield mask); +GL_APICALL void GL_APIENTRY glClearColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glClearDepthf (GLfloat d); +GL_APICALL void GL_APIENTRY glClearStencil (GLint s); +GL_APICALL void GL_APIENTRY glColorMask (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +GL_APICALL void GL_APIENTRY glCompileShader (GLuint shader); +GL_APICALL void GL_APIENTRY glCompressedTexImage2D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const 
void *data); +GL_APICALL void GL_APIENTRY glCopyTexImage2D (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GL_APICALL void GL_APIENTRY glCopyTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL GLuint GL_APIENTRY glCreateProgram (void); +GL_APICALL GLuint GL_APIENTRY glCreateShader (GLenum type); +GL_APICALL void GL_APIENTRY glCullFace (GLenum mode); +GL_APICALL void GL_APIENTRY glDeleteBuffers (GLsizei n, const GLuint *buffers); +GL_APICALL void GL_APIENTRY glDeleteFramebuffers (GLsizei n, const GLuint *framebuffers); +GL_APICALL void GL_APIENTRY glDeleteProgram (GLuint program); +GL_APICALL void GL_APIENTRY glDeleteRenderbuffers (GLsizei n, const GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glDeleteShader (GLuint shader); +GL_APICALL void GL_APIENTRY glDeleteTextures (GLsizei n, const GLuint *textures); +GL_APICALL void GL_APIENTRY glDepthFunc (GLenum func); +GL_APICALL void GL_APIENTRY glDepthMask (GLboolean flag); +GL_APICALL void GL_APIENTRY glDepthRangef (GLfloat n, GLfloat f); +GL_APICALL void GL_APIENTRY glDetachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glDisable (GLenum cap); +GL_APICALL void GL_APIENTRY glDisableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glDrawArrays (GLenum mode, GLint first, GLsizei count); +GL_APICALL void GL_APIENTRY glDrawElements (GLenum mode, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glEnable (GLenum cap); +GL_APICALL void GL_APIENTRY glEnableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glFinish (void); +GL_APICALL void GL_APIENTRY glFlush (void); +GL_APICALL void GL_APIENTRY glFramebufferRenderbuffer (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glFramebufferTexture2D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GL_APICALL void GL_APIENTRY glFrontFace (GLenum mode); +GL_APICALL void GL_APIENTRY glGenBuffers (GLsizei n, GLuint *buffers); +GL_APICALL void GL_APIENTRY glGenerateMipmap (GLenum target); +GL_APICALL void GL_APIENTRY glGenFramebuffers (GLsizei n, GLuint *framebuffers); +GL_APICALL void GL_APIENTRY glGenRenderbuffers (GLsizei n, GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glGenTextures (GLsizei n, GLuint *textures); +GL_APICALL void GL_APIENTRY glGetActiveAttrib (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetActiveUniform (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetAttachedShaders (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +GL_APICALL GLint GL_APIENTRY glGetAttribLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetBooleanv (GLenum pname, GLboolean *data); +GL_APICALL void GL_APIENTRY glGetBufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL GLenum GL_APIENTRY glGetError (void); +GL_APICALL void GL_APIENTRY glGetFloatv (GLenum pname, GLfloat *data); +GL_APICALL void GL_APIENTRY glGetFramebufferAttachmentParameteriv (GLenum target, GLenum attachment, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetIntegerv (GLenum pname, GLint *data); +GL_APICALL void GL_APIENTRY glGetProgramiv (GLuint program, 
GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetProgramInfoLog (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetRenderbufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderiv (GLuint shader, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderInfoLog (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetShaderPrecisionFormat (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +GL_APICALL void GL_APIENTRY glGetShaderSource (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +GL_APICALL const GLubyte *GL_APIENTRY glGetString (GLenum name); +GL_APICALL void GL_APIENTRY glGetTexParameterfv (GLenum target, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetTexParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetUniformfv (GLuint program, GLint location, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetUniformiv (GLuint program, GLint location, GLint *params); +GL_APICALL GLint GL_APIENTRY glGetUniformLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetVertexAttribfv (GLuint index, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribiv (GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribPointerv (GLuint index, GLenum pname, void **pointer); +GL_APICALL void GL_APIENTRY glHint (GLenum target, GLenum mode); +GL_APICALL GLboolean GL_APIENTRY glIsBuffer (GLuint buffer); +GL_APICALL GLboolean GL_APIENTRY glIsEnabled (GLenum cap); +GL_APICALL GLboolean GL_APIENTRY glIsFramebuffer (GLuint framebuffer); +GL_APICALL GLboolean GL_APIENTRY glIsProgram (GLuint program); +GL_APICALL GLboolean GL_APIENTRY glIsRenderbuffer (GLuint renderbuffer); +GL_APICALL GLboolean GL_APIENTRY glIsShader (GLuint shader); +GL_APICALL GLboolean GL_APIENTRY glIsTexture (GLuint texture); +GL_APICALL void GL_APIENTRY glLineWidth (GLfloat width); +GL_APICALL void GL_APIENTRY glLinkProgram (GLuint program); +GL_APICALL void GL_APIENTRY glPixelStorei (GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glPolygonOffset (GLfloat factor, GLfloat units); +GL_APICALL void GL_APIENTRY glReadPixels (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +GL_APICALL void GL_APIENTRY glReleaseShaderCompiler (void); +GL_APICALL void GL_APIENTRY glRenderbufferStorage (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glSampleCoverage (GLfloat value, GLboolean invert); +GL_APICALL void GL_APIENTRY glScissor (GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glShaderBinary (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glShaderSource (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +GL_APICALL void GL_APIENTRY glStencilFunc (GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilFuncSeparate (GLenum face, GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMask (GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMaskSeparate (GLenum face, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilOp (GLenum fail, GLenum zfail, GLenum zpass); +GL_APICALL void GL_APIENTRY glStencilOpSeparate (GLenum face, GLenum 
sfail, GLenum dpfail, GLenum dppass); +GL_APICALL void GL_APIENTRY glTexImage2D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexParameterf (GLenum target, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glTexParameterfv (GLenum target, GLenum pname, const GLfloat *params); +GL_APICALL void GL_APIENTRY glTexParameteri (GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glTexParameteriv (GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glUniform1f (GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glUniform1fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform1i (GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glUniform1iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform2f (GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glUniform2fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform2i (GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glUniform2iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform3f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glUniform3fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform3i (GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glUniform3iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform4f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glUniform4fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform4i (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glUniform4iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUseProgram (GLuint program); +GL_APICALL void GL_APIENTRY glValidateProgram (GLuint program); +GL_APICALL void GL_APIENTRY glVertexAttrib1f (GLuint index, GLfloat x); +GL_APICALL void GL_APIENTRY glVertexAttrib1fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib2f (GLuint index, GLfloat x, GLfloat y); +GL_APICALL void GL_APIENTRY glVertexAttrib2fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib3f (GLuint index, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glVertexAttrib3fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib4f (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GL_APICALL void GL_APIENTRY glVertexAttrib4fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttribPointer (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, 
const void *pointer); +GL_APICALL void GL_APIENTRY glViewport (GLint x, GLint y, GLsizei width, GLsizei height); +#endif +#endif /* GL_ES_VERSION_2_0 */ + +#ifndef GL_ES_VERSION_3_0 +#define GL_ES_VERSION_3_0 1 +typedef khronos_uint16_t GLhalf; +#define GL_READ_BUFFER 0x0C02 +#define GL_UNPACK_ROW_LENGTH 0x0CF2 +#define GL_UNPACK_SKIP_ROWS 0x0CF3 +#define GL_UNPACK_SKIP_PIXELS 0x0CF4 +#define GL_PACK_ROW_LENGTH 0x0D02 +#define GL_PACK_SKIP_ROWS 0x0D03 +#define GL_PACK_SKIP_PIXELS 0x0D04 +#define GL_COLOR 0x1800 +#define GL_DEPTH 0x1801 +#define GL_STENCIL 0x1802 +#define GL_RED 0x1903 +#define GL_RGB8 0x8051 +#define GL_RGBA8 0x8058 +#define GL_RGB10_A2 0x8059 +#define GL_TEXTURE_BINDING_3D 0x806A +#define GL_UNPACK_SKIP_IMAGES 0x806D +#define GL_UNPACK_IMAGE_HEIGHT 0x806E +#define GL_TEXTURE_3D 0x806F +#define GL_TEXTURE_WRAP_R 0x8072 +#define GL_MAX_3D_TEXTURE_SIZE 0x8073 +#define GL_UNSIGNED_INT_2_10_10_10_REV 0x8368 +#define GL_MAX_ELEMENTS_VERTICES 0x80E8 +#define GL_MAX_ELEMENTS_INDICES 0x80E9 +#define GL_TEXTURE_MIN_LOD 0x813A +#define GL_TEXTURE_MAX_LOD 0x813B +#define GL_TEXTURE_BASE_LEVEL 0x813C +#define GL_TEXTURE_MAX_LEVEL 0x813D +#define GL_MIN 0x8007 +#define GL_MAX 0x8008 +#define GL_DEPTH_COMPONENT24 0x81A6 +#define GL_MAX_TEXTURE_LOD_BIAS 0x84FD +#define GL_TEXTURE_COMPARE_MODE 0x884C +#define GL_TEXTURE_COMPARE_FUNC 0x884D +#define GL_CURRENT_QUERY 0x8865 +#define GL_QUERY_RESULT 0x8866 +#define GL_QUERY_RESULT_AVAILABLE 0x8867 +#define GL_BUFFER_MAPPED 0x88BC +#define GL_BUFFER_MAP_POINTER 0x88BD +#define GL_STREAM_READ 0x88E1 +#define GL_STREAM_COPY 0x88E2 +#define GL_STATIC_READ 0x88E5 +#define GL_STATIC_COPY 0x88E6 +#define GL_DYNAMIC_READ 0x88E9 +#define GL_DYNAMIC_COPY 0x88EA +#define GL_MAX_DRAW_BUFFERS 0x8824 +#define GL_DRAW_BUFFER0 0x8825 +#define GL_DRAW_BUFFER1 0x8826 +#define GL_DRAW_BUFFER2 0x8827 +#define GL_DRAW_BUFFER3 0x8828 +#define GL_DRAW_BUFFER4 0x8829 +#define GL_DRAW_BUFFER5 0x882A +#define GL_DRAW_BUFFER6 0x882B +#define GL_DRAW_BUFFER7 0x882C +#define GL_DRAW_BUFFER8 0x882D +#define GL_DRAW_BUFFER9 0x882E +#define GL_DRAW_BUFFER10 0x882F +#define GL_DRAW_BUFFER11 0x8830 +#define GL_DRAW_BUFFER12 0x8831 +#define GL_DRAW_BUFFER13 0x8832 +#define GL_DRAW_BUFFER14 0x8833 +#define GL_DRAW_BUFFER15 0x8834 +#define GL_MAX_FRAGMENT_UNIFORM_COMPONENTS 0x8B49 +#define GL_MAX_VERTEX_UNIFORM_COMPONENTS 0x8B4A +#define GL_SAMPLER_3D 0x8B5F +#define GL_SAMPLER_2D_SHADOW 0x8B62 +#define GL_FRAGMENT_SHADER_DERIVATIVE_HINT 0x8B8B +#define GL_PIXEL_PACK_BUFFER 0x88EB +#define GL_PIXEL_UNPACK_BUFFER 0x88EC +#define GL_PIXEL_PACK_BUFFER_BINDING 0x88ED +#define GL_PIXEL_UNPACK_BUFFER_BINDING 0x88EF +#define GL_FLOAT_MAT2x3 0x8B65 +#define GL_FLOAT_MAT2x4 0x8B66 +#define GL_FLOAT_MAT3x2 0x8B67 +#define GL_FLOAT_MAT3x4 0x8B68 +#define GL_FLOAT_MAT4x2 0x8B69 +#define GL_FLOAT_MAT4x3 0x8B6A +#define GL_SRGB 0x8C40 +#define GL_SRGB8 0x8C41 +#define GL_SRGB8_ALPHA8 0x8C43 +#define GL_COMPARE_REF_TO_TEXTURE 0x884E +#define GL_MAJOR_VERSION 0x821B +#define GL_MINOR_VERSION 0x821C +#define GL_NUM_EXTENSIONS 0x821D +#define GL_RGBA32F 0x8814 +#define GL_RGB32F 0x8815 +#define GL_RGBA16F 0x881A +#define GL_RGB16F 0x881B +#define GL_VERTEX_ATTRIB_ARRAY_INTEGER 0x88FD +#define GL_MAX_ARRAY_TEXTURE_LAYERS 0x88FF +#define GL_MIN_PROGRAM_TEXEL_OFFSET 0x8904 +#define GL_MAX_PROGRAM_TEXEL_OFFSET 0x8905 +#define GL_MAX_VARYING_COMPONENTS 0x8B4B +#define GL_TEXTURE_2D_ARRAY 0x8C1A +#define GL_TEXTURE_BINDING_2D_ARRAY 0x8C1D +#define GL_R11F_G11F_B10F 0x8C3A +#define 
GL_UNSIGNED_INT_10F_11F_11F_REV 0x8C3B +#define GL_RGB9_E5 0x8C3D +#define GL_UNSIGNED_INT_5_9_9_9_REV 0x8C3E +#define GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH 0x8C76 +#define GL_TRANSFORM_FEEDBACK_BUFFER_MODE 0x8C7F +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS 0x8C80 +#define GL_TRANSFORM_FEEDBACK_VARYINGS 0x8C83 +#define GL_TRANSFORM_FEEDBACK_BUFFER_START 0x8C84 +#define GL_TRANSFORM_FEEDBACK_BUFFER_SIZE 0x8C85 +#define GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN 0x8C88 +#define GL_RASTERIZER_DISCARD 0x8C89 +#define GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS 0x8C8A +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS 0x8C8B +#define GL_INTERLEAVED_ATTRIBS 0x8C8C +#define GL_SEPARATE_ATTRIBS 0x8C8D +#define GL_TRANSFORM_FEEDBACK_BUFFER 0x8C8E +#define GL_TRANSFORM_FEEDBACK_BUFFER_BINDING 0x8C8F +#define GL_RGBA32UI 0x8D70 +#define GL_RGB32UI 0x8D71 +#define GL_RGBA16UI 0x8D76 +#define GL_RGB16UI 0x8D77 +#define GL_RGBA8UI 0x8D7C +#define GL_RGB8UI 0x8D7D +#define GL_RGBA32I 0x8D82 +#define GL_RGB32I 0x8D83 +#define GL_RGBA16I 0x8D88 +#define GL_RGB16I 0x8D89 +#define GL_RGBA8I 0x8D8E +#define GL_RGB8I 0x8D8F +#define GL_RED_INTEGER 0x8D94 +#define GL_RGB_INTEGER 0x8D98 +#define GL_RGBA_INTEGER 0x8D99 +#define GL_SAMPLER_2D_ARRAY 0x8DC1 +#define GL_SAMPLER_2D_ARRAY_SHADOW 0x8DC4 +#define GL_SAMPLER_CUBE_SHADOW 0x8DC5 +#define GL_UNSIGNED_INT_VEC2 0x8DC6 +#define GL_UNSIGNED_INT_VEC3 0x8DC7 +#define GL_UNSIGNED_INT_VEC4 0x8DC8 +#define GL_INT_SAMPLER_2D 0x8DCA +#define GL_INT_SAMPLER_3D 0x8DCB +#define GL_INT_SAMPLER_CUBE 0x8DCC +#define GL_INT_SAMPLER_2D_ARRAY 0x8DCF +#define GL_UNSIGNED_INT_SAMPLER_2D 0x8DD2 +#define GL_UNSIGNED_INT_SAMPLER_3D 0x8DD3 +#define GL_UNSIGNED_INT_SAMPLER_CUBE 0x8DD4 +#define GL_UNSIGNED_INT_SAMPLER_2D_ARRAY 0x8DD7 +#define GL_BUFFER_ACCESS_FLAGS 0x911F +#define GL_BUFFER_MAP_LENGTH 0x9120 +#define GL_BUFFER_MAP_OFFSET 0x9121 +#define GL_DEPTH_COMPONENT32F 0x8CAC +#define GL_DEPTH32F_STENCIL8 0x8CAD +#define GL_FLOAT_32_UNSIGNED_INT_24_8_REV 0x8DAD +#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING 0x8210 +#define GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE 0x8211 +#define GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE 0x8212 +#define GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE 0x8213 +#define GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE 0x8214 +#define GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE 0x8215 +#define GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE 0x8216 +#define GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE 0x8217 +#define GL_FRAMEBUFFER_DEFAULT 0x8218 +#define GL_FRAMEBUFFER_UNDEFINED 0x8219 +#define GL_DEPTH_STENCIL_ATTACHMENT 0x821A +#define GL_DEPTH_STENCIL 0x84F9 +#define GL_UNSIGNED_INT_24_8 0x84FA +#define GL_DEPTH24_STENCIL8 0x88F0 +#define GL_UNSIGNED_NORMALIZED 0x8C17 +#define GL_DRAW_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_READ_FRAMEBUFFER 0x8CA8 +#define GL_DRAW_FRAMEBUFFER 0x8CA9 +#define GL_READ_FRAMEBUFFER_BINDING 0x8CAA +#define GL_RENDERBUFFER_SAMPLES 0x8CAB +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER 0x8CD4 +#define GL_MAX_COLOR_ATTACHMENTS 0x8CDF +#define GL_COLOR_ATTACHMENT1 0x8CE1 +#define GL_COLOR_ATTACHMENT2 0x8CE2 +#define GL_COLOR_ATTACHMENT3 0x8CE3 +#define GL_COLOR_ATTACHMENT4 0x8CE4 +#define GL_COLOR_ATTACHMENT5 0x8CE5 +#define GL_COLOR_ATTACHMENT6 0x8CE6 +#define GL_COLOR_ATTACHMENT7 0x8CE7 +#define GL_COLOR_ATTACHMENT8 0x8CE8 +#define GL_COLOR_ATTACHMENT9 0x8CE9 +#define GL_COLOR_ATTACHMENT10 0x8CEA +#define GL_COLOR_ATTACHMENT11 0x8CEB +#define GL_COLOR_ATTACHMENT12 0x8CEC +#define GL_COLOR_ATTACHMENT13 0x8CED +#define GL_COLOR_ATTACHMENT14 0x8CEE +#define 
GL_COLOR_ATTACHMENT15 0x8CEF +#define GL_COLOR_ATTACHMENT16 0x8CF0 +#define GL_COLOR_ATTACHMENT17 0x8CF1 +#define GL_COLOR_ATTACHMENT18 0x8CF2 +#define GL_COLOR_ATTACHMENT19 0x8CF3 +#define GL_COLOR_ATTACHMENT20 0x8CF4 +#define GL_COLOR_ATTACHMENT21 0x8CF5 +#define GL_COLOR_ATTACHMENT22 0x8CF6 +#define GL_COLOR_ATTACHMENT23 0x8CF7 +#define GL_COLOR_ATTACHMENT24 0x8CF8 +#define GL_COLOR_ATTACHMENT25 0x8CF9 +#define GL_COLOR_ATTACHMENT26 0x8CFA +#define GL_COLOR_ATTACHMENT27 0x8CFB +#define GL_COLOR_ATTACHMENT28 0x8CFC +#define GL_COLOR_ATTACHMENT29 0x8CFD +#define GL_COLOR_ATTACHMENT30 0x8CFE +#define GL_COLOR_ATTACHMENT31 0x8CFF +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE 0x8D56 +#define GL_MAX_SAMPLES 0x8D57 +#define GL_HALF_FLOAT 0x140B +#define GL_MAP_READ_BIT 0x0001 +#define GL_MAP_WRITE_BIT 0x0002 +#define GL_MAP_INVALIDATE_RANGE_BIT 0x0004 +#define GL_MAP_INVALIDATE_BUFFER_BIT 0x0008 +#define GL_MAP_FLUSH_EXPLICIT_BIT 0x0010 +#define GL_MAP_UNSYNCHRONIZED_BIT 0x0020 +#define GL_RG 0x8227 +#define GL_RG_INTEGER 0x8228 +#define GL_R8 0x8229 +#define GL_RG8 0x822B +#define GL_R16F 0x822D +#define GL_R32F 0x822E +#define GL_RG16F 0x822F +#define GL_RG32F 0x8230 +#define GL_R8I 0x8231 +#define GL_R8UI 0x8232 +#define GL_R16I 0x8233 +#define GL_R16UI 0x8234 +#define GL_R32I 0x8235 +#define GL_R32UI 0x8236 +#define GL_RG8I 0x8237 +#define GL_RG8UI 0x8238 +#define GL_RG16I 0x8239 +#define GL_RG16UI 0x823A +#define GL_RG32I 0x823B +#define GL_RG32UI 0x823C +#define GL_VERTEX_ARRAY_BINDING 0x85B5 +#define GL_R8_SNORM 0x8F94 +#define GL_RG8_SNORM 0x8F95 +#define GL_RGB8_SNORM 0x8F96 +#define GL_RGBA8_SNORM 0x8F97 +#define GL_SIGNED_NORMALIZED 0x8F9C +#define GL_PRIMITIVE_RESTART_FIXED_INDEX 0x8D69 +#define GL_COPY_READ_BUFFER 0x8F36 +#define GL_COPY_WRITE_BUFFER 0x8F37 +#define GL_COPY_READ_BUFFER_BINDING 0x8F36 +#define GL_COPY_WRITE_BUFFER_BINDING 0x8F37 +#define GL_UNIFORM_BUFFER 0x8A11 +#define GL_UNIFORM_BUFFER_BINDING 0x8A28 +#define GL_UNIFORM_BUFFER_START 0x8A29 +#define GL_UNIFORM_BUFFER_SIZE 0x8A2A +#define GL_MAX_VERTEX_UNIFORM_BLOCKS 0x8A2B +#define GL_MAX_FRAGMENT_UNIFORM_BLOCKS 0x8A2D +#define GL_MAX_COMBINED_UNIFORM_BLOCKS 0x8A2E +#define GL_MAX_UNIFORM_BUFFER_BINDINGS 0x8A2F +#define GL_MAX_UNIFORM_BLOCK_SIZE 0x8A30 +#define GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS 0x8A31 +#define GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS 0x8A33 +#define GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT 0x8A34 +#define GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH 0x8A35 +#define GL_ACTIVE_UNIFORM_BLOCKS 0x8A36 +#define GL_UNIFORM_TYPE 0x8A37 +#define GL_UNIFORM_SIZE 0x8A38 +#define GL_UNIFORM_NAME_LENGTH 0x8A39 +#define GL_UNIFORM_BLOCK_INDEX 0x8A3A +#define GL_UNIFORM_OFFSET 0x8A3B +#define GL_UNIFORM_ARRAY_STRIDE 0x8A3C +#define GL_UNIFORM_MATRIX_STRIDE 0x8A3D +#define GL_UNIFORM_IS_ROW_MAJOR 0x8A3E +#define GL_UNIFORM_BLOCK_BINDING 0x8A3F +#define GL_UNIFORM_BLOCK_DATA_SIZE 0x8A40 +#define GL_UNIFORM_BLOCK_NAME_LENGTH 0x8A41 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS 0x8A42 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES 0x8A43 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER 0x8A44 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER 0x8A46 +#define GL_INVALID_INDEX 0xFFFFFFFFu +#define GL_MAX_VERTEX_OUTPUT_COMPONENTS 0x9122 +#define GL_MAX_FRAGMENT_INPUT_COMPONENTS 0x9125 +#define GL_MAX_SERVER_WAIT_TIMEOUT 0x9111 +#define GL_OBJECT_TYPE 0x9112 +#define GL_SYNC_CONDITION 0x9113 +#define GL_SYNC_STATUS 0x9114 +#define GL_SYNC_FLAGS 0x9115 +#define GL_SYNC_FENCE 0x9116 +#define 
GL_SYNC_GPU_COMMANDS_COMPLETE 0x9117 +#define GL_UNSIGNALED 0x9118 +#define GL_SIGNALED 0x9119 +#define GL_ALREADY_SIGNALED 0x911A +#define GL_TIMEOUT_EXPIRED 0x911B +#define GL_CONDITION_SATISFIED 0x911C +#define GL_WAIT_FAILED 0x911D +#define GL_SYNC_FLUSH_COMMANDS_BIT 0x00000001 +#define GL_TIMEOUT_IGNORED 0xFFFFFFFFFFFFFFFFull +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR 0x88FE +#define GL_ANY_SAMPLES_PASSED 0x8C2F +#define GL_ANY_SAMPLES_PASSED_CONSERVATIVE 0x8D6A +#define GL_SAMPLER_BINDING 0x8919 +#define GL_RGB10_A2UI 0x906F +#define GL_TEXTURE_SWIZZLE_R 0x8E42 +#define GL_TEXTURE_SWIZZLE_G 0x8E43 +#define GL_TEXTURE_SWIZZLE_B 0x8E44 +#define GL_TEXTURE_SWIZZLE_A 0x8E45 +#define GL_GREEN 0x1904 +#define GL_BLUE 0x1905 +#define GL_INT_2_10_10_10_REV 0x8D9F +#define GL_TRANSFORM_FEEDBACK 0x8E22 +#define GL_TRANSFORM_FEEDBACK_PAUSED 0x8E23 +#define GL_TRANSFORM_FEEDBACK_ACTIVE 0x8E24 +#define GL_TRANSFORM_FEEDBACK_BINDING 0x8E25 +#define GL_PROGRAM_BINARY_RETRIEVABLE_HINT 0x8257 +#define GL_PROGRAM_BINARY_LENGTH 0x8741 +#define GL_NUM_PROGRAM_BINARY_FORMATS 0x87FE +#define GL_PROGRAM_BINARY_FORMATS 0x87FF +#define GL_COMPRESSED_R11_EAC 0x9270 +#define GL_COMPRESSED_SIGNED_R11_EAC 0x9271 +#define GL_COMPRESSED_RG11_EAC 0x9272 +#define GL_COMPRESSED_SIGNED_RG11_EAC 0x9273 +#define GL_COMPRESSED_RGB8_ETC2 0x9274 +#define GL_COMPRESSED_SRGB8_ETC2 0x9275 +#define GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9276 +#define GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9277 +#define GL_COMPRESSED_RGBA8_ETC2_EAC 0x9278 +#define GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC 0x9279 +#define GL_TEXTURE_IMMUTABLE_FORMAT 0x912F +#define GL_MAX_ELEMENT_INDEX 0x8D6B +#define GL_NUM_SAMPLE_COUNTS 0x9380 +#define GL_TEXTURE_IMMUTABLE_LEVELS 0x82DF +typedef void (GL_APIENTRYP PFNGLREADBUFFERPROC) (GLenum src); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE3DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLGENQUERIESPROC) (GLsizei n, GLuint *ids); +typedef void (GL_APIENTRYP PFNGLDELETEQUERIESPROC) (GLsizei n, const GLuint *ids); +typedef GLboolean (GL_APIENTRYP PFNGLISQUERYPROC) (GLuint id); +typedef void (GL_APIENTRYP PFNGLBEGINQUERYPROC) (GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLENDQUERYPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETQUERYIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUIVPROC) (GLuint id, GLenum 
pname, GLuint *params); +typedef GLboolean (GL_APIENTRYP PFNGLUNMAPBUFFERPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVPROC) (GLenum target, GLenum pname, void **params); +typedef void (GL_APIENTRYP PFNGLDRAWBUFFERSPROC) (GLsizei n, const GLenum *bufs); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLBLITFRAMEBUFFERPROC) (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void *(GL_APIENTRYP PFNGLMAPBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (GL_APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length); +typedef void (GL_APIENTRYP PFNGLBINDVERTEXARRAYPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLDELETEVERTEXARRAYSPROC) (GLsizei n, const GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLGENVERTEXARRAYSPROC) (GLsizei n, GLuint *arrays); +typedef GLboolean (GL_APIENTRYP PFNGLISVERTEXARRAYPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLGETINTEGERI_VPROC) (GLenum target, GLuint index, GLint *data); +typedef void (GL_APIENTRYP PFNGLBEGINTRANSFORMFEEDBACKPROC) (GLenum primitiveMode); +typedef void (GL_APIENTRYP PFNGLENDTRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERRANGEPROC) (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERBASEPROC) (GLenum target, GLuint index, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTRANSFORMFEEDBACKVARYINGSPROC) (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +typedef void (GL_APIENTRYP PFNGLGETTRANSFORMFEEDBACKVARYINGPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBIPOINTERPROC) (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIUIVPROC) (GLuint index, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4IPROC) (GLuint index, GLint x, GLint y, GLint z, GLint w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4UIPROC) (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4IVPROC) (GLuint 
index, const GLint *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4UIVPROC) (GLuint index, const GLuint *v); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMUIVPROC) (GLuint program, GLint location, GLuint *params); +typedef GLint (GL_APIENTRYP PFNGLGETFRAGDATALOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UIPROC) (GLint location, GLuint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UIPROC) (GLint location, GLuint v0, GLuint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UIPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UIPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERIVPROC) (GLenum buffer, GLint drawbuffer, const GLint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERUIVPROC) (GLenum buffer, GLint drawbuffer, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERFVPROC) (GLenum buffer, GLint drawbuffer, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERFIPROC) (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGIPROC) (GLenum name, GLuint index); +typedef void (GL_APIENTRYP PFNGLCOPYBUFFERSUBDATAPROC) (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMINDICESPROC) (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMSIVPROC) (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +typedef GLuint (GL_APIENTRYP PFNGLGETUNIFORMBLOCKINDEXPROC) (GLuint program, const GLchar *uniformBlockName); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKIVPROC) (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC) (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +typedef void (GL_APIENTRYP PFNGLUNIFORMBLOCKBINDINGPROC) (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDPROC) (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +typedef GLsync (GL_APIENTRYP PFNGLFENCESYNCPROC) (GLenum condition, GLbitfield flags); +typedef GLboolean (GL_APIENTRYP PFNGLISSYNCPROC) (GLsync sync); +typedef void (GL_APIENTRYP PFNGLDELETESYNCPROC) (GLsync sync); +typedef GLenum (GL_APIENTRYP PFNGLCLIENTWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VPROC) (GLenum pname, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETSYNCIVPROC) (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); 
+typedef void (GL_APIENTRYP PFNGLGETINTEGER64I_VPROC) (GLenum target, GLuint index, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERI64VPROC) (GLenum target, GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGENSAMPLERSPROC) (GLsizei count, GLuint *samplers); +typedef void (GL_APIENTRYP PFNGLDELETESAMPLERSPROC) (GLsizei count, const GLuint *samplers); +typedef GLboolean (GL_APIENTRYP PFNGLISSAMPLERPROC) (GLuint sampler); +typedef void (GL_APIENTRYP PFNGLBINDSAMPLERPROC) (GLuint unit, GLuint sampler); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIPROC) (GLuint sampler, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIVPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFPROC) (GLuint sampler, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, const GLfloat *param); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIVPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISORPROC) (GLuint index, GLuint divisor); +typedef void (GL_APIENTRYP PFNGLBINDTRANSFORMFEEDBACKPROC) (GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLDELETETRANSFORMFEEDBACKSPROC) (GLsizei n, const GLuint *ids); +typedef void (GL_APIENTRYP PFNGLGENTRANSFORMFEEDBACKSPROC) (GLsizei n, GLuint *ids); +typedef GLboolean (GL_APIENTRYP PFNGLISTRANSFORMFEEDBACKPROC) (GLuint id); +typedef void (GL_APIENTRYP PFNGLPAUSETRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLRESUMETRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMBINARYPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +typedef void (GL_APIENTRYP PFNGLPROGRAMBINARYPROC) (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLPROGRAMPARAMETERIPROC) (GLuint program, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLINVALIDATEFRAMEBUFFERPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments); +typedef void (GL_APIENTRYP PFNGLINVALIDATESUBFRAMEBUFFERPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (GL_APIENTRYP PFNGLGETINTERNALFORMATIVPROC) (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glReadBuffer (GLenum src); +GL_APICALL void GL_APIENTRY glDrawRangeElements (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glTexImage3D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glCopyTexSubImage3D 
(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glCompressedTexImage3D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glGenQueries (GLsizei n, GLuint *ids); +GL_APICALL void GL_APIENTRY glDeleteQueries (GLsizei n, const GLuint *ids); +GL_APICALL GLboolean GL_APIENTRY glIsQuery (GLuint id); +GL_APICALL void GL_APIENTRY glBeginQuery (GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glEndQuery (GLenum target); +GL_APICALL void GL_APIENTRY glGetQueryiv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectuiv (GLuint id, GLenum pname, GLuint *params); +GL_APICALL GLboolean GL_APIENTRY glUnmapBuffer (GLenum target); +GL_APICALL void GL_APIENTRY glGetBufferPointerv (GLenum target, GLenum pname, void **params); +GL_APICALL void GL_APIENTRY glDrawBuffers (GLsizei n, const GLenum *bufs); +GL_APICALL void GL_APIENTRY glUniformMatrix2x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glBlitFramebuffer (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glFramebufferTextureLayer (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +GL_APICALL void *GL_APIENTRY glMapBufferRange (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GL_APICALL void GL_APIENTRY glFlushMappedBufferRange (GLenum target, GLintptr offset, GLsizeiptr length); +GL_APICALL void GL_APIENTRY glBindVertexArray (GLuint array); +GL_APICALL void GL_APIENTRY glDeleteVertexArrays (GLsizei n, const GLuint *arrays); +GL_APICALL void GL_APIENTRY glGenVertexArrays (GLsizei n, GLuint *arrays); +GL_APICALL GLboolean GL_APIENTRY glIsVertexArray (GLuint array); +GL_APICALL void GL_APIENTRY glGetIntegeri_v (GLenum target, GLuint index, GLint *data); +GL_APICALL void GL_APIENTRY glBeginTransformFeedback (GLenum primitiveMode); +GL_APICALL void GL_APIENTRY glEndTransformFeedback (void); +GL_APICALL void GL_APIENTRY glBindBufferRange (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glBindBufferBase (GLenum target, GLuint index, GLuint buffer); +GL_APICALL void GL_APIENTRY glTransformFeedbackVaryings (GLuint 
program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +GL_APICALL void GL_APIENTRY glGetTransformFeedbackVarying (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glVertexAttribIPointer (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_APICALL void GL_APIENTRY glGetVertexAttribIiv (GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribIuiv (GLuint index, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glVertexAttribI4i (GLuint index, GLint x, GLint y, GLint z, GLint w); +GL_APICALL void GL_APIENTRY glVertexAttribI4ui (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GL_APICALL void GL_APIENTRY glVertexAttribI4iv (GLuint index, const GLint *v); +GL_APICALL void GL_APIENTRY glVertexAttribI4uiv (GLuint index, const GLuint *v); +GL_APICALL void GL_APIENTRY glGetUniformuiv (GLuint program, GLint location, GLuint *params); +GL_APICALL GLint GL_APIENTRY glGetFragDataLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glUniform1ui (GLint location, GLuint v0); +GL_APICALL void GL_APIENTRY glUniform2ui (GLint location, GLuint v0, GLuint v1); +GL_APICALL void GL_APIENTRY glUniform3ui (GLint location, GLuint v0, GLuint v1, GLuint v2); +GL_APICALL void GL_APIENTRY glUniform4ui (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GL_APICALL void GL_APIENTRY glUniform1uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform2uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform3uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform4uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glClearBufferiv (GLenum buffer, GLint drawbuffer, const GLint *value); +GL_APICALL void GL_APIENTRY glClearBufferuiv (GLenum buffer, GLint drawbuffer, const GLuint *value); +GL_APICALL void GL_APIENTRY glClearBufferfv (GLenum buffer, GLint drawbuffer, const GLfloat *value); +GL_APICALL void GL_APIENTRY glClearBufferfi (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +GL_APICALL const GLubyte *GL_APIENTRY glGetStringi (GLenum name, GLuint index); +GL_APICALL void GL_APIENTRY glCopyBufferSubData (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glGetUniformIndices (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +GL_APICALL void GL_APIENTRY glGetActiveUniformsiv (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +GL_APICALL GLuint GL_APIENTRY glGetUniformBlockIndex (GLuint program, const GLchar *uniformBlockName); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockiv (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockName (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +GL_APICALL void GL_APIENTRY glUniformBlockBinding (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +GL_APICALL void GL_APIENTRY glDrawArraysInstanced (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +GL_APICALL void GL_APIENTRY glDrawElementsInstanced (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei 
instancecount); +GL_APICALL GLsync GL_APIENTRY glFenceSync (GLenum condition, GLbitfield flags); +GL_APICALL GLboolean GL_APIENTRY glIsSync (GLsync sync); +GL_APICALL void GL_APIENTRY glDeleteSync (GLsync sync); +GL_APICALL GLenum GL_APIENTRY glClientWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glGetInteger64v (GLenum pname, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetSynciv (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +GL_APICALL void GL_APIENTRY glGetInteger64i_v (GLenum target, GLuint index, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetBufferParameteri64v (GLenum target, GLenum pname, GLint64 *params); +GL_APICALL void GL_APIENTRY glGenSamplers (GLsizei count, GLuint *samplers); +GL_APICALL void GL_APIENTRY glDeleteSamplers (GLsizei count, const GLuint *samplers); +GL_APICALL GLboolean GL_APIENTRY glIsSampler (GLuint sampler); +GL_APICALL void GL_APIENTRY glBindSampler (GLuint unit, GLuint sampler); +GL_APICALL void GL_APIENTRY glSamplerParameteri (GLuint sampler, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glSamplerParameteriv (GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterf (GLuint sampler, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glSamplerParameterfv (GLuint sampler, GLenum pname, const GLfloat *param); +GL_APICALL void GL_APIENTRY glGetSamplerParameteriv (GLuint sampler, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterfv (GLuint sampler, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glVertexAttribDivisor (GLuint index, GLuint divisor); +GL_APICALL void GL_APIENTRY glBindTransformFeedback (GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glDeleteTransformFeedbacks (GLsizei n, const GLuint *ids); +GL_APICALL void GL_APIENTRY glGenTransformFeedbacks (GLsizei n, GLuint *ids); +GL_APICALL GLboolean GL_APIENTRY glIsTransformFeedback (GLuint id); +GL_APICALL void GL_APIENTRY glPauseTransformFeedback (void); +GL_APICALL void GL_APIENTRY glResumeTransformFeedback (void); +GL_APICALL void GL_APIENTRY glGetProgramBinary (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +GL_APICALL void GL_APIENTRY glProgramBinary (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glProgramParameteri (GLuint program, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glInvalidateFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments); +GL_APICALL void GL_APIENTRY glInvalidateSubFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage2D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage3D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GL_APICALL void GL_APIENTRY glGetInternalformativ (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +#endif +#endif /* GL_ES_VERSION_3_0 */ + +#ifndef GL_ES_VERSION_3_1 +#define GL_ES_VERSION_3_1 1 +#define GL_COMPUTE_SHADER 0x91B9 +#define GL_MAX_COMPUTE_UNIFORM_BLOCKS 0x91BB +#define GL_MAX_COMPUTE_TEXTURE_IMAGE_UNITS 0x91BC +#define GL_MAX_COMPUTE_IMAGE_UNIFORMS 0x91BD 
+#define GL_MAX_COMPUTE_SHARED_MEMORY_SIZE 0x8262 +#define GL_MAX_COMPUTE_UNIFORM_COMPONENTS 0x8263 +#define GL_MAX_COMPUTE_ATOMIC_COUNTER_BUFFERS 0x8264 +#define GL_MAX_COMPUTE_ATOMIC_COUNTERS 0x8265 +#define GL_MAX_COMBINED_COMPUTE_UNIFORM_COMPONENTS 0x8266 +#define GL_MAX_COMPUTE_WORK_GROUP_INVOCATIONS 0x90EB +#define GL_MAX_COMPUTE_WORK_GROUP_COUNT 0x91BE +#define GL_MAX_COMPUTE_WORK_GROUP_SIZE 0x91BF +#define GL_COMPUTE_WORK_GROUP_SIZE 0x8267 +#define GL_DISPATCH_INDIRECT_BUFFER 0x90EE +#define GL_DISPATCH_INDIRECT_BUFFER_BINDING 0x90EF +#define GL_COMPUTE_SHADER_BIT 0x00000020 +#define GL_DRAW_INDIRECT_BUFFER 0x8F3F +#define GL_DRAW_INDIRECT_BUFFER_BINDING 0x8F43 +#define GL_MAX_UNIFORM_LOCATIONS 0x826E +#define GL_FRAMEBUFFER_DEFAULT_WIDTH 0x9310 +#define GL_FRAMEBUFFER_DEFAULT_HEIGHT 0x9311 +#define GL_FRAMEBUFFER_DEFAULT_SAMPLES 0x9313 +#define GL_FRAMEBUFFER_DEFAULT_FIXED_SAMPLE_LOCATIONS 0x9314 +#define GL_MAX_FRAMEBUFFER_WIDTH 0x9315 +#define GL_MAX_FRAMEBUFFER_HEIGHT 0x9316 +#define GL_MAX_FRAMEBUFFER_SAMPLES 0x9318 +#define GL_UNIFORM 0x92E1 +#define GL_UNIFORM_BLOCK 0x92E2 +#define GL_PROGRAM_INPUT 0x92E3 +#define GL_PROGRAM_OUTPUT 0x92E4 +#define GL_BUFFER_VARIABLE 0x92E5 +#define GL_SHADER_STORAGE_BLOCK 0x92E6 +#define GL_ATOMIC_COUNTER_BUFFER 0x92C0 +#define GL_TRANSFORM_FEEDBACK_VARYING 0x92F4 +#define GL_ACTIVE_RESOURCES 0x92F5 +#define GL_MAX_NAME_LENGTH 0x92F6 +#define GL_MAX_NUM_ACTIVE_VARIABLES 0x92F7 +#define GL_NAME_LENGTH 0x92F9 +#define GL_TYPE 0x92FA +#define GL_ARRAY_SIZE 0x92FB +#define GL_OFFSET 0x92FC +#define GL_BLOCK_INDEX 0x92FD +#define GL_ARRAY_STRIDE 0x92FE +#define GL_MATRIX_STRIDE 0x92FF +#define GL_IS_ROW_MAJOR 0x9300 +#define GL_ATOMIC_COUNTER_BUFFER_INDEX 0x9301 +#define GL_BUFFER_BINDING 0x9302 +#define GL_BUFFER_DATA_SIZE 0x9303 +#define GL_NUM_ACTIVE_VARIABLES 0x9304 +#define GL_ACTIVE_VARIABLES 0x9305 +#define GL_REFERENCED_BY_VERTEX_SHADER 0x9306 +#define GL_REFERENCED_BY_FRAGMENT_SHADER 0x930A +#define GL_REFERENCED_BY_COMPUTE_SHADER 0x930B +#define GL_TOP_LEVEL_ARRAY_SIZE 0x930C +#define GL_TOP_LEVEL_ARRAY_STRIDE 0x930D +#define GL_LOCATION 0x930E +#define GL_VERTEX_SHADER_BIT 0x00000001 +#define GL_FRAGMENT_SHADER_BIT 0x00000002 +#define GL_ALL_SHADER_BITS 0xFFFFFFFF +#define GL_PROGRAM_SEPARABLE 0x8258 +#define GL_ACTIVE_PROGRAM 0x8259 +#define GL_PROGRAM_PIPELINE_BINDING 0x825A +#define GL_ATOMIC_COUNTER_BUFFER_BINDING 0x92C1 +#define GL_ATOMIC_COUNTER_BUFFER_START 0x92C2 +#define GL_ATOMIC_COUNTER_BUFFER_SIZE 0x92C3 +#define GL_MAX_VERTEX_ATOMIC_COUNTER_BUFFERS 0x92CC +#define GL_MAX_FRAGMENT_ATOMIC_COUNTER_BUFFERS 0x92D0 +#define GL_MAX_COMBINED_ATOMIC_COUNTER_BUFFERS 0x92D1 +#define GL_MAX_VERTEX_ATOMIC_COUNTERS 0x92D2 +#define GL_MAX_FRAGMENT_ATOMIC_COUNTERS 0x92D6 +#define GL_MAX_COMBINED_ATOMIC_COUNTERS 0x92D7 +#define GL_MAX_ATOMIC_COUNTER_BUFFER_SIZE 0x92D8 +#define GL_MAX_ATOMIC_COUNTER_BUFFER_BINDINGS 0x92DC +#define GL_ACTIVE_ATOMIC_COUNTER_BUFFERS 0x92D9 +#define GL_UNSIGNED_INT_ATOMIC_COUNTER 0x92DB +#define GL_MAX_IMAGE_UNITS 0x8F38 +#define GL_MAX_VERTEX_IMAGE_UNIFORMS 0x90CA +#define GL_MAX_FRAGMENT_IMAGE_UNIFORMS 0x90CE +#define GL_MAX_COMBINED_IMAGE_UNIFORMS 0x90CF +#define GL_IMAGE_BINDING_NAME 0x8F3A +#define GL_IMAGE_BINDING_LEVEL 0x8F3B +#define GL_IMAGE_BINDING_LAYERED 0x8F3C +#define GL_IMAGE_BINDING_LAYER 0x8F3D +#define GL_IMAGE_BINDING_ACCESS 0x8F3E +#define GL_IMAGE_BINDING_FORMAT 0x906E +#define GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT 0x00000001 +#define GL_ELEMENT_ARRAY_BARRIER_BIT 0x00000002 +#define 
GL_UNIFORM_BARRIER_BIT 0x00000004 +#define GL_TEXTURE_FETCH_BARRIER_BIT 0x00000008 +#define GL_SHADER_IMAGE_ACCESS_BARRIER_BIT 0x00000020 +#define GL_COMMAND_BARRIER_BIT 0x00000040 +#define GL_PIXEL_BUFFER_BARRIER_BIT 0x00000080 +#define GL_TEXTURE_UPDATE_BARRIER_BIT 0x00000100 +#define GL_BUFFER_UPDATE_BARRIER_BIT 0x00000200 +#define GL_FRAMEBUFFER_BARRIER_BIT 0x00000400 +#define GL_TRANSFORM_FEEDBACK_BARRIER_BIT 0x00000800 +#define GL_ATOMIC_COUNTER_BARRIER_BIT 0x00001000 +#define GL_ALL_BARRIER_BITS 0xFFFFFFFF +#define GL_IMAGE_2D 0x904D +#define GL_IMAGE_3D 0x904E +#define GL_IMAGE_CUBE 0x9050 +#define GL_IMAGE_2D_ARRAY 0x9053 +#define GL_INT_IMAGE_2D 0x9058 +#define GL_INT_IMAGE_3D 0x9059 +#define GL_INT_IMAGE_CUBE 0x905B +#define GL_INT_IMAGE_2D_ARRAY 0x905E +#define GL_UNSIGNED_INT_IMAGE_2D 0x9063 +#define GL_UNSIGNED_INT_IMAGE_3D 0x9064 +#define GL_UNSIGNED_INT_IMAGE_CUBE 0x9066 +#define GL_UNSIGNED_INT_IMAGE_2D_ARRAY 0x9069 +#define GL_IMAGE_FORMAT_COMPATIBILITY_TYPE 0x90C7 +#define GL_IMAGE_FORMAT_COMPATIBILITY_BY_SIZE 0x90C8 +#define GL_IMAGE_FORMAT_COMPATIBILITY_BY_CLASS 0x90C9 +#define GL_READ_ONLY 0x88B8 +#define GL_WRITE_ONLY 0x88B9 +#define GL_READ_WRITE 0x88BA +#define GL_SHADER_STORAGE_BUFFER 0x90D2 +#define GL_SHADER_STORAGE_BUFFER_BINDING 0x90D3 +#define GL_SHADER_STORAGE_BUFFER_START 0x90D4 +#define GL_SHADER_STORAGE_BUFFER_SIZE 0x90D5 +#define GL_MAX_VERTEX_SHADER_STORAGE_BLOCKS 0x90D6 +#define GL_MAX_FRAGMENT_SHADER_STORAGE_BLOCKS 0x90DA +#define GL_MAX_COMPUTE_SHADER_STORAGE_BLOCKS 0x90DB +#define GL_MAX_COMBINED_SHADER_STORAGE_BLOCKS 0x90DC +#define GL_MAX_SHADER_STORAGE_BUFFER_BINDINGS 0x90DD +#define GL_MAX_SHADER_STORAGE_BLOCK_SIZE 0x90DE +#define GL_SHADER_STORAGE_BUFFER_OFFSET_ALIGNMENT 0x90DF +#define GL_SHADER_STORAGE_BARRIER_BIT 0x00002000 +#define GL_MAX_COMBINED_SHADER_OUTPUT_RESOURCES 0x8F39 +#define GL_DEPTH_STENCIL_TEXTURE_MODE 0x90EA +#define GL_STENCIL_INDEX 0x1901 +#define GL_MIN_PROGRAM_TEXTURE_GATHER_OFFSET 0x8E5E +#define GL_MAX_PROGRAM_TEXTURE_GATHER_OFFSET 0x8E5F +#define GL_SAMPLE_POSITION 0x8E50 +#define GL_SAMPLE_MASK 0x8E51 +#define GL_SAMPLE_MASK_VALUE 0x8E52 +#define GL_TEXTURE_2D_MULTISAMPLE 0x9100 +#define GL_MAX_SAMPLE_MASK_WORDS 0x8E59 +#define GL_MAX_COLOR_TEXTURE_SAMPLES 0x910E +#define GL_MAX_DEPTH_TEXTURE_SAMPLES 0x910F +#define GL_MAX_INTEGER_SAMPLES 0x9110 +#define GL_TEXTURE_BINDING_2D_MULTISAMPLE 0x9104 +#define GL_TEXTURE_SAMPLES 0x9106 +#define GL_TEXTURE_FIXED_SAMPLE_LOCATIONS 0x9107 +#define GL_TEXTURE_WIDTH 0x1000 +#define GL_TEXTURE_HEIGHT 0x1001 +#define GL_TEXTURE_DEPTH 0x8071 +#define GL_TEXTURE_INTERNAL_FORMAT 0x1003 +#define GL_TEXTURE_RED_SIZE 0x805C +#define GL_TEXTURE_GREEN_SIZE 0x805D +#define GL_TEXTURE_BLUE_SIZE 0x805E +#define GL_TEXTURE_ALPHA_SIZE 0x805F +#define GL_TEXTURE_DEPTH_SIZE 0x884A +#define GL_TEXTURE_STENCIL_SIZE 0x88F1 +#define GL_TEXTURE_SHARED_SIZE 0x8C3F +#define GL_TEXTURE_RED_TYPE 0x8C10 +#define GL_TEXTURE_GREEN_TYPE 0x8C11 +#define GL_TEXTURE_BLUE_TYPE 0x8C12 +#define GL_TEXTURE_ALPHA_TYPE 0x8C13 +#define GL_TEXTURE_DEPTH_TYPE 0x8C16 +#define GL_TEXTURE_COMPRESSED 0x86A1 +#define GL_SAMPLER_2D_MULTISAMPLE 0x9108 +#define GL_INT_SAMPLER_2D_MULTISAMPLE 0x9109 +#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE 0x910A +#define GL_VERTEX_ATTRIB_BINDING 0x82D4 +#define GL_VERTEX_ATTRIB_RELATIVE_OFFSET 0x82D5 +#define GL_VERTEX_BINDING_DIVISOR 0x82D6 +#define GL_VERTEX_BINDING_OFFSET 0x82D7 +#define GL_VERTEX_BINDING_STRIDE 0x82D8 +#define GL_VERTEX_BINDING_BUFFER 0x8F4F +#define 
GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET 0x82D9 +#define GL_MAX_VERTEX_ATTRIB_BINDINGS 0x82DA +#define GL_MAX_VERTEX_ATTRIB_STRIDE 0x82E5 +typedef void (GL_APIENTRYP PFNGLDISPATCHCOMPUTEPROC) (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z); +typedef void (GL_APIENTRYP PFNGLDISPATCHCOMPUTEINDIRECTPROC) (GLintptr indirect); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINDIRECTPROC) (GLenum mode, const void *indirect); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINDIRECTPROC) (GLenum mode, GLenum type, const void *indirect); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINTERFACEIVPROC) (GLuint program, GLenum programInterface, GLenum pname, GLint *params); +typedef GLuint (GL_APIENTRYP PFNGLGETPROGRAMRESOURCEINDEXPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMRESOURCENAMEPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei bufSize, GLsizei *length, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMRESOURCEIVPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLint *params); +typedef GLint (GL_APIENTRYP PFNGLGETPROGRAMRESOURCELOCATIONPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMSTAGESPROC) (GLuint pipeline, GLbitfield stages, GLuint program); +typedef void (GL_APIENTRYP PFNGLACTIVESHADERPROGRAMPROC) (GLuint pipeline, GLuint program); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROGRAMVPROC) (GLenum type, GLsizei count, const GLchar *const*strings); +typedef void (GL_APIENTRYP PFNGLBINDPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPIPELINESPROC) (GLsizei n, const GLuint *pipelines); +typedef void (GL_APIENTRYP PFNGLGENPROGRAMPIPELINESPROC) (GLsizei n, GLuint *pipelines); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMPIPELINEIVPROC) (GLuint pipeline, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1IPROC) (GLuint program, GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2IPROC) (GLuint program, GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3IPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4IPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UIPROC) (GLuint program, GLint location, GLuint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FPROC) (GLuint program, GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); 
+typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMPIPELINEINFOLOGPROC) (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLBINDIMAGETEXTUREPROC) (GLuint unit, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLenum format); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANI_VPROC) (GLenum target, GLuint index, GLboolean *data); +typedef void (GL_APIENTRYP 
PFNGLMEMORYBARRIERPROC) (GLbitfield barriers); +typedef void (GL_APIENTRYP PFNGLMEMORYBARRIERBYREGIONPROC) (GLbitfield barriers); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (GL_APIENTRYP PFNGLGETMULTISAMPLEFVPROC) (GLenum pname, GLuint index, GLfloat *val); +typedef void (GL_APIENTRYP PFNGLSAMPLEMASKIPROC) (GLuint maskNumber, GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERIVPROC) (GLenum target, GLint level, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERFVPROC) (GLenum target, GLint level, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLBINDVERTEXBUFFERPROC) (GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBFORMATPROC) (GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBIFORMATPROC) (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBBINDINGPROC) (GLuint attribindex, GLuint bindingindex); +typedef void (GL_APIENTRYP PFNGLVERTEXBINDINGDIVISORPROC) (GLuint bindingindex, GLuint divisor); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glDispatchCompute (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z); +GL_APICALL void GL_APIENTRY glDispatchComputeIndirect (GLintptr indirect); +GL_APICALL void GL_APIENTRY glDrawArraysIndirect (GLenum mode, const void *indirect); +GL_APICALL void GL_APIENTRY glDrawElementsIndirect (GLenum mode, GLenum type, const void *indirect); +GL_APICALL void GL_APIENTRY glFramebufferParameteri (GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glGetFramebufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetProgramInterfaceiv (GLuint program, GLenum programInterface, GLenum pname, GLint *params); +GL_APICALL GLuint GL_APIENTRY glGetProgramResourceIndex (GLuint program, GLenum programInterface, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetProgramResourceName (GLuint program, GLenum programInterface, GLuint index, GLsizei bufSize, GLsizei *length, GLchar *name); +GL_APICALL void GL_APIENTRY glGetProgramResourceiv (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL GLint GL_APIENTRY glGetProgramResourceLocation (GLuint program, GLenum programInterface, const GLchar *name); +GL_APICALL void GL_APIENTRY glUseProgramStages (GLuint pipeline, GLbitfield stages, GLuint program); +GL_APICALL void GL_APIENTRY glActiveShaderProgram (GLuint pipeline, GLuint program); +GL_APICALL GLuint GL_APIENTRY glCreateShaderProgramv (GLenum type, GLsizei count, const GLchar *const*strings); +GL_APICALL void GL_APIENTRY glBindProgramPipeline (GLuint pipeline); +GL_APICALL void GL_APIENTRY glDeleteProgramPipelines (GLsizei n, const GLuint *pipelines); +GL_APICALL void GL_APIENTRY glGenProgramPipelines (GLsizei n, GLuint *pipelines); +GL_APICALL GLboolean GL_APIENTRY glIsProgramPipeline (GLuint pipeline); +GL_APICALL void GL_APIENTRY glGetProgramPipelineiv (GLuint pipeline, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glProgramUniform1i (GLuint program, GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glProgramUniform2i (GLuint program, GLint 
location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glProgramUniform3i (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glProgramUniform4i (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glProgramUniform1ui (GLuint program, GLint location, GLuint v0); +GL_APICALL void GL_APIENTRY glProgramUniform2ui (GLuint program, GLint location, GLuint v0, GLuint v1); +GL_APICALL void GL_APIENTRY glProgramUniform3ui (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +GL_APICALL void GL_APIENTRY glProgramUniform4ui (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GL_APICALL void GL_APIENTRY glProgramUniform1f (GLuint program, GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glProgramUniform2f (GLuint program, GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glProgramUniform3f (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glProgramUniform4f (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glProgramUniform1iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform3iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform1uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform3uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform1fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform2fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform3fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform4fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2x3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x2fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2x4fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x2fv (GLuint program, GLint 
location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x4fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glValidateProgramPipeline (GLuint pipeline); +GL_APICALL void GL_APIENTRY glGetProgramPipelineInfoLog (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glBindImageTexture (GLuint unit, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLenum format); +GL_APICALL void GL_APIENTRY glGetBooleani_v (GLenum target, GLuint index, GLboolean *data); +GL_APICALL void GL_APIENTRY glMemoryBarrier (GLbitfield barriers); +GL_APICALL void GL_APIENTRY glMemoryBarrierByRegion (GLbitfield barriers); +GL_APICALL void GL_APIENTRY glTexStorage2DMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GL_APICALL void GL_APIENTRY glGetMultisamplefv (GLenum pname, GLuint index, GLfloat *val); +GL_APICALL void GL_APIENTRY glSampleMaski (GLuint maskNumber, GLbitfield mask); +GL_APICALL void GL_APIENTRY glGetTexLevelParameteriv (GLenum target, GLint level, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterfv (GLenum target, GLint level, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glBindVertexBuffer (GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +GL_APICALL void GL_APIENTRY glVertexAttribFormat (GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +GL_APICALL void GL_APIENTRY glVertexAttribIFormat (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GL_APICALL void GL_APIENTRY glVertexAttribBinding (GLuint attribindex, GLuint bindingindex); +GL_APICALL void GL_APIENTRY glVertexBindingDivisor (GLuint bindingindex, GLuint divisor); +#endif +#endif /* GL_ES_VERSION_3_1 */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/angle/include/GLES3/gl31ext_explicit_context_autogen.inc b/vendor/angle/include/GLES3/gl31ext_explicit_context_autogen.inc new file mode 100644 index 00000000..32d98b64 --- /dev/null +++ b/vendor/angle/include/GLES3/gl31ext_explicit_context_autogen.inc @@ -0,0 +1,148 @@ +// GENERATED FILE - DO NOT EDIT. +// Generated by generate_entry_points.py using data from gl.xml and gl_angle_ext.xml. +// +// Copyright 2020 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+// +// gl31ext_explicit_context_autogen.inc: +// Function declarations for the EGL_ANGLE_explicit_context extension + +typedef void (GL_APIENTRYP PFNGLACTIVESHADERPROGRAMCONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline, GLuint program); +typedef void (GL_APIENTRYP PFNGLBINDIMAGETEXTURECONTEXTANGLEPROC)(GLeglContext ctx, GLuint unit, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLenum format); +typedef void (GL_APIENTRYP PFNGLBINDPROGRAMPIPELINECONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLBINDVERTEXBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROGRAMVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum type, GLsizei count, const GLchar *const*strings); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPIPELINESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *pipelines); +typedef void (GL_APIENTRYP PFNGLDISPATCHCOMPUTECONTEXTANGLEPROC)(GLeglContext ctx, GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z); +typedef void (GL_APIENTRYP PFNGLDISPATCHCOMPUTEINDIRECTCONTEXTANGLEPROC)(GLeglContext ctx, GLintptr indirect); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINDIRECTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, const void *indirect); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINDIRECTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLenum type, const void *indirect); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERPARAMETERICONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLGENPROGRAMPIPELINESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *pipelines); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANI_VCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERPARAMETERIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETMULTISAMPLEFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLuint index, GLfloat *val); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINTERFACEIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum programInterface, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMPIPELINEINFOLOGCONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMPIPELINEIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline, GLenum pname, GLint *params); +typedef GLuint (GL_APIENTRYP PFNGLGETPROGRAMRESOURCEINDEXCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum programInterface, const GLchar *name); +typedef GLint (GL_APIENTRYP PFNGLGETPROGRAMRESOURCELOCATIONCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum programInterface, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMRESOURCENAMECONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum programInterface, GLuint index, GLsizei bufSize, GLsizei *length, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMRESOURCEIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP 
PFNGLGETTEXLEVELPARAMETERIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLint *params); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPIPELINECONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLMEMORYBARRIERCONTEXTANGLEPROC)(GLeglContext ctx, GLbitfield barriers); +typedef void (GL_APIENTRYP PFNGLMEMORYBARRIERBYREGIONCONTEXTANGLEPROC)(GLeglContext ctx, GLbitfield barriers); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1ICONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1IVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UICONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLuint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2ICONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2IVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UICONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3ICONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3IVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UICONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4ICONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLint 
v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4IVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UICONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3FVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLSAMPLEMASKICONTEXTANGLEPROC)(GLeglContext ctx, GLuint maskNumber, GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DMULTISAMPLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMSTAGESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline, GLbitfield stages, GLuint program); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPIPELINECONTEXTANGLEPROC)(GLeglContext ctx, GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBBINDINGCONTEXTANGLEPROC)(GLeglContext ctx, GLuint attribindex, GLuint bindingindex); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBFORMATCONTEXTANGLEPROC)(GLeglContext ctx, GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBIFORMATCONTEXTANGLEPROC)(GLeglContext ctx, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (GL_APIENTRYP PFNGLVERTEXBINDINGDIVISORCONTEXTANGLEPROC)(GLeglContext ctx, GLuint bindingindex, GLuint divisor); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glActiveShaderProgramContextANGLE(GLeglContext ctx, GLuint pipeline, GLuint program); +GL_APICALL void GL_APIENTRY glBindImageTextureContextANGLE(GLeglContext 
ctx, GLuint unit, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLenum format); +GL_APICALL void GL_APIENTRY glBindProgramPipelineContextANGLE(GLeglContext ctx, GLuint pipeline); +GL_APICALL void GL_APIENTRY glBindVertexBufferContextANGLE(GLeglContext ctx, GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +GL_APICALL GLuint GL_APIENTRY glCreateShaderProgramvContextANGLE(GLeglContext ctx, GLenum type, GLsizei count, const GLchar *const*strings); +GL_APICALL void GL_APIENTRY glDeleteProgramPipelinesContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *pipelines); +GL_APICALL void GL_APIENTRY glDispatchComputeContextANGLE(GLeglContext ctx, GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z); +GL_APICALL void GL_APIENTRY glDispatchComputeIndirectContextANGLE(GLeglContext ctx, GLintptr indirect); +GL_APICALL void GL_APIENTRY glDrawArraysIndirectContextANGLE(GLeglContext ctx, GLenum mode, const void *indirect); +GL_APICALL void GL_APIENTRY glDrawElementsIndirectContextANGLE(GLeglContext ctx, GLenum mode, GLenum type, const void *indirect); +GL_APICALL void GL_APIENTRY glFramebufferParameteriContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glGenProgramPipelinesContextANGLE(GLeglContext ctx, GLsizei n, GLuint *pipelines); +GL_APICALL void GL_APIENTRY glGetBooleani_vContextANGLE(GLeglContext ctx, GLenum target, GLuint index, GLboolean *data); +GL_APICALL void GL_APIENTRY glGetFramebufferParameterivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetMultisamplefvContextANGLE(GLeglContext ctx, GLenum pname, GLuint index, GLfloat *val); +GL_APICALL void GL_APIENTRY glGetProgramInterfaceivContextANGLE(GLeglContext ctx, GLuint program, GLenum programInterface, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetProgramPipelineInfoLogContextANGLE(GLeglContext ctx, GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetProgramPipelineivContextANGLE(GLeglContext ctx, GLuint pipeline, GLenum pname, GLint *params); +GL_APICALL GLuint GL_APIENTRY glGetProgramResourceIndexContextANGLE(GLeglContext ctx, GLuint program, GLenum programInterface, const GLchar *name); +GL_APICALL GLint GL_APIENTRY glGetProgramResourceLocationContextANGLE(GLeglContext ctx, GLuint program, GLenum programInterface, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetProgramResourceNameContextANGLE(GLeglContext ctx, GLuint program, GLenum programInterface, GLuint index, GLsizei bufSize, GLsizei *length, GLchar *name); +GL_APICALL void GL_APIENTRY glGetProgramResourceivContextANGLE(GLeglContext ctx, GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterfvContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterivContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum pname, GLint *params); +GL_APICALL GLboolean GL_APIENTRY glIsProgramPipelineContextANGLE(GLeglContext ctx, GLuint pipeline); +GL_APICALL void GL_APIENTRY glMemoryBarrierContextANGLE(GLeglContext ctx, GLbitfield barriers); +GL_APICALL void GL_APIENTRY glMemoryBarrierByRegionContextANGLE(GLeglContext ctx, GLbitfield barriers); +GL_APICALL void GL_APIENTRY glProgramUniform1fContextANGLE(GLeglContext ctx, GLuint 
program, GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glProgramUniform1fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform1iContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glProgramUniform1ivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform1uiContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLuint v0); +GL_APICALL void GL_APIENTRY glProgramUniform1uivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2fContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glProgramUniform2fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform2iContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glProgramUniform2ivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2uiContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1); +GL_APICALL void GL_APIENTRY glProgramUniform2uivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform3fContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glProgramUniform3fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform3iContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glProgramUniform3ivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform3uiContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +GL_APICALL void GL_APIENTRY glProgramUniform3uivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4fContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glProgramUniform4fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform4iContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glProgramUniform4ivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4uiContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GL_APICALL void GL_APIENTRY glProgramUniform4uivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void 
GL_APIENTRY glProgramUniformMatrix2x3fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2x4fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x2fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x4fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x2fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x3fvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glSampleMaskiContextANGLE(GLeglContext ctx, GLuint maskNumber, GLbitfield mask); +GL_APICALL void GL_APIENTRY glTexStorage2DMultisampleContextANGLE(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GL_APICALL void GL_APIENTRY glUseProgramStagesContextANGLE(GLeglContext ctx, GLuint pipeline, GLbitfield stages, GLuint program); +GL_APICALL void GL_APIENTRY glValidateProgramPipelineContextANGLE(GLeglContext ctx, GLuint pipeline); +GL_APICALL void GL_APIENTRY glVertexAttribBindingContextANGLE(GLeglContext ctx, GLuint attribindex, GLuint bindingindex); +GL_APICALL void GL_APIENTRY glVertexAttribFormatContextANGLE(GLeglContext ctx, GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +GL_APICALL void GL_APIENTRY glVertexAttribIFormatContextANGLE(GLeglContext ctx, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GL_APICALL void GL_APIENTRY glVertexBindingDivisorContextANGLE(GLeglContext ctx, GLuint bindingindex, GLuint divisor); +#endif diff --git a/vendor/angle/include/GLES3/gl32.h b/vendor/angle/include/GLES3/gl32.h new file mode 100644 index 00000000..ae56b0e9 --- /dev/null +++ b/vendor/angle/include/GLES3/gl32.h @@ -0,0 +1,1808 @@ +#ifndef __gles2_gl32_h_ +#define __gles2_gl32_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright 2013-2020 The Khronos Group Inc. +** SPDX-License-Identifier: MIT +** +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. 
The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#include <GLES3/gl3platform.h> + +#ifndef GL_APIENTRYP +#define GL_APIENTRYP GL_APIENTRY* +#endif + +#ifndef GL_GLES_PROTOTYPES +#define GL_GLES_PROTOTYPES 1 +#endif + +/* Generated on date 20191013 */ + +/* Generated C header for: + * API: gles2 + * Profile: common + * Versions considered: 2\.[0-9]|3\.[012] + * Versions emitted: .* + * Default extensions included: None + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_ES_VERSION_2_0 +#define GL_ES_VERSION_2_0 1 +#include <KHR/khrplatform.h> +typedef khronos_int8_t GLbyte; +typedef khronos_float_t GLclampf; +typedef khronos_int32_t GLfixed; +typedef khronos_int16_t GLshort; +typedef khronos_uint16_t GLushort; +typedef void GLvoid; +typedef struct __GLsync *GLsync; +typedef khronos_int64_t GLint64; +typedef khronos_uint64_t GLuint64; +typedef unsigned int GLenum; +typedef unsigned int GLuint; +typedef char GLchar; +typedef khronos_float_t GLfloat; +typedef khronos_ssize_t GLsizeiptr; +typedef khronos_intptr_t GLintptr; +typedef unsigned int GLbitfield; +typedef int GLint; +typedef unsigned char GLboolean; +typedef int GLsizei; +typedef khronos_uint8_t GLubyte; +#define GL_DEPTH_BUFFER_BIT 0x00000100 +#define GL_STENCIL_BUFFER_BIT 0x00000400 +#define GL_COLOR_BUFFER_BIT 0x00004000 +#define GL_FALSE 0 +#define GL_TRUE 1 +#define GL_POINTS 0x0000 +#define GL_LINES 0x0001 +#define GL_LINE_LOOP 0x0002 +#define GL_LINE_STRIP 0x0003 +#define GL_TRIANGLES 0x0004 +#define GL_TRIANGLE_STRIP 0x0005 +#define GL_TRIANGLE_FAN 0x0006 +#define GL_ZERO 0 +#define GL_ONE 1 +#define GL_SRC_COLOR 0x0300 +#define GL_ONE_MINUS_SRC_COLOR 0x0301 +#define GL_SRC_ALPHA 0x0302 +#define GL_ONE_MINUS_SRC_ALPHA 0x0303 +#define GL_DST_ALPHA 0x0304 +#define GL_ONE_MINUS_DST_ALPHA 0x0305 +#define GL_DST_COLOR 0x0306 +#define GL_ONE_MINUS_DST_COLOR 0x0307 +#define GL_SRC_ALPHA_SATURATE 0x0308 +#define GL_FUNC_ADD 0x8006 +#define GL_BLEND_EQUATION 0x8009 +#define GL_BLEND_EQUATION_RGB 0x8009 +#define GL_BLEND_EQUATION_ALPHA 0x883D +#define GL_FUNC_SUBTRACT 0x800A +#define GL_FUNC_REVERSE_SUBTRACT 0x800B +#define GL_BLEND_DST_RGB 0x80C8 +#define GL_BLEND_SRC_RGB 0x80C9 +#define GL_BLEND_DST_ALPHA 0x80CA +#define GL_BLEND_SRC_ALPHA 0x80CB +#define GL_CONSTANT_COLOR 0x8001 +#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002 +#define GL_CONSTANT_ALPHA 0x8003 +#define GL_ONE_MINUS_CONSTANT_ALPHA 0x8004 +#define GL_BLEND_COLOR 0x8005 +#define GL_ARRAY_BUFFER 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define GL_ARRAY_BUFFER_BINDING 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895 +#define GL_STREAM_DRAW 0x88E0 +#define GL_STATIC_DRAW 0x88E4 +#define GL_DYNAMIC_DRAW 0x88E8 +#define GL_BUFFER_SIZE 0x8764 +#define GL_BUFFER_USAGE 0x8765 +#define GL_CURRENT_VERTEX_ATTRIB 0x8626 +#define GL_FRONT 0x0404 +#define GL_BACK 0x0405 +#define GL_FRONT_AND_BACK 0x0408 +#define GL_TEXTURE_2D 0x0DE1 +#define GL_CULL_FACE 0x0B44 +#define GL_BLEND 0x0BE2 +#define GL_DITHER 0x0BD0 +#define GL_STENCIL_TEST 0x0B90 +#define GL_DEPTH_TEST 0x0B71 +#define GL_SCISSOR_TEST 0x0C11 +#define GL_POLYGON_OFFSET_FILL 0x8037 +#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E +#define GL_SAMPLE_COVERAGE 0x80A0 +#define GL_NO_ERROR 0 +#define GL_INVALID_ENUM 0x0500 +#define GL_INVALID_VALUE 0x0501 +#define GL_INVALID_OPERATION 0x0502 +#define GL_OUT_OF_MEMORY 0x0505 +#define GL_CW 0x0900 +#define GL_CCW 0x0901 +#define GL_LINE_WIDTH
0x0B21 +#define GL_ALIASED_POINT_SIZE_RANGE 0x846D +#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E +#define GL_CULL_FACE_MODE 0x0B45 +#define GL_FRONT_FACE 0x0B46 +#define GL_DEPTH_RANGE 0x0B70 +#define GL_DEPTH_WRITEMASK 0x0B72 +#define GL_DEPTH_CLEAR_VALUE 0x0B73 +#define GL_DEPTH_FUNC 0x0B74 +#define GL_STENCIL_CLEAR_VALUE 0x0B91 +#define GL_STENCIL_FUNC 0x0B92 +#define GL_STENCIL_FAIL 0x0B94 +#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95 +#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96 +#define GL_STENCIL_REF 0x0B97 +#define GL_STENCIL_VALUE_MASK 0x0B93 +#define GL_STENCIL_WRITEMASK 0x0B98 +#define GL_STENCIL_BACK_FUNC 0x8800 +#define GL_STENCIL_BACK_FAIL 0x8801 +#define GL_STENCIL_BACK_PASS_DEPTH_FAIL 0x8802 +#define GL_STENCIL_BACK_PASS_DEPTH_PASS 0x8803 +#define GL_STENCIL_BACK_REF 0x8CA3 +#define GL_STENCIL_BACK_VALUE_MASK 0x8CA4 +#define GL_STENCIL_BACK_WRITEMASK 0x8CA5 +#define GL_VIEWPORT 0x0BA2 +#define GL_SCISSOR_BOX 0x0C10 +#define GL_COLOR_CLEAR_VALUE 0x0C22 +#define GL_COLOR_WRITEMASK 0x0C23 +#define GL_UNPACK_ALIGNMENT 0x0CF5 +#define GL_PACK_ALIGNMENT 0x0D05 +#define GL_MAX_TEXTURE_SIZE 0x0D33 +#define GL_MAX_VIEWPORT_DIMS 0x0D3A +#define GL_SUBPIXEL_BITS 0x0D50 +#define GL_RED_BITS 0x0D52 +#define GL_GREEN_BITS 0x0D53 +#define GL_BLUE_BITS 0x0D54 +#define GL_ALPHA_BITS 0x0D55 +#define GL_DEPTH_BITS 0x0D56 +#define GL_STENCIL_BITS 0x0D57 +#define GL_POLYGON_OFFSET_UNITS 0x2A00 +#define GL_POLYGON_OFFSET_FACTOR 0x8038 +#define GL_TEXTURE_BINDING_2D 0x8069 +#define GL_SAMPLE_BUFFERS 0x80A8 +#define GL_SAMPLES 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT 0x80AB +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3 +#define GL_DONT_CARE 0x1100 +#define GL_FASTEST 0x1101 +#define GL_NICEST 0x1102 +#define GL_GENERATE_MIPMAP_HINT 0x8192 +#define GL_BYTE 0x1400 +#define GL_UNSIGNED_BYTE 0x1401 +#define GL_SHORT 0x1402 +#define GL_UNSIGNED_SHORT 0x1403 +#define GL_INT 0x1404 +#define GL_UNSIGNED_INT 0x1405 +#define GL_FLOAT 0x1406 +#define GL_FIXED 0x140C +#define GL_DEPTH_COMPONENT 0x1902 +#define GL_ALPHA 0x1906 +#define GL_RGB 0x1907 +#define GL_RGBA 0x1908 +#define GL_LUMINANCE 0x1909 +#define GL_LUMINANCE_ALPHA 0x190A +#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034 +#define GL_UNSIGNED_SHORT_5_6_5 0x8363 +#define GL_FRAGMENT_SHADER 0x8B30 +#define GL_VERTEX_SHADER 0x8B31 +#define GL_MAX_VERTEX_ATTRIBS 0x8869 +#define GL_MAX_VERTEX_UNIFORM_VECTORS 0x8DFB +#define GL_MAX_VARYING_VECTORS 0x8DFC +#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D +#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS 0x8B4C +#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872 +#define GL_MAX_FRAGMENT_UNIFORM_VECTORS 0x8DFD +#define GL_SHADER_TYPE 0x8B4F +#define GL_DELETE_STATUS 0x8B80 +#define GL_LINK_STATUS 0x8B82 +#define GL_VALIDATE_STATUS 0x8B83 +#define GL_ATTACHED_SHADERS 0x8B85 +#define GL_ACTIVE_UNIFORMS 0x8B86 +#define GL_ACTIVE_UNIFORM_MAX_LENGTH 0x8B87 +#define GL_ACTIVE_ATTRIBUTES 0x8B89 +#define GL_ACTIVE_ATTRIBUTE_MAX_LENGTH 0x8B8A +#define GL_SHADING_LANGUAGE_VERSION 0x8B8C +#define GL_CURRENT_PROGRAM 0x8B8D +#define GL_NEVER 0x0200 +#define GL_LESS 0x0201 +#define GL_EQUAL 0x0202 +#define GL_LEQUAL 0x0203 +#define GL_GREATER 0x0204 +#define GL_NOTEQUAL 0x0205 +#define GL_GEQUAL 0x0206 +#define GL_ALWAYS 0x0207 +#define GL_KEEP 0x1E00 +#define GL_REPLACE 0x1E01 +#define GL_INCR 0x1E02 +#define GL_DECR 0x1E03 +#define GL_INVERT 0x150A +#define GL_INCR_WRAP 0x8507 +#define GL_DECR_WRAP 0x8508 +#define 
GL_VENDOR 0x1F00 +#define GL_RENDERER 0x1F01 +#define GL_VERSION 0x1F02 +#define GL_EXTENSIONS 0x1F03 +#define GL_NEAREST 0x2600 +#define GL_LINEAR 0x2601 +#define GL_NEAREST_MIPMAP_NEAREST 0x2700 +#define GL_LINEAR_MIPMAP_NEAREST 0x2701 +#define GL_NEAREST_MIPMAP_LINEAR 0x2702 +#define GL_LINEAR_MIPMAP_LINEAR 0x2703 +#define GL_TEXTURE_MAG_FILTER 0x2800 +#define GL_TEXTURE_MIN_FILTER 0x2801 +#define GL_TEXTURE_WRAP_S 0x2802 +#define GL_TEXTURE_WRAP_T 0x2803 +#define GL_TEXTURE 0x1702 +#define GL_TEXTURE_CUBE_MAP 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 +#define GL_TEXTURE3 0x84C3 +#define GL_TEXTURE4 0x84C4 +#define GL_TEXTURE5 0x84C5 +#define GL_TEXTURE6 0x84C6 +#define GL_TEXTURE7 0x84C7 +#define GL_TEXTURE8 0x84C8 +#define GL_TEXTURE9 0x84C9 +#define GL_TEXTURE10 0x84CA +#define GL_TEXTURE11 0x84CB +#define GL_TEXTURE12 0x84CC +#define GL_TEXTURE13 0x84CD +#define GL_TEXTURE14 0x84CE +#define GL_TEXTURE15 0x84CF +#define GL_TEXTURE16 0x84D0 +#define GL_TEXTURE17 0x84D1 +#define GL_TEXTURE18 0x84D2 +#define GL_TEXTURE19 0x84D3 +#define GL_TEXTURE20 0x84D4 +#define GL_TEXTURE21 0x84D5 +#define GL_TEXTURE22 0x84D6 +#define GL_TEXTURE23 0x84D7 +#define GL_TEXTURE24 0x84D8 +#define GL_TEXTURE25 0x84D9 +#define GL_TEXTURE26 0x84DA +#define GL_TEXTURE27 0x84DB +#define GL_TEXTURE28 0x84DC +#define GL_TEXTURE29 0x84DD +#define GL_TEXTURE30 0x84DE +#define GL_TEXTURE31 0x84DF +#define GL_ACTIVE_TEXTURE 0x84E0 +#define GL_REPEAT 0x2901 +#define GL_CLAMP_TO_EDGE 0x812F +#define GL_MIRRORED_REPEAT 0x8370 +#define GL_FLOAT_VEC2 0x8B50 +#define GL_FLOAT_VEC3 0x8B51 +#define GL_FLOAT_VEC4 0x8B52 +#define GL_INT_VEC2 0x8B53 +#define GL_INT_VEC3 0x8B54 +#define GL_INT_VEC4 0x8B55 +#define GL_BOOL 0x8B56 +#define GL_BOOL_VEC2 0x8B57 +#define GL_BOOL_VEC3 0x8B58 +#define GL_BOOL_VEC4 0x8B59 +#define GL_FLOAT_MAT2 0x8B5A +#define GL_FLOAT_MAT3 0x8B5B +#define GL_FLOAT_MAT4 0x8B5C +#define GL_SAMPLER_2D 0x8B5E +#define GL_SAMPLER_CUBE 0x8B60 +#define GL_VERTEX_ATTRIB_ARRAY_ENABLED 0x8622 +#define GL_VERTEX_ATTRIB_ARRAY_SIZE 0x8623 +#define GL_VERTEX_ATTRIB_ARRAY_STRIDE 0x8624 +#define GL_VERTEX_ATTRIB_ARRAY_TYPE 0x8625 +#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED 0x886A +#define GL_VERTEX_ATTRIB_ARRAY_POINTER 0x8645 +#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING 0x889F +#define GL_IMPLEMENTATION_COLOR_READ_TYPE 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT 0x8B9B +#define GL_COMPILE_STATUS 0x8B81 +#define GL_INFO_LOG_LENGTH 0x8B84 +#define GL_SHADER_SOURCE_LENGTH 0x8B88 +#define GL_SHADER_COMPILER 0x8DFA +#define GL_SHADER_BINARY_FORMATS 0x8DF8 +#define GL_NUM_SHADER_BINARY_FORMATS 0x8DF9 +#define GL_LOW_FLOAT 0x8DF0 +#define GL_MEDIUM_FLOAT 0x8DF1 +#define GL_HIGH_FLOAT 0x8DF2 +#define GL_LOW_INT 0x8DF3 +#define GL_MEDIUM_INT 0x8DF4 +#define GL_HIGH_INT 0x8DF5 +#define GL_FRAMEBUFFER 0x8D40 +#define GL_RENDERBUFFER 0x8D41 +#define GL_RGBA4 0x8056 +#define GL_RGB5_A1 0x8057 +#define GL_RGB565 0x8D62 +#define GL_DEPTH_COMPONENT16 0x81A5 +#define GL_STENCIL_INDEX8 0x8D48 +#define GL_RENDERBUFFER_WIDTH 0x8D42 +#define GL_RENDERBUFFER_HEIGHT 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT 
0x8D44 +#define GL_RENDERBUFFER_RED_SIZE 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE 0x8D53 +#define GL_RENDERBUFFER_DEPTH_SIZE 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE 0x8D55 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE 0x8CD3 +#define GL_COLOR_ATTACHMENT0 0x8CE0 +#define GL_DEPTH_ATTACHMENT 0x8D00 +#define GL_STENCIL_ATTACHMENT 0x8D20 +#define GL_NONE 0 +#define GL_FRAMEBUFFER_COMPLETE 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS 0x8CD9 +#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD +#define GL_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_RENDERBUFFER_BINDING 0x8CA7 +#define GL_MAX_RENDERBUFFER_SIZE 0x84E8 +#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506 +typedef void (GL_APIENTRYP PFNGLACTIVETEXTUREPROC) (GLenum texture); +typedef void (GL_APIENTRYP PFNGLATTACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLBINDATTRIBLOCATIONPROC) (GLuint program, GLuint index, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLBINDFRAMEBUFFERPROC) (GLenum target, GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLBINDRENDERBUFFERPROC) (GLenum target, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLBINDTEXTUREPROC) (GLenum target, GLuint texture); +typedef void (GL_APIENTRYP PFNGLBLENDCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEPROC) (GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCPROC) (GLenum sfactor, GLenum dfactor); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEPROC) (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +typedef void (GL_APIENTRYP PFNGLBUFFERDATAPROC) (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +typedef void (GL_APIENTRYP PFNGLBUFFERSUBDATAPROC) (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +typedef GLenum (GL_APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLCLEARPROC) (GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHFPROC) (GLfloat d); +typedef void (GL_APIENTRYP PFNGLCLEARSTENCILPROC) (GLint s); +typedef void (GL_APIENTRYP PFNGLCOLORMASKPROC) (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +typedef void (GL_APIENTRYP PFNGLCOMPILESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOPYTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (GL_APIENTRYP 
PFNGLCOPYTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef GLuint (GL_APIENTRYP PFNGLCREATEPROGRAMPROC) (void); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROC) (GLenum type); +typedef void (GL_APIENTRYP PFNGLCULLFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLDELETEBUFFERSPROC) (GLsizei n, const GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLDELETEFRAMEBUFFERSPROC) (GLsizei n, const GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLDELETERENDERBUFFERSPROC) (GLsizei n, const GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLDELETESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLDELETETEXTURESPROC) (GLsizei n, const GLuint *textures); +typedef void (GL_APIENTRYP PFNGLDEPTHFUNCPROC) (GLenum func); +typedef void (GL_APIENTRYP PFNGLDEPTHMASKPROC) (GLboolean flag); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEFPROC) (GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLDETACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLDISABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLDISABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSPROC) (GLenum mode, GLint first, GLsizei count); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLENABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLENABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLFINISHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFLUSHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFERPROC) (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (GL_APIENTRYP PFNGLFRONTFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLGENBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLGENERATEMIPMAPPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGENFRAMEBUFFERSPROC) (GLsizei n, GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLGENRENDERBUFFERSPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLGENTEXTURESPROC) (GLsizei n, GLuint *textures); +typedef void (GL_APIENTRYP PFNGLGETACTIVEATTRIBPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETATTACHEDSHADERSPROC) (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +typedef GLint (GL_APIENTRYP PFNGLGETATTRIBLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANVPROC) (GLenum pname, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef GLenum (GL_APIENTRYP PFNGLGETERRORPROC) (void); +typedef void (GL_APIENTRYP PFNGLGETFLOATVPROC) (GLenum pname, GLfloat *data); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC) (GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERVPROC) (GLenum pname, 
GLint *data); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMIVPROC) (GLuint program, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINFOLOGPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERIVPROC) (GLuint shader, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERINFOLOGPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETSHADERPRECISIONFORMATPROC) (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +typedef void (GL_APIENTRYP PFNGLGETSHADERSOURCEPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGPROC) (GLenum name); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMFVPROC) (GLuint program, GLint location, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMIVPROC) (GLuint program, GLint location, GLint *params); +typedef GLint (GL_APIENTRYP PFNGLGETUNIFORMLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBFVPROC) (GLuint index, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVPROC) (GLuint index, GLenum pname, void **pointer); +typedef void (GL_APIENTRYP PFNGLHINTPROC) (GLenum target, GLenum mode); +typedef GLboolean (GL_APIENTRYP PFNGLISBUFFERPROC) (GLuint buffer); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDPROC) (GLenum cap); +typedef GLboolean (GL_APIENTRYP PFNGLISFRAMEBUFFERPROC) (GLuint framebuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPROC) (GLuint program); +typedef GLboolean (GL_APIENTRYP PFNGLISRENDERBUFFERPROC) (GLuint renderbuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISSHADERPROC) (GLuint shader); +typedef GLboolean (GL_APIENTRYP PFNGLISTEXTUREPROC) (GLuint texture); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHPROC) (GLfloat width); +typedef void (GL_APIENTRYP PFNGLLINKPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLPIXELSTOREIPROC) (GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETPROC) (GLfloat factor, GLfloat units); +typedef void (GL_APIENTRYP PFNGLREADPIXELSPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +typedef void (GL_APIENTRYP PFNGLRELEASESHADERCOMPILERPROC) (void); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGEPROC) (GLfloat value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLSCISSORPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSHADERBINARYPROC) (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLSHADERSOURCEPROC) (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCSEPARATEPROC) (GLenum 
face, GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKPROC) (GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKSEPARATEPROC) (GLenum face, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILOPPROC) (GLenum fail, GLenum zfail, GLenum zpass); +typedef void (GL_APIENTRYP PFNGLSTENCILOPSEPARATEPROC) (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFPROC) (GLenum target, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FPROC) (GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IPROC) (GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FPROC) (GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IPROC) (GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IPROC) (GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IPROC) (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FPROC) (GLuint index, GLfloat x); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FPROC) (GLuint index, GLfloat x, GLfloat y); 
+typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBPOINTERPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLVIEWPORTPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glActiveTexture (GLenum texture); +GL_APICALL void GL_APIENTRY glAttachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glBindAttribLocation (GLuint program, GLuint index, const GLchar *name); +GL_APICALL void GL_APIENTRY glBindBuffer (GLenum target, GLuint buffer); +GL_APICALL void GL_APIENTRY glBindFramebuffer (GLenum target, GLuint framebuffer); +GL_APICALL void GL_APIENTRY glBindRenderbuffer (GLenum target, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glBindTexture (GLenum target, GLuint texture); +GL_APICALL void GL_APIENTRY glBlendColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glBlendEquation (GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparate (GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFunc (GLenum sfactor, GLenum dfactor); +GL_APICALL void GL_APIENTRY glBlendFuncSeparate (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +GL_APICALL void GL_APIENTRY glBufferData (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +GL_APICALL void GL_APIENTRY glBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +GL_APICALL GLenum GL_APIENTRY glCheckFramebufferStatus (GLenum target); +GL_APICALL void GL_APIENTRY glClear (GLbitfield mask); +GL_APICALL void GL_APIENTRY glClearColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glClearDepthf (GLfloat d); +GL_APICALL void GL_APIENTRY glClearStencil (GLint s); +GL_APICALL void GL_APIENTRY glColorMask (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +GL_APICALL void GL_APIENTRY glCompileShader (GLuint shader); +GL_APICALL void GL_APIENTRY glCompressedTexImage2D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCopyTexImage2D (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GL_APICALL void GL_APIENTRY glCopyTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL GLuint GL_APIENTRY glCreateProgram (void); +GL_APICALL GLuint GL_APIENTRY glCreateShader (GLenum type); +GL_APICALL void GL_APIENTRY glCullFace (GLenum mode); +GL_APICALL void GL_APIENTRY glDeleteBuffers (GLsizei n, const GLuint *buffers); +GL_APICALL void GL_APIENTRY glDeleteFramebuffers (GLsizei n, const GLuint 
*framebuffers); +GL_APICALL void GL_APIENTRY glDeleteProgram (GLuint program); +GL_APICALL void GL_APIENTRY glDeleteRenderbuffers (GLsizei n, const GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glDeleteShader (GLuint shader); +GL_APICALL void GL_APIENTRY glDeleteTextures (GLsizei n, const GLuint *textures); +GL_APICALL void GL_APIENTRY glDepthFunc (GLenum func); +GL_APICALL void GL_APIENTRY glDepthMask (GLboolean flag); +GL_APICALL void GL_APIENTRY glDepthRangef (GLfloat n, GLfloat f); +GL_APICALL void GL_APIENTRY glDetachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glDisable (GLenum cap); +GL_APICALL void GL_APIENTRY glDisableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glDrawArrays (GLenum mode, GLint first, GLsizei count); +GL_APICALL void GL_APIENTRY glDrawElements (GLenum mode, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glEnable (GLenum cap); +GL_APICALL void GL_APIENTRY glEnableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glFinish (void); +GL_APICALL void GL_APIENTRY glFlush (void); +GL_APICALL void GL_APIENTRY glFramebufferRenderbuffer (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glFramebufferTexture2D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GL_APICALL void GL_APIENTRY glFrontFace (GLenum mode); +GL_APICALL void GL_APIENTRY glGenBuffers (GLsizei n, GLuint *buffers); +GL_APICALL void GL_APIENTRY glGenerateMipmap (GLenum target); +GL_APICALL void GL_APIENTRY glGenFramebuffers (GLsizei n, GLuint *framebuffers); +GL_APICALL void GL_APIENTRY glGenRenderbuffers (GLsizei n, GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glGenTextures (GLsizei n, GLuint *textures); +GL_APICALL void GL_APIENTRY glGetActiveAttrib (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetActiveUniform (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetAttachedShaders (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +GL_APICALL GLint GL_APIENTRY glGetAttribLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetBooleanv (GLenum pname, GLboolean *data); +GL_APICALL void GL_APIENTRY glGetBufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL GLenum GL_APIENTRY glGetError (void); +GL_APICALL void GL_APIENTRY glGetFloatv (GLenum pname, GLfloat *data); +GL_APICALL void GL_APIENTRY glGetFramebufferAttachmentParameteriv (GLenum target, GLenum attachment, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetIntegerv (GLenum pname, GLint *data); +GL_APICALL void GL_APIENTRY glGetProgramiv (GLuint program, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetProgramInfoLog (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetRenderbufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderiv (GLuint shader, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderInfoLog (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetShaderPrecisionFormat (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +GL_APICALL void GL_APIENTRY glGetShaderSource (GLuint shader, GLsizei 
bufSize, GLsizei *length, GLchar *source); +GL_APICALL const GLubyte *GL_APIENTRY glGetString (GLenum name); +GL_APICALL void GL_APIENTRY glGetTexParameterfv (GLenum target, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetTexParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetUniformfv (GLuint program, GLint location, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetUniformiv (GLuint program, GLint location, GLint *params); +GL_APICALL GLint GL_APIENTRY glGetUniformLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetVertexAttribfv (GLuint index, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribiv (GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribPointerv (GLuint index, GLenum pname, void **pointer); +GL_APICALL void GL_APIENTRY glHint (GLenum target, GLenum mode); +GL_APICALL GLboolean GL_APIENTRY glIsBuffer (GLuint buffer); +GL_APICALL GLboolean GL_APIENTRY glIsEnabled (GLenum cap); +GL_APICALL GLboolean GL_APIENTRY glIsFramebuffer (GLuint framebuffer); +GL_APICALL GLboolean GL_APIENTRY glIsProgram (GLuint program); +GL_APICALL GLboolean GL_APIENTRY glIsRenderbuffer (GLuint renderbuffer); +GL_APICALL GLboolean GL_APIENTRY glIsShader (GLuint shader); +GL_APICALL GLboolean GL_APIENTRY glIsTexture (GLuint texture); +GL_APICALL void GL_APIENTRY glLineWidth (GLfloat width); +GL_APICALL void GL_APIENTRY glLinkProgram (GLuint program); +GL_APICALL void GL_APIENTRY glPixelStorei (GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glPolygonOffset (GLfloat factor, GLfloat units); +GL_APICALL void GL_APIENTRY glReadPixels (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +GL_APICALL void GL_APIENTRY glReleaseShaderCompiler (void); +GL_APICALL void GL_APIENTRY glRenderbufferStorage (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glSampleCoverage (GLfloat value, GLboolean invert); +GL_APICALL void GL_APIENTRY glScissor (GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glShaderBinary (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glShaderSource (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +GL_APICALL void GL_APIENTRY glStencilFunc (GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilFuncSeparate (GLenum face, GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMask (GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMaskSeparate (GLenum face, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilOp (GLenum fail, GLenum zfail, GLenum zpass); +GL_APICALL void GL_APIENTRY glStencilOpSeparate (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +GL_APICALL void GL_APIENTRY glTexImage2D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexParameterf (GLenum target, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glTexParameterfv (GLenum target, GLenum pname, const GLfloat *params); +GL_APICALL void GL_APIENTRY glTexParameteri (GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glTexParameteriv (GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexSubImage2D (GLenum 
target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glUniform1f (GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glUniform1fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform1i (GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glUniform1iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform2f (GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glUniform2fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform2i (GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glUniform2iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform3f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glUniform3fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform3i (GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glUniform3iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform4f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glUniform4fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform4i (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glUniform4iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUseProgram (GLuint program); +GL_APICALL void GL_APIENTRY glValidateProgram (GLuint program); +GL_APICALL void GL_APIENTRY glVertexAttrib1f (GLuint index, GLfloat x); +GL_APICALL void GL_APIENTRY glVertexAttrib1fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib2f (GLuint index, GLfloat x, GLfloat y); +GL_APICALL void GL_APIENTRY glVertexAttrib2fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib3f (GLuint index, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glVertexAttrib3fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib4f (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GL_APICALL void GL_APIENTRY glVertexAttrib4fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttribPointer (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +GL_APICALL void GL_APIENTRY glViewport (GLint x, GLint y, GLsizei width, GLsizei height); +#endif +#endif /* GL_ES_VERSION_2_0 */ + +#ifndef GL_ES_VERSION_3_0 +#define GL_ES_VERSION_3_0 1 +typedef khronos_uint16_t GLhalf; +#define GL_READ_BUFFER 0x0C02 +#define GL_UNPACK_ROW_LENGTH 0x0CF2 +#define GL_UNPACK_SKIP_ROWS 0x0CF3 +#define GL_UNPACK_SKIP_PIXELS 0x0CF4 +#define GL_PACK_ROW_LENGTH 0x0D02 +#define GL_PACK_SKIP_ROWS 0x0D03 +#define GL_PACK_SKIP_PIXELS 0x0D04 +#define GL_COLOR 0x1800 +#define GL_DEPTH 0x1801 +#define GL_STENCIL 0x1802 +#define GL_RED 0x1903 +#define GL_RGB8 0x8051 +#define GL_RGBA8 0x8058 +#define 
GL_RGB10_A2 0x8059 +#define GL_TEXTURE_BINDING_3D 0x806A +#define GL_UNPACK_SKIP_IMAGES 0x806D +#define GL_UNPACK_IMAGE_HEIGHT 0x806E +#define GL_TEXTURE_3D 0x806F +#define GL_TEXTURE_WRAP_R 0x8072 +#define GL_MAX_3D_TEXTURE_SIZE 0x8073 +#define GL_UNSIGNED_INT_2_10_10_10_REV 0x8368 +#define GL_MAX_ELEMENTS_VERTICES 0x80E8 +#define GL_MAX_ELEMENTS_INDICES 0x80E9 +#define GL_TEXTURE_MIN_LOD 0x813A +#define GL_TEXTURE_MAX_LOD 0x813B +#define GL_TEXTURE_BASE_LEVEL 0x813C +#define GL_TEXTURE_MAX_LEVEL 0x813D +#define GL_MIN 0x8007 +#define GL_MAX 0x8008 +#define GL_DEPTH_COMPONENT24 0x81A6 +#define GL_MAX_TEXTURE_LOD_BIAS 0x84FD +#define GL_TEXTURE_COMPARE_MODE 0x884C +#define GL_TEXTURE_COMPARE_FUNC 0x884D +#define GL_CURRENT_QUERY 0x8865 +#define GL_QUERY_RESULT 0x8866 +#define GL_QUERY_RESULT_AVAILABLE 0x8867 +#define GL_BUFFER_MAPPED 0x88BC +#define GL_BUFFER_MAP_POINTER 0x88BD +#define GL_STREAM_READ 0x88E1 +#define GL_STREAM_COPY 0x88E2 +#define GL_STATIC_READ 0x88E5 +#define GL_STATIC_COPY 0x88E6 +#define GL_DYNAMIC_READ 0x88E9 +#define GL_DYNAMIC_COPY 0x88EA +#define GL_MAX_DRAW_BUFFERS 0x8824 +#define GL_DRAW_BUFFER0 0x8825 +#define GL_DRAW_BUFFER1 0x8826 +#define GL_DRAW_BUFFER2 0x8827 +#define GL_DRAW_BUFFER3 0x8828 +#define GL_DRAW_BUFFER4 0x8829 +#define GL_DRAW_BUFFER5 0x882A +#define GL_DRAW_BUFFER6 0x882B +#define GL_DRAW_BUFFER7 0x882C +#define GL_DRAW_BUFFER8 0x882D +#define GL_DRAW_BUFFER9 0x882E +#define GL_DRAW_BUFFER10 0x882F +#define GL_DRAW_BUFFER11 0x8830 +#define GL_DRAW_BUFFER12 0x8831 +#define GL_DRAW_BUFFER13 0x8832 +#define GL_DRAW_BUFFER14 0x8833 +#define GL_DRAW_BUFFER15 0x8834 +#define GL_MAX_FRAGMENT_UNIFORM_COMPONENTS 0x8B49 +#define GL_MAX_VERTEX_UNIFORM_COMPONENTS 0x8B4A +#define GL_SAMPLER_3D 0x8B5F +#define GL_SAMPLER_2D_SHADOW 0x8B62 +#define GL_FRAGMENT_SHADER_DERIVATIVE_HINT 0x8B8B +#define GL_PIXEL_PACK_BUFFER 0x88EB +#define GL_PIXEL_UNPACK_BUFFER 0x88EC +#define GL_PIXEL_PACK_BUFFER_BINDING 0x88ED +#define GL_PIXEL_UNPACK_BUFFER_BINDING 0x88EF +#define GL_FLOAT_MAT2x3 0x8B65 +#define GL_FLOAT_MAT2x4 0x8B66 +#define GL_FLOAT_MAT3x2 0x8B67 +#define GL_FLOAT_MAT3x4 0x8B68 +#define GL_FLOAT_MAT4x2 0x8B69 +#define GL_FLOAT_MAT4x3 0x8B6A +#define GL_SRGB 0x8C40 +#define GL_SRGB8 0x8C41 +#define GL_SRGB8_ALPHA8 0x8C43 +#define GL_COMPARE_REF_TO_TEXTURE 0x884E +#define GL_MAJOR_VERSION 0x821B +#define GL_MINOR_VERSION 0x821C +#define GL_NUM_EXTENSIONS 0x821D +#define GL_RGBA32F 0x8814 +#define GL_RGB32F 0x8815 +#define GL_RGBA16F 0x881A +#define GL_RGB16F 0x881B +#define GL_VERTEX_ATTRIB_ARRAY_INTEGER 0x88FD +#define GL_MAX_ARRAY_TEXTURE_LAYERS 0x88FF +#define GL_MIN_PROGRAM_TEXEL_OFFSET 0x8904 +#define GL_MAX_PROGRAM_TEXEL_OFFSET 0x8905 +#define GL_MAX_VARYING_COMPONENTS 0x8B4B +#define GL_TEXTURE_2D_ARRAY 0x8C1A +#define GL_TEXTURE_BINDING_2D_ARRAY 0x8C1D +#define GL_R11F_G11F_B10F 0x8C3A +#define GL_UNSIGNED_INT_10F_11F_11F_REV 0x8C3B +#define GL_RGB9_E5 0x8C3D +#define GL_UNSIGNED_INT_5_9_9_9_REV 0x8C3E +#define GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH 0x8C76 +#define GL_TRANSFORM_FEEDBACK_BUFFER_MODE 0x8C7F +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS 0x8C80 +#define GL_TRANSFORM_FEEDBACK_VARYINGS 0x8C83 +#define GL_TRANSFORM_FEEDBACK_BUFFER_START 0x8C84 +#define GL_TRANSFORM_FEEDBACK_BUFFER_SIZE 0x8C85 +#define GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN 0x8C88 +#define GL_RASTERIZER_DISCARD 0x8C89 +#define GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS 0x8C8A +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS 0x8C8B +#define 
GL_INTERLEAVED_ATTRIBS 0x8C8C +#define GL_SEPARATE_ATTRIBS 0x8C8D +#define GL_TRANSFORM_FEEDBACK_BUFFER 0x8C8E +#define GL_TRANSFORM_FEEDBACK_BUFFER_BINDING 0x8C8F +#define GL_RGBA32UI 0x8D70 +#define GL_RGB32UI 0x8D71 +#define GL_RGBA16UI 0x8D76 +#define GL_RGB16UI 0x8D77 +#define GL_RGBA8UI 0x8D7C +#define GL_RGB8UI 0x8D7D +#define GL_RGBA32I 0x8D82 +#define GL_RGB32I 0x8D83 +#define GL_RGBA16I 0x8D88 +#define GL_RGB16I 0x8D89 +#define GL_RGBA8I 0x8D8E +#define GL_RGB8I 0x8D8F +#define GL_RED_INTEGER 0x8D94 +#define GL_RGB_INTEGER 0x8D98 +#define GL_RGBA_INTEGER 0x8D99 +#define GL_SAMPLER_2D_ARRAY 0x8DC1 +#define GL_SAMPLER_2D_ARRAY_SHADOW 0x8DC4 +#define GL_SAMPLER_CUBE_SHADOW 0x8DC5 +#define GL_UNSIGNED_INT_VEC2 0x8DC6 +#define GL_UNSIGNED_INT_VEC3 0x8DC7 +#define GL_UNSIGNED_INT_VEC4 0x8DC8 +#define GL_INT_SAMPLER_2D 0x8DCA +#define GL_INT_SAMPLER_3D 0x8DCB +#define GL_INT_SAMPLER_CUBE 0x8DCC +#define GL_INT_SAMPLER_2D_ARRAY 0x8DCF +#define GL_UNSIGNED_INT_SAMPLER_2D 0x8DD2 +#define GL_UNSIGNED_INT_SAMPLER_3D 0x8DD3 +#define GL_UNSIGNED_INT_SAMPLER_CUBE 0x8DD4 +#define GL_UNSIGNED_INT_SAMPLER_2D_ARRAY 0x8DD7 +#define GL_BUFFER_ACCESS_FLAGS 0x911F +#define GL_BUFFER_MAP_LENGTH 0x9120 +#define GL_BUFFER_MAP_OFFSET 0x9121 +#define GL_DEPTH_COMPONENT32F 0x8CAC +#define GL_DEPTH32F_STENCIL8 0x8CAD +#define GL_FLOAT_32_UNSIGNED_INT_24_8_REV 0x8DAD +#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING 0x8210 +#define GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE 0x8211 +#define GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE 0x8212 +#define GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE 0x8213 +#define GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE 0x8214 +#define GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE 0x8215 +#define GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE 0x8216 +#define GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE 0x8217 +#define GL_FRAMEBUFFER_DEFAULT 0x8218 +#define GL_FRAMEBUFFER_UNDEFINED 0x8219 +#define GL_DEPTH_STENCIL_ATTACHMENT 0x821A +#define GL_DEPTH_STENCIL 0x84F9 +#define GL_UNSIGNED_INT_24_8 0x84FA +#define GL_DEPTH24_STENCIL8 0x88F0 +#define GL_UNSIGNED_NORMALIZED 0x8C17 +#define GL_DRAW_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_READ_FRAMEBUFFER 0x8CA8 +#define GL_DRAW_FRAMEBUFFER 0x8CA9 +#define GL_READ_FRAMEBUFFER_BINDING 0x8CAA +#define GL_RENDERBUFFER_SAMPLES 0x8CAB +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER 0x8CD4 +#define GL_MAX_COLOR_ATTACHMENTS 0x8CDF +#define GL_COLOR_ATTACHMENT1 0x8CE1 +#define GL_COLOR_ATTACHMENT2 0x8CE2 +#define GL_COLOR_ATTACHMENT3 0x8CE3 +#define GL_COLOR_ATTACHMENT4 0x8CE4 +#define GL_COLOR_ATTACHMENT5 0x8CE5 +#define GL_COLOR_ATTACHMENT6 0x8CE6 +#define GL_COLOR_ATTACHMENT7 0x8CE7 +#define GL_COLOR_ATTACHMENT8 0x8CE8 +#define GL_COLOR_ATTACHMENT9 0x8CE9 +#define GL_COLOR_ATTACHMENT10 0x8CEA +#define GL_COLOR_ATTACHMENT11 0x8CEB +#define GL_COLOR_ATTACHMENT12 0x8CEC +#define GL_COLOR_ATTACHMENT13 0x8CED +#define GL_COLOR_ATTACHMENT14 0x8CEE +#define GL_COLOR_ATTACHMENT15 0x8CEF +#define GL_COLOR_ATTACHMENT16 0x8CF0 +#define GL_COLOR_ATTACHMENT17 0x8CF1 +#define GL_COLOR_ATTACHMENT18 0x8CF2 +#define GL_COLOR_ATTACHMENT19 0x8CF3 +#define GL_COLOR_ATTACHMENT20 0x8CF4 +#define GL_COLOR_ATTACHMENT21 0x8CF5 +#define GL_COLOR_ATTACHMENT22 0x8CF6 +#define GL_COLOR_ATTACHMENT23 0x8CF7 +#define GL_COLOR_ATTACHMENT24 0x8CF8 +#define GL_COLOR_ATTACHMENT25 0x8CF9 +#define GL_COLOR_ATTACHMENT26 0x8CFA +#define GL_COLOR_ATTACHMENT27 0x8CFB +#define GL_COLOR_ATTACHMENT28 0x8CFC +#define GL_COLOR_ATTACHMENT29 0x8CFD +#define GL_COLOR_ATTACHMENT30 0x8CFE +#define GL_COLOR_ATTACHMENT31 0x8CFF +#define 
GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE 0x8D56 +#define GL_MAX_SAMPLES 0x8D57 +#define GL_HALF_FLOAT 0x140B +#define GL_MAP_READ_BIT 0x0001 +#define GL_MAP_WRITE_BIT 0x0002 +#define GL_MAP_INVALIDATE_RANGE_BIT 0x0004 +#define GL_MAP_INVALIDATE_BUFFER_BIT 0x0008 +#define GL_MAP_FLUSH_EXPLICIT_BIT 0x0010 +#define GL_MAP_UNSYNCHRONIZED_BIT 0x0020 +#define GL_RG 0x8227 +#define GL_RG_INTEGER 0x8228 +#define GL_R8 0x8229 +#define GL_RG8 0x822B +#define GL_R16F 0x822D +#define GL_R32F 0x822E +#define GL_RG16F 0x822F +#define GL_RG32F 0x8230 +#define GL_R8I 0x8231 +#define GL_R8UI 0x8232 +#define GL_R16I 0x8233 +#define GL_R16UI 0x8234 +#define GL_R32I 0x8235 +#define GL_R32UI 0x8236 +#define GL_RG8I 0x8237 +#define GL_RG8UI 0x8238 +#define GL_RG16I 0x8239 +#define GL_RG16UI 0x823A +#define GL_RG32I 0x823B +#define GL_RG32UI 0x823C +#define GL_VERTEX_ARRAY_BINDING 0x85B5 +#define GL_R8_SNORM 0x8F94 +#define GL_RG8_SNORM 0x8F95 +#define GL_RGB8_SNORM 0x8F96 +#define GL_RGBA8_SNORM 0x8F97 +#define GL_SIGNED_NORMALIZED 0x8F9C +#define GL_PRIMITIVE_RESTART_FIXED_INDEX 0x8D69 +#define GL_COPY_READ_BUFFER 0x8F36 +#define GL_COPY_WRITE_BUFFER 0x8F37 +#define GL_COPY_READ_BUFFER_BINDING 0x8F36 +#define GL_COPY_WRITE_BUFFER_BINDING 0x8F37 +#define GL_UNIFORM_BUFFER 0x8A11 +#define GL_UNIFORM_BUFFER_BINDING 0x8A28 +#define GL_UNIFORM_BUFFER_START 0x8A29 +#define GL_UNIFORM_BUFFER_SIZE 0x8A2A +#define GL_MAX_VERTEX_UNIFORM_BLOCKS 0x8A2B +#define GL_MAX_FRAGMENT_UNIFORM_BLOCKS 0x8A2D +#define GL_MAX_COMBINED_UNIFORM_BLOCKS 0x8A2E +#define GL_MAX_UNIFORM_BUFFER_BINDINGS 0x8A2F +#define GL_MAX_UNIFORM_BLOCK_SIZE 0x8A30 +#define GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS 0x8A31 +#define GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS 0x8A33 +#define GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT 0x8A34 +#define GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH 0x8A35 +#define GL_ACTIVE_UNIFORM_BLOCKS 0x8A36 +#define GL_UNIFORM_TYPE 0x8A37 +#define GL_UNIFORM_SIZE 0x8A38 +#define GL_UNIFORM_NAME_LENGTH 0x8A39 +#define GL_UNIFORM_BLOCK_INDEX 0x8A3A +#define GL_UNIFORM_OFFSET 0x8A3B +#define GL_UNIFORM_ARRAY_STRIDE 0x8A3C +#define GL_UNIFORM_MATRIX_STRIDE 0x8A3D +#define GL_UNIFORM_IS_ROW_MAJOR 0x8A3E +#define GL_UNIFORM_BLOCK_BINDING 0x8A3F +#define GL_UNIFORM_BLOCK_DATA_SIZE 0x8A40 +#define GL_UNIFORM_BLOCK_NAME_LENGTH 0x8A41 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS 0x8A42 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES 0x8A43 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER 0x8A44 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER 0x8A46 +#define GL_INVALID_INDEX 0xFFFFFFFFu +#define GL_MAX_VERTEX_OUTPUT_COMPONENTS 0x9122 +#define GL_MAX_FRAGMENT_INPUT_COMPONENTS 0x9125 +#define GL_MAX_SERVER_WAIT_TIMEOUT 0x9111 +#define GL_OBJECT_TYPE 0x9112 +#define GL_SYNC_CONDITION 0x9113 +#define GL_SYNC_STATUS 0x9114 +#define GL_SYNC_FLAGS 0x9115 +#define GL_SYNC_FENCE 0x9116 +#define GL_SYNC_GPU_COMMANDS_COMPLETE 0x9117 +#define GL_UNSIGNALED 0x9118 +#define GL_SIGNALED 0x9119 +#define GL_ALREADY_SIGNALED 0x911A +#define GL_TIMEOUT_EXPIRED 0x911B +#define GL_CONDITION_SATISFIED 0x911C +#define GL_WAIT_FAILED 0x911D +#define GL_SYNC_FLUSH_COMMANDS_BIT 0x00000001 +#define GL_TIMEOUT_IGNORED 0xFFFFFFFFFFFFFFFFull +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR 0x88FE +#define GL_ANY_SAMPLES_PASSED 0x8C2F +#define GL_ANY_SAMPLES_PASSED_CONSERVATIVE 0x8D6A +#define GL_SAMPLER_BINDING 0x8919 +#define GL_RGB10_A2UI 0x906F +#define GL_TEXTURE_SWIZZLE_R 0x8E42 +#define GL_TEXTURE_SWIZZLE_G 0x8E43 +#define GL_TEXTURE_SWIZZLE_B 0x8E44 +#define 
GL_TEXTURE_SWIZZLE_A 0x8E45 +#define GL_GREEN 0x1904 +#define GL_BLUE 0x1905 +#define GL_INT_2_10_10_10_REV 0x8D9F +#define GL_TRANSFORM_FEEDBACK 0x8E22 +#define GL_TRANSFORM_FEEDBACK_PAUSED 0x8E23 +#define GL_TRANSFORM_FEEDBACK_ACTIVE 0x8E24 +#define GL_TRANSFORM_FEEDBACK_BINDING 0x8E25 +#define GL_PROGRAM_BINARY_RETRIEVABLE_HINT 0x8257 +#define GL_PROGRAM_BINARY_LENGTH 0x8741 +#define GL_NUM_PROGRAM_BINARY_FORMATS 0x87FE +#define GL_PROGRAM_BINARY_FORMATS 0x87FF +#define GL_COMPRESSED_R11_EAC 0x9270 +#define GL_COMPRESSED_SIGNED_R11_EAC 0x9271 +#define GL_COMPRESSED_RG11_EAC 0x9272 +#define GL_COMPRESSED_SIGNED_RG11_EAC 0x9273 +#define GL_COMPRESSED_RGB8_ETC2 0x9274 +#define GL_COMPRESSED_SRGB8_ETC2 0x9275 +#define GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9276 +#define GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9277 +#define GL_COMPRESSED_RGBA8_ETC2_EAC 0x9278 +#define GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC 0x9279 +#define GL_TEXTURE_IMMUTABLE_FORMAT 0x912F +#define GL_MAX_ELEMENT_INDEX 0x8D6B +#define GL_NUM_SAMPLE_COUNTS 0x9380 +#define GL_TEXTURE_IMMUTABLE_LEVELS 0x82DF +typedef void (GL_APIENTRYP PFNGLREADBUFFERPROC) (GLenum src); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE3DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLGENQUERIESPROC) (GLsizei n, GLuint *ids); +typedef void (GL_APIENTRYP PFNGLDELETEQUERIESPROC) (GLsizei n, const GLuint *ids); +typedef GLboolean (GL_APIENTRYP PFNGLISQUERYPROC) (GLuint id); +typedef void (GL_APIENTRYP PFNGLBEGINQUERYPROC) (GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLENDQUERYPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETQUERYIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUIVPROC) (GLuint id, GLenum pname, GLuint *params); +typedef GLboolean (GL_APIENTRYP PFNGLUNMAPBUFFERPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVPROC) (GLenum target, GLenum pname, void **params); +typedef void (GL_APIENTRYP PFNGLDRAWBUFFERSPROC) (GLsizei n, const GLenum *bufs); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, 
const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLBLITFRAMEBUFFERPROC) (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void *(GL_APIENTRYP PFNGLMAPBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (GL_APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length); +typedef void (GL_APIENTRYP PFNGLBINDVERTEXARRAYPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLDELETEVERTEXARRAYSPROC) (GLsizei n, const GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLGENVERTEXARRAYSPROC) (GLsizei n, GLuint *arrays); +typedef GLboolean (GL_APIENTRYP PFNGLISVERTEXARRAYPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLGETINTEGERI_VPROC) (GLenum target, GLuint index, GLint *data); +typedef void (GL_APIENTRYP PFNGLBEGINTRANSFORMFEEDBACKPROC) (GLenum primitiveMode); +typedef void (GL_APIENTRYP PFNGLENDTRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERRANGEPROC) (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERBASEPROC) (GLenum target, GLuint index, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTRANSFORMFEEDBACKVARYINGSPROC) (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +typedef void (GL_APIENTRYP PFNGLGETTRANSFORMFEEDBACKVARYINGPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBIPOINTERPROC) (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIUIVPROC) (GLuint index, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4IPROC) (GLuint index, GLint x, GLint y, GLint z, GLint w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4UIPROC) (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4IVPROC) (GLuint index, const GLint *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4UIVPROC) (GLuint index, const GLuint *v); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMUIVPROC) (GLuint program, GLint location, GLuint *params); +typedef GLint (GL_APIENTRYP PFNGLGETFRAGDATALOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UIPROC) (GLint location, GLuint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UIPROC) (GLint location, GLuint v0, GLuint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UIPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UIPROC) (GLint location, GLuint v0, 
GLuint v1, GLuint v2, GLuint v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERIVPROC) (GLenum buffer, GLint drawbuffer, const GLint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERUIVPROC) (GLenum buffer, GLint drawbuffer, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERFVPROC) (GLenum buffer, GLint drawbuffer, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERFIPROC) (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGIPROC) (GLenum name, GLuint index); +typedef void (GL_APIENTRYP PFNGLCOPYBUFFERSUBDATAPROC) (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMINDICESPROC) (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMSIVPROC) (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +typedef GLuint (GL_APIENTRYP PFNGLGETUNIFORMBLOCKINDEXPROC) (GLuint program, const GLchar *uniformBlockName); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKIVPROC) (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC) (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +typedef void (GL_APIENTRYP PFNGLUNIFORMBLOCKBINDINGPROC) (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDPROC) (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +typedef GLsync (GL_APIENTRYP PFNGLFENCESYNCPROC) (GLenum condition, GLbitfield flags); +typedef GLboolean (GL_APIENTRYP PFNGLISSYNCPROC) (GLsync sync); +typedef void (GL_APIENTRYP PFNGLDELETESYNCPROC) (GLsync sync); +typedef GLenum (GL_APIENTRYP PFNGLCLIENTWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VPROC) (GLenum pname, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETSYNCIVPROC) (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64I_VPROC) (GLenum target, GLuint index, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERI64VPROC) (GLenum target, GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGENSAMPLERSPROC) (GLsizei count, GLuint *samplers); +typedef void (GL_APIENTRYP PFNGLDELETESAMPLERSPROC) (GLsizei count, const GLuint *samplers); +typedef GLboolean (GL_APIENTRYP PFNGLISSAMPLERPROC) (GLuint sampler); +typedef void (GL_APIENTRYP PFNGLBINDSAMPLERPROC) (GLuint unit, GLuint sampler); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIPROC) (GLuint sampler, GLenum pname, GLint param); +typedef void 
(GL_APIENTRYP PFNGLSAMPLERPARAMETERIVPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFPROC) (GLuint sampler, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, const GLfloat *param); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIVPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISORPROC) (GLuint index, GLuint divisor); +typedef void (GL_APIENTRYP PFNGLBINDTRANSFORMFEEDBACKPROC) (GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLDELETETRANSFORMFEEDBACKSPROC) (GLsizei n, const GLuint *ids); +typedef void (GL_APIENTRYP PFNGLGENTRANSFORMFEEDBACKSPROC) (GLsizei n, GLuint *ids); +typedef GLboolean (GL_APIENTRYP PFNGLISTRANSFORMFEEDBACKPROC) (GLuint id); +typedef void (GL_APIENTRYP PFNGLPAUSETRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLRESUMETRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMBINARYPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +typedef void (GL_APIENTRYP PFNGLPROGRAMBINARYPROC) (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLPROGRAMPARAMETERIPROC) (GLuint program, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLINVALIDATEFRAMEBUFFERPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments); +typedef void (GL_APIENTRYP PFNGLINVALIDATESUBFRAMEBUFFERPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (GL_APIENTRYP PFNGLGETINTERNALFORMATIVPROC) (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glReadBuffer (GLenum src); +GL_APICALL void GL_APIENTRY glDrawRangeElements (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glTexImage3D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glCopyTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glCompressedTexImage3D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glGenQueries (GLsizei n, GLuint *ids); +GL_APICALL void GL_APIENTRY 
glDeleteQueries (GLsizei n, const GLuint *ids); +GL_APICALL GLboolean GL_APIENTRY glIsQuery (GLuint id); +GL_APICALL void GL_APIENTRY glBeginQuery (GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glEndQuery (GLenum target); +GL_APICALL void GL_APIENTRY glGetQueryiv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectuiv (GLuint id, GLenum pname, GLuint *params); +GL_APICALL GLboolean GL_APIENTRY glUnmapBuffer (GLenum target); +GL_APICALL void GL_APIENTRY glGetBufferPointerv (GLenum target, GLenum pname, void **params); +GL_APICALL void GL_APIENTRY glDrawBuffers (GLsizei n, const GLenum *bufs); +GL_APICALL void GL_APIENTRY glUniformMatrix2x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glBlitFramebuffer (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glFramebufferTextureLayer (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +GL_APICALL void *GL_APIENTRY glMapBufferRange (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GL_APICALL void GL_APIENTRY glFlushMappedBufferRange (GLenum target, GLintptr offset, GLsizeiptr length); +GL_APICALL void GL_APIENTRY glBindVertexArray (GLuint array); +GL_APICALL void GL_APIENTRY glDeleteVertexArrays (GLsizei n, const GLuint *arrays); +GL_APICALL void GL_APIENTRY glGenVertexArrays (GLsizei n, GLuint *arrays); +GL_APICALL GLboolean GL_APIENTRY glIsVertexArray (GLuint array); +GL_APICALL void GL_APIENTRY glGetIntegeri_v (GLenum target, GLuint index, GLint *data); +GL_APICALL void GL_APIENTRY glBeginTransformFeedback (GLenum primitiveMode); +GL_APICALL void GL_APIENTRY glEndTransformFeedback (void); +GL_APICALL void GL_APIENTRY glBindBufferRange (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glBindBufferBase (GLenum target, GLuint index, GLuint buffer); +GL_APICALL void GL_APIENTRY glTransformFeedbackVaryings (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +GL_APICALL void GL_APIENTRY glGetTransformFeedbackVarying (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glVertexAttribIPointer (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_APICALL void GL_APIENTRY glGetVertexAttribIiv (GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribIuiv (GLuint index, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glVertexAttribI4i (GLuint index, GLint x, GLint y, GLint z, 
GLint w); +GL_APICALL void GL_APIENTRY glVertexAttribI4ui (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GL_APICALL void GL_APIENTRY glVertexAttribI4iv (GLuint index, const GLint *v); +GL_APICALL void GL_APIENTRY glVertexAttribI4uiv (GLuint index, const GLuint *v); +GL_APICALL void GL_APIENTRY glGetUniformuiv (GLuint program, GLint location, GLuint *params); +GL_APICALL GLint GL_APIENTRY glGetFragDataLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glUniform1ui (GLint location, GLuint v0); +GL_APICALL void GL_APIENTRY glUniform2ui (GLint location, GLuint v0, GLuint v1); +GL_APICALL void GL_APIENTRY glUniform3ui (GLint location, GLuint v0, GLuint v1, GLuint v2); +GL_APICALL void GL_APIENTRY glUniform4ui (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GL_APICALL void GL_APIENTRY glUniform1uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform2uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform3uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform4uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glClearBufferiv (GLenum buffer, GLint drawbuffer, const GLint *value); +GL_APICALL void GL_APIENTRY glClearBufferuiv (GLenum buffer, GLint drawbuffer, const GLuint *value); +GL_APICALL void GL_APIENTRY glClearBufferfv (GLenum buffer, GLint drawbuffer, const GLfloat *value); +GL_APICALL void GL_APIENTRY glClearBufferfi (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +GL_APICALL const GLubyte *GL_APIENTRY glGetStringi (GLenum name, GLuint index); +GL_APICALL void GL_APIENTRY glCopyBufferSubData (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glGetUniformIndices (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +GL_APICALL void GL_APIENTRY glGetActiveUniformsiv (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +GL_APICALL GLuint GL_APIENTRY glGetUniformBlockIndex (GLuint program, const GLchar *uniformBlockName); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockiv (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockName (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +GL_APICALL void GL_APIENTRY glUniformBlockBinding (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +GL_APICALL void GL_APIENTRY glDrawArraysInstanced (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +GL_APICALL void GL_APIENTRY glDrawElementsInstanced (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +GL_APICALL GLsync GL_APIENTRY glFenceSync (GLenum condition, GLbitfield flags); +GL_APICALL GLboolean GL_APIENTRY glIsSync (GLsync sync); +GL_APICALL void GL_APIENTRY glDeleteSync (GLsync sync); +GL_APICALL GLenum GL_APIENTRY glClientWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glGetInteger64v (GLenum pname, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetSynciv (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +GL_APICALL void GL_APIENTRY glGetInteger64i_v (GLenum 
target, GLuint index, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetBufferParameteri64v (GLenum target, GLenum pname, GLint64 *params); +GL_APICALL void GL_APIENTRY glGenSamplers (GLsizei count, GLuint *samplers); +GL_APICALL void GL_APIENTRY glDeleteSamplers (GLsizei count, const GLuint *samplers); +GL_APICALL GLboolean GL_APIENTRY glIsSampler (GLuint sampler); +GL_APICALL void GL_APIENTRY glBindSampler (GLuint unit, GLuint sampler); +GL_APICALL void GL_APIENTRY glSamplerParameteri (GLuint sampler, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glSamplerParameteriv (GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterf (GLuint sampler, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glSamplerParameterfv (GLuint sampler, GLenum pname, const GLfloat *param); +GL_APICALL void GL_APIENTRY glGetSamplerParameteriv (GLuint sampler, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterfv (GLuint sampler, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glVertexAttribDivisor (GLuint index, GLuint divisor); +GL_APICALL void GL_APIENTRY glBindTransformFeedback (GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glDeleteTransformFeedbacks (GLsizei n, const GLuint *ids); +GL_APICALL void GL_APIENTRY glGenTransformFeedbacks (GLsizei n, GLuint *ids); +GL_APICALL GLboolean GL_APIENTRY glIsTransformFeedback (GLuint id); +GL_APICALL void GL_APIENTRY glPauseTransformFeedback (void); +GL_APICALL void GL_APIENTRY glResumeTransformFeedback (void); +GL_APICALL void GL_APIENTRY glGetProgramBinary (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +GL_APICALL void GL_APIENTRY glProgramBinary (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glProgramParameteri (GLuint program, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glInvalidateFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments); +GL_APICALL void GL_APIENTRY glInvalidateSubFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage2D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage3D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GL_APICALL void GL_APIENTRY glGetInternalformativ (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +#endif +#endif /* GL_ES_VERSION_3_0 */ + +#ifndef GL_ES_VERSION_3_1 +#define GL_ES_VERSION_3_1 1 +#define GL_COMPUTE_SHADER 0x91B9 +#define GL_MAX_COMPUTE_UNIFORM_BLOCKS 0x91BB +#define GL_MAX_COMPUTE_TEXTURE_IMAGE_UNITS 0x91BC +#define GL_MAX_COMPUTE_IMAGE_UNIFORMS 0x91BD +#define GL_MAX_COMPUTE_SHARED_MEMORY_SIZE 0x8262 +#define GL_MAX_COMPUTE_UNIFORM_COMPONENTS 0x8263 +#define GL_MAX_COMPUTE_ATOMIC_COUNTER_BUFFERS 0x8264 +#define GL_MAX_COMPUTE_ATOMIC_COUNTERS 0x8265 +#define GL_MAX_COMBINED_COMPUTE_UNIFORM_COMPONENTS 0x8266 +#define GL_MAX_COMPUTE_WORK_GROUP_INVOCATIONS 0x90EB +#define GL_MAX_COMPUTE_WORK_GROUP_COUNT 0x91BE +#define GL_MAX_COMPUTE_WORK_GROUP_SIZE 0x91BF +#define GL_COMPUTE_WORK_GROUP_SIZE 0x8267 +#define GL_DISPATCH_INDIRECT_BUFFER 0x90EE +#define GL_DISPATCH_INDIRECT_BUFFER_BINDING 0x90EF +#define GL_COMPUTE_SHADER_BIT 0x00000020 +#define GL_DRAW_INDIRECT_BUFFER 0x8F3F +#define 
GL_DRAW_INDIRECT_BUFFER_BINDING 0x8F43 +#define GL_MAX_UNIFORM_LOCATIONS 0x826E +#define GL_FRAMEBUFFER_DEFAULT_WIDTH 0x9310 +#define GL_FRAMEBUFFER_DEFAULT_HEIGHT 0x9311 +#define GL_FRAMEBUFFER_DEFAULT_SAMPLES 0x9313 +#define GL_FRAMEBUFFER_DEFAULT_FIXED_SAMPLE_LOCATIONS 0x9314 +#define GL_MAX_FRAMEBUFFER_WIDTH 0x9315 +#define GL_MAX_FRAMEBUFFER_HEIGHT 0x9316 +#define GL_MAX_FRAMEBUFFER_SAMPLES 0x9318 +#define GL_UNIFORM 0x92E1 +#define GL_UNIFORM_BLOCK 0x92E2 +#define GL_PROGRAM_INPUT 0x92E3 +#define GL_PROGRAM_OUTPUT 0x92E4 +#define GL_BUFFER_VARIABLE 0x92E5 +#define GL_SHADER_STORAGE_BLOCK 0x92E6 +#define GL_ATOMIC_COUNTER_BUFFER 0x92C0 +#define GL_TRANSFORM_FEEDBACK_VARYING 0x92F4 +#define GL_ACTIVE_RESOURCES 0x92F5 +#define GL_MAX_NAME_LENGTH 0x92F6 +#define GL_MAX_NUM_ACTIVE_VARIABLES 0x92F7 +#define GL_NAME_LENGTH 0x92F9 +#define GL_TYPE 0x92FA +#define GL_ARRAY_SIZE 0x92FB +#define GL_OFFSET 0x92FC +#define GL_BLOCK_INDEX 0x92FD +#define GL_ARRAY_STRIDE 0x92FE +#define GL_MATRIX_STRIDE 0x92FF +#define GL_IS_ROW_MAJOR 0x9300 +#define GL_ATOMIC_COUNTER_BUFFER_INDEX 0x9301 +#define GL_BUFFER_BINDING 0x9302 +#define GL_BUFFER_DATA_SIZE 0x9303 +#define GL_NUM_ACTIVE_VARIABLES 0x9304 +#define GL_ACTIVE_VARIABLES 0x9305 +#define GL_REFERENCED_BY_VERTEX_SHADER 0x9306 +#define GL_REFERENCED_BY_FRAGMENT_SHADER 0x930A +#define GL_REFERENCED_BY_COMPUTE_SHADER 0x930B +#define GL_TOP_LEVEL_ARRAY_SIZE 0x930C +#define GL_TOP_LEVEL_ARRAY_STRIDE 0x930D +#define GL_LOCATION 0x930E +#define GL_VERTEX_SHADER_BIT 0x00000001 +#define GL_FRAGMENT_SHADER_BIT 0x00000002 +#define GL_ALL_SHADER_BITS 0xFFFFFFFF +#define GL_PROGRAM_SEPARABLE 0x8258 +#define GL_ACTIVE_PROGRAM 0x8259 +#define GL_PROGRAM_PIPELINE_BINDING 0x825A +#define GL_ATOMIC_COUNTER_BUFFER_BINDING 0x92C1 +#define GL_ATOMIC_COUNTER_BUFFER_START 0x92C2 +#define GL_ATOMIC_COUNTER_BUFFER_SIZE 0x92C3 +#define GL_MAX_VERTEX_ATOMIC_COUNTER_BUFFERS 0x92CC +#define GL_MAX_FRAGMENT_ATOMIC_COUNTER_BUFFERS 0x92D0 +#define GL_MAX_COMBINED_ATOMIC_COUNTER_BUFFERS 0x92D1 +#define GL_MAX_VERTEX_ATOMIC_COUNTERS 0x92D2 +#define GL_MAX_FRAGMENT_ATOMIC_COUNTERS 0x92D6 +#define GL_MAX_COMBINED_ATOMIC_COUNTERS 0x92D7 +#define GL_MAX_ATOMIC_COUNTER_BUFFER_SIZE 0x92D8 +#define GL_MAX_ATOMIC_COUNTER_BUFFER_BINDINGS 0x92DC +#define GL_ACTIVE_ATOMIC_COUNTER_BUFFERS 0x92D9 +#define GL_UNSIGNED_INT_ATOMIC_COUNTER 0x92DB +#define GL_MAX_IMAGE_UNITS 0x8F38 +#define GL_MAX_VERTEX_IMAGE_UNIFORMS 0x90CA +#define GL_MAX_FRAGMENT_IMAGE_UNIFORMS 0x90CE +#define GL_MAX_COMBINED_IMAGE_UNIFORMS 0x90CF +#define GL_IMAGE_BINDING_NAME 0x8F3A +#define GL_IMAGE_BINDING_LEVEL 0x8F3B +#define GL_IMAGE_BINDING_LAYERED 0x8F3C +#define GL_IMAGE_BINDING_LAYER 0x8F3D +#define GL_IMAGE_BINDING_ACCESS 0x8F3E +#define GL_IMAGE_BINDING_FORMAT 0x906E +#define GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT 0x00000001 +#define GL_ELEMENT_ARRAY_BARRIER_BIT 0x00000002 +#define GL_UNIFORM_BARRIER_BIT 0x00000004 +#define GL_TEXTURE_FETCH_BARRIER_BIT 0x00000008 +#define GL_SHADER_IMAGE_ACCESS_BARRIER_BIT 0x00000020 +#define GL_COMMAND_BARRIER_BIT 0x00000040 +#define GL_PIXEL_BUFFER_BARRIER_BIT 0x00000080 +#define GL_TEXTURE_UPDATE_BARRIER_BIT 0x00000100 +#define GL_BUFFER_UPDATE_BARRIER_BIT 0x00000200 +#define GL_FRAMEBUFFER_BARRIER_BIT 0x00000400 +#define GL_TRANSFORM_FEEDBACK_BARRIER_BIT 0x00000800 +#define GL_ATOMIC_COUNTER_BARRIER_BIT 0x00001000 +#define GL_ALL_BARRIER_BITS 0xFFFFFFFF +#define GL_IMAGE_2D 0x904D +#define GL_IMAGE_3D 0x904E +#define GL_IMAGE_CUBE 0x9050 +#define GL_IMAGE_2D_ARRAY 0x9053 +#define 
GL_INT_IMAGE_2D 0x9058 +#define GL_INT_IMAGE_3D 0x9059 +#define GL_INT_IMAGE_CUBE 0x905B +#define GL_INT_IMAGE_2D_ARRAY 0x905E +#define GL_UNSIGNED_INT_IMAGE_2D 0x9063 +#define GL_UNSIGNED_INT_IMAGE_3D 0x9064 +#define GL_UNSIGNED_INT_IMAGE_CUBE 0x9066 +#define GL_UNSIGNED_INT_IMAGE_2D_ARRAY 0x9069 +#define GL_IMAGE_FORMAT_COMPATIBILITY_TYPE 0x90C7 +#define GL_IMAGE_FORMAT_COMPATIBILITY_BY_SIZE 0x90C8 +#define GL_IMAGE_FORMAT_COMPATIBILITY_BY_CLASS 0x90C9 +#define GL_READ_ONLY 0x88B8 +#define GL_WRITE_ONLY 0x88B9 +#define GL_READ_WRITE 0x88BA +#define GL_SHADER_STORAGE_BUFFER 0x90D2 +#define GL_SHADER_STORAGE_BUFFER_BINDING 0x90D3 +#define GL_SHADER_STORAGE_BUFFER_START 0x90D4 +#define GL_SHADER_STORAGE_BUFFER_SIZE 0x90D5 +#define GL_MAX_VERTEX_SHADER_STORAGE_BLOCKS 0x90D6 +#define GL_MAX_FRAGMENT_SHADER_STORAGE_BLOCKS 0x90DA +#define GL_MAX_COMPUTE_SHADER_STORAGE_BLOCKS 0x90DB +#define GL_MAX_COMBINED_SHADER_STORAGE_BLOCKS 0x90DC +#define GL_MAX_SHADER_STORAGE_BUFFER_BINDINGS 0x90DD +#define GL_MAX_SHADER_STORAGE_BLOCK_SIZE 0x90DE +#define GL_SHADER_STORAGE_BUFFER_OFFSET_ALIGNMENT 0x90DF +#define GL_SHADER_STORAGE_BARRIER_BIT 0x00002000 +#define GL_MAX_COMBINED_SHADER_OUTPUT_RESOURCES 0x8F39 +#define GL_DEPTH_STENCIL_TEXTURE_MODE 0x90EA +#define GL_STENCIL_INDEX 0x1901 +#define GL_MIN_PROGRAM_TEXTURE_GATHER_OFFSET 0x8E5E +#define GL_MAX_PROGRAM_TEXTURE_GATHER_OFFSET 0x8E5F +#define GL_SAMPLE_POSITION 0x8E50 +#define GL_SAMPLE_MASK 0x8E51 +#define GL_SAMPLE_MASK_VALUE 0x8E52 +#define GL_TEXTURE_2D_MULTISAMPLE 0x9100 +#define GL_MAX_SAMPLE_MASK_WORDS 0x8E59 +#define GL_MAX_COLOR_TEXTURE_SAMPLES 0x910E +#define GL_MAX_DEPTH_TEXTURE_SAMPLES 0x910F +#define GL_MAX_INTEGER_SAMPLES 0x9110 +#define GL_TEXTURE_BINDING_2D_MULTISAMPLE 0x9104 +#define GL_TEXTURE_SAMPLES 0x9106 +#define GL_TEXTURE_FIXED_SAMPLE_LOCATIONS 0x9107 +#define GL_TEXTURE_WIDTH 0x1000 +#define GL_TEXTURE_HEIGHT 0x1001 +#define GL_TEXTURE_DEPTH 0x8071 +#define GL_TEXTURE_INTERNAL_FORMAT 0x1003 +#define GL_TEXTURE_RED_SIZE 0x805C +#define GL_TEXTURE_GREEN_SIZE 0x805D +#define GL_TEXTURE_BLUE_SIZE 0x805E +#define GL_TEXTURE_ALPHA_SIZE 0x805F +#define GL_TEXTURE_DEPTH_SIZE 0x884A +#define GL_TEXTURE_STENCIL_SIZE 0x88F1 +#define GL_TEXTURE_SHARED_SIZE 0x8C3F +#define GL_TEXTURE_RED_TYPE 0x8C10 +#define GL_TEXTURE_GREEN_TYPE 0x8C11 +#define GL_TEXTURE_BLUE_TYPE 0x8C12 +#define GL_TEXTURE_ALPHA_TYPE 0x8C13 +#define GL_TEXTURE_DEPTH_TYPE 0x8C16 +#define GL_TEXTURE_COMPRESSED 0x86A1 +#define GL_SAMPLER_2D_MULTISAMPLE 0x9108 +#define GL_INT_SAMPLER_2D_MULTISAMPLE 0x9109 +#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE 0x910A +#define GL_VERTEX_ATTRIB_BINDING 0x82D4 +#define GL_VERTEX_ATTRIB_RELATIVE_OFFSET 0x82D5 +#define GL_VERTEX_BINDING_DIVISOR 0x82D6 +#define GL_VERTEX_BINDING_OFFSET 0x82D7 +#define GL_VERTEX_BINDING_STRIDE 0x82D8 +#define GL_VERTEX_BINDING_BUFFER 0x8F4F +#define GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET 0x82D9 +#define GL_MAX_VERTEX_ATTRIB_BINDINGS 0x82DA +#define GL_MAX_VERTEX_ATTRIB_STRIDE 0x82E5 +typedef void (GL_APIENTRYP PFNGLDISPATCHCOMPUTEPROC) (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z); +typedef void (GL_APIENTRYP PFNGLDISPATCHCOMPUTEINDIRECTPROC) (GLintptr indirect); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINDIRECTPROC) (GLenum mode, const void *indirect); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINDIRECTPROC) (GLenum mode, GLenum type, const void *indirect); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void 
(GL_APIENTRYP PFNGLGETFRAMEBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINTERFACEIVPROC) (GLuint program, GLenum programInterface, GLenum pname, GLint *params); +typedef GLuint (GL_APIENTRYP PFNGLGETPROGRAMRESOURCEINDEXPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMRESOURCENAMEPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei bufSize, GLsizei *length, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMRESOURCEIVPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLint *params); +typedef GLint (GL_APIENTRYP PFNGLGETPROGRAMRESOURCELOCATIONPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMSTAGESPROC) (GLuint pipeline, GLbitfield stages, GLuint program); +typedef void (GL_APIENTRYP PFNGLACTIVESHADERPROGRAMPROC) (GLuint pipeline, GLuint program); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROGRAMVPROC) (GLenum type, GLsizei count, const GLchar *const*strings); +typedef void (GL_APIENTRYP PFNGLBINDPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPIPELINESPROC) (GLsizei n, const GLuint *pipelines); +typedef void (GL_APIENTRYP PFNGLGENPROGRAMPIPELINESPROC) (GLsizei n, GLuint *pipelines); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMPIPELINEIVPROC) (GLuint pipeline, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1IPROC) (GLuint program, GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2IPROC) (GLuint program, GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3IPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4IPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UIPROC) (GLuint program, GLint location, GLuint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FPROC) (GLuint program, GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void 
(GL_APIENTRYP PFNGLPROGRAMUNIFORM1UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMPIPELINEINFOLOGPROC) (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLBINDIMAGETEXTUREPROC) (GLuint unit, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLenum format); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANI_VPROC) (GLenum target, GLuint index, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLMEMORYBARRIERPROC) (GLbitfield barriers); +typedef void (GL_APIENTRYP PFNGLMEMORYBARRIERBYREGIONPROC) (GLbitfield barriers); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (GL_APIENTRYP PFNGLGETMULTISAMPLEFVPROC) (GLenum pname, GLuint index, GLfloat *val); +typedef void (GL_APIENTRYP PFNGLSAMPLEMASKIPROC) (GLuint maskNumber, GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLGETTEXLEVELPARAMETERIVPROC) (GLenum target, GLint level, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP 
PFNGLGETTEXLEVELPARAMETERFVPROC) (GLenum target, GLint level, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLBINDVERTEXBUFFERPROC) (GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBFORMATPROC) (GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBIFORMATPROC) (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBBINDINGPROC) (GLuint attribindex, GLuint bindingindex); +typedef void (GL_APIENTRYP PFNGLVERTEXBINDINGDIVISORPROC) (GLuint bindingindex, GLuint divisor); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glDispatchCompute (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z); +GL_APICALL void GL_APIENTRY glDispatchComputeIndirect (GLintptr indirect); +GL_APICALL void GL_APIENTRY glDrawArraysIndirect (GLenum mode, const void *indirect); +GL_APICALL void GL_APIENTRY glDrawElementsIndirect (GLenum mode, GLenum type, const void *indirect); +GL_APICALL void GL_APIENTRY glFramebufferParameteri (GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glGetFramebufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetProgramInterfaceiv (GLuint program, GLenum programInterface, GLenum pname, GLint *params); +GL_APICALL GLuint GL_APIENTRY glGetProgramResourceIndex (GLuint program, GLenum programInterface, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetProgramResourceName (GLuint program, GLenum programInterface, GLuint index, GLsizei bufSize, GLsizei *length, GLchar *name); +GL_APICALL void GL_APIENTRY glGetProgramResourceiv (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLint *params); +GL_APICALL GLint GL_APIENTRY glGetProgramResourceLocation (GLuint program, GLenum programInterface, const GLchar *name); +GL_APICALL void GL_APIENTRY glUseProgramStages (GLuint pipeline, GLbitfield stages, GLuint program); +GL_APICALL void GL_APIENTRY glActiveShaderProgram (GLuint pipeline, GLuint program); +GL_APICALL GLuint GL_APIENTRY glCreateShaderProgramv (GLenum type, GLsizei count, const GLchar *const*strings); +GL_APICALL void GL_APIENTRY glBindProgramPipeline (GLuint pipeline); +GL_APICALL void GL_APIENTRY glDeleteProgramPipelines (GLsizei n, const GLuint *pipelines); +GL_APICALL void GL_APIENTRY glGenProgramPipelines (GLsizei n, GLuint *pipelines); +GL_APICALL GLboolean GL_APIENTRY glIsProgramPipeline (GLuint pipeline); +GL_APICALL void GL_APIENTRY glGetProgramPipelineiv (GLuint pipeline, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glProgramUniform1i (GLuint program, GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glProgramUniform2i (GLuint program, GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glProgramUniform3i (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glProgramUniform4i (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glProgramUniform1ui (GLuint program, GLint location, GLuint v0); +GL_APICALL void GL_APIENTRY glProgramUniform2ui (GLuint program, GLint location, GLuint v0, GLuint v1); +GL_APICALL void GL_APIENTRY glProgramUniform3ui (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +GL_APICALL void GL_APIENTRY glProgramUniform4ui (GLuint program, GLint 
location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GL_APICALL void GL_APIENTRY glProgramUniform1f (GLuint program, GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glProgramUniform2f (GLuint program, GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glProgramUniform3f (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glProgramUniform4f (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glProgramUniform1iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform3iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform1uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform3uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform1fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform2fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform3fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform4fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2x3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x2fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2x4fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x2fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x4fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glValidateProgramPipeline (GLuint pipeline); +GL_APICALL void GL_APIENTRY glGetProgramPipelineInfoLog (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glBindImageTexture (GLuint unit, GLuint texture, GLint 
level, GLboolean layered, GLint layer, GLenum access, GLenum format); +GL_APICALL void GL_APIENTRY glGetBooleani_v (GLenum target, GLuint index, GLboolean *data); +GL_APICALL void GL_APIENTRY glMemoryBarrier (GLbitfield barriers); +GL_APICALL void GL_APIENTRY glMemoryBarrierByRegion (GLbitfield barriers); +GL_APICALL void GL_APIENTRY glTexStorage2DMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GL_APICALL void GL_APIENTRY glGetMultisamplefv (GLenum pname, GLuint index, GLfloat *val); +GL_APICALL void GL_APIENTRY glSampleMaski (GLuint maskNumber, GLbitfield mask); +GL_APICALL void GL_APIENTRY glGetTexLevelParameteriv (GLenum target, GLint level, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexLevelParameterfv (GLenum target, GLint level, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glBindVertexBuffer (GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +GL_APICALL void GL_APIENTRY glVertexAttribFormat (GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +GL_APICALL void GL_APIENTRY glVertexAttribIFormat (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GL_APICALL void GL_APIENTRY glVertexAttribBinding (GLuint attribindex, GLuint bindingindex); +GL_APICALL void GL_APIENTRY glVertexBindingDivisor (GLuint bindingindex, GLuint divisor); +#endif +#endif /* GL_ES_VERSION_3_1 */ + +#ifndef GL_ES_VERSION_3_2 +#define GL_ES_VERSION_3_2 1 +typedef void (GL_APIENTRY *GLDEBUGPROC)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam); +#define GL_MULTISAMPLE_LINE_WIDTH_RANGE 0x9381 +#define GL_MULTISAMPLE_LINE_WIDTH_GRANULARITY 0x9382 +#define GL_MULTIPLY 0x9294 +#define GL_SCREEN 0x9295 +#define GL_OVERLAY 0x9296 +#define GL_DARKEN 0x9297 +#define GL_LIGHTEN 0x9298 +#define GL_COLORDODGE 0x9299 +#define GL_COLORBURN 0x929A +#define GL_HARDLIGHT 0x929B +#define GL_SOFTLIGHT 0x929C +#define GL_DIFFERENCE 0x929E +#define GL_EXCLUSION 0x92A0 +#define GL_HSL_HUE 0x92AD +#define GL_HSL_SATURATION 0x92AE +#define GL_HSL_COLOR 0x92AF +#define GL_HSL_LUMINOSITY 0x92B0 +#define GL_DEBUG_OUTPUT_SYNCHRONOUS 0x8242 +#define GL_DEBUG_NEXT_LOGGED_MESSAGE_LENGTH 0x8243 +#define GL_DEBUG_CALLBACK_FUNCTION 0x8244 +#define GL_DEBUG_CALLBACK_USER_PARAM 0x8245 +#define GL_DEBUG_SOURCE_API 0x8246 +#define GL_DEBUG_SOURCE_WINDOW_SYSTEM 0x8247 +#define GL_DEBUG_SOURCE_SHADER_COMPILER 0x8248 +#define GL_DEBUG_SOURCE_THIRD_PARTY 0x8249 +#define GL_DEBUG_SOURCE_APPLICATION 0x824A +#define GL_DEBUG_SOURCE_OTHER 0x824B +#define GL_DEBUG_TYPE_ERROR 0x824C +#define GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR 0x824D +#define GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR 0x824E +#define GL_DEBUG_TYPE_PORTABILITY 0x824F +#define GL_DEBUG_TYPE_PERFORMANCE 0x8250 +#define GL_DEBUG_TYPE_OTHER 0x8251 +#define GL_DEBUG_TYPE_MARKER 0x8268 +#define GL_DEBUG_TYPE_PUSH_GROUP 0x8269 +#define GL_DEBUG_TYPE_POP_GROUP 0x826A +#define GL_DEBUG_SEVERITY_NOTIFICATION 0x826B +#define GL_MAX_DEBUG_GROUP_STACK_DEPTH 0x826C +#define GL_DEBUG_GROUP_STACK_DEPTH 0x826D +#define GL_BUFFER 0x82E0 +#define GL_SHADER 0x82E1 +#define GL_PROGRAM 0x82E2 +#define GL_VERTEX_ARRAY 0x8074 +#define GL_QUERY 0x82E3 +#define GL_PROGRAM_PIPELINE 0x82E4 +#define GL_SAMPLER 0x82E6 +#define GL_MAX_LABEL_LENGTH 0x82E8 +#define GL_MAX_DEBUG_MESSAGE_LENGTH 0x9143 +#define GL_MAX_DEBUG_LOGGED_MESSAGES 0x9144 +#define GL_DEBUG_LOGGED_MESSAGES 0x9145 
+#define GL_DEBUG_SEVERITY_HIGH 0x9146 +#define GL_DEBUG_SEVERITY_MEDIUM 0x9147 +#define GL_DEBUG_SEVERITY_LOW 0x9148 +#define GL_DEBUG_OUTPUT 0x92E0 +#define GL_CONTEXT_FLAG_DEBUG_BIT 0x00000002 +#define GL_STACK_OVERFLOW 0x0503 +#define GL_STACK_UNDERFLOW 0x0504 +#define GL_GEOMETRY_SHADER 0x8DD9 +#define GL_GEOMETRY_SHADER_BIT 0x00000004 +#define GL_GEOMETRY_VERTICES_OUT 0x8916 +#define GL_GEOMETRY_INPUT_TYPE 0x8917 +#define GL_GEOMETRY_OUTPUT_TYPE 0x8918 +#define GL_GEOMETRY_SHADER_INVOCATIONS 0x887F +#define GL_LAYER_PROVOKING_VERTEX 0x825E +#define GL_LINES_ADJACENCY 0x000A +#define GL_LINE_STRIP_ADJACENCY 0x000B +#define GL_TRIANGLES_ADJACENCY 0x000C +#define GL_TRIANGLE_STRIP_ADJACENCY 0x000D +#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS 0x8DDF +#define GL_MAX_GEOMETRY_UNIFORM_BLOCKS 0x8A2C +#define GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS 0x8A32 +#define GL_MAX_GEOMETRY_INPUT_COMPONENTS 0x9123 +#define GL_MAX_GEOMETRY_OUTPUT_COMPONENTS 0x9124 +#define GL_MAX_GEOMETRY_OUTPUT_VERTICES 0x8DE0 +#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS 0x8DE1 +#define GL_MAX_GEOMETRY_SHADER_INVOCATIONS 0x8E5A +#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS 0x8C29 +#define GL_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS 0x92CF +#define GL_MAX_GEOMETRY_ATOMIC_COUNTERS 0x92D5 +#define GL_MAX_GEOMETRY_IMAGE_UNIFORMS 0x90CD +#define GL_MAX_GEOMETRY_SHADER_STORAGE_BLOCKS 0x90D7 +#define GL_FIRST_VERTEX_CONVENTION 0x8E4D +#define GL_LAST_VERTEX_CONVENTION 0x8E4E +#define GL_UNDEFINED_VERTEX 0x8260 +#define GL_PRIMITIVES_GENERATED 0x8C87 +#define GL_FRAMEBUFFER_DEFAULT_LAYERS 0x9312 +#define GL_MAX_FRAMEBUFFER_LAYERS 0x9317 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS 0x8DA8 +#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED 0x8DA7 +#define GL_REFERENCED_BY_GEOMETRY_SHADER 0x9309 +#define GL_PRIMITIVE_BOUNDING_BOX 0x92BE +#define GL_CONTEXT_FLAG_ROBUST_ACCESS_BIT 0x00000004 +#define GL_CONTEXT_FLAGS 0x821E +#define GL_LOSE_CONTEXT_ON_RESET 0x8252 +#define GL_GUILTY_CONTEXT_RESET 0x8253 +#define GL_INNOCENT_CONTEXT_RESET 0x8254 +#define GL_UNKNOWN_CONTEXT_RESET 0x8255 +#define GL_RESET_NOTIFICATION_STRATEGY 0x8256 +#define GL_NO_RESET_NOTIFICATION 0x8261 +#define GL_CONTEXT_LOST 0x0507 +#define GL_SAMPLE_SHADING 0x8C36 +#define GL_MIN_SAMPLE_SHADING_VALUE 0x8C37 +#define GL_MIN_FRAGMENT_INTERPOLATION_OFFSET 0x8E5B +#define GL_MAX_FRAGMENT_INTERPOLATION_OFFSET 0x8E5C +#define GL_FRAGMENT_INTERPOLATION_OFFSET_BITS 0x8E5D +#define GL_PATCHES 0x000E +#define GL_PATCH_VERTICES 0x8E72 +#define GL_TESS_CONTROL_OUTPUT_VERTICES 0x8E75 +#define GL_TESS_GEN_MODE 0x8E76 +#define GL_TESS_GEN_SPACING 0x8E77 +#define GL_TESS_GEN_VERTEX_ORDER 0x8E78 +#define GL_TESS_GEN_POINT_MODE 0x8E79 +#define GL_ISOLINES 0x8E7A +#define GL_QUADS 0x0007 +#define GL_FRACTIONAL_ODD 0x8E7B +#define GL_FRACTIONAL_EVEN 0x8E7C +#define GL_MAX_PATCH_VERTICES 0x8E7D +#define GL_MAX_TESS_GEN_LEVEL 0x8E7E +#define GL_MAX_TESS_CONTROL_UNIFORM_COMPONENTS 0x8E7F +#define GL_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS 0x8E80 +#define GL_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS 0x8E81 +#define GL_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS 0x8E82 +#define GL_MAX_TESS_CONTROL_OUTPUT_COMPONENTS 0x8E83 +#define GL_MAX_TESS_PATCH_COMPONENTS 0x8E84 +#define GL_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS 0x8E85 +#define GL_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS 0x8E86 +#define GL_MAX_TESS_CONTROL_UNIFORM_BLOCKS 0x8E89 +#define GL_MAX_TESS_EVALUATION_UNIFORM_BLOCKS 0x8E8A +#define GL_MAX_TESS_CONTROL_INPUT_COMPONENTS 0x886C +#define GL_MAX_TESS_EVALUATION_INPUT_COMPONENTS 0x886D 
+#define GL_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS 0x8E1E +#define GL_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS 0x8E1F +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS 0x92CD +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS 0x92CE +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTERS 0x92D3 +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTERS 0x92D4 +#define GL_MAX_TESS_CONTROL_IMAGE_UNIFORMS 0x90CB +#define GL_MAX_TESS_EVALUATION_IMAGE_UNIFORMS 0x90CC +#define GL_MAX_TESS_CONTROL_SHADER_STORAGE_BLOCKS 0x90D8 +#define GL_MAX_TESS_EVALUATION_SHADER_STORAGE_BLOCKS 0x90D9 +#define GL_PRIMITIVE_RESTART_FOR_PATCHES_SUPPORTED 0x8221 +#define GL_IS_PER_PATCH 0x92E7 +#define GL_REFERENCED_BY_TESS_CONTROL_SHADER 0x9307 +#define GL_REFERENCED_BY_TESS_EVALUATION_SHADER 0x9308 +#define GL_TESS_CONTROL_SHADER 0x8E88 +#define GL_TESS_EVALUATION_SHADER 0x8E87 +#define GL_TESS_CONTROL_SHADER_BIT 0x00000008 +#define GL_TESS_EVALUATION_SHADER_BIT 0x00000010 +#define GL_TEXTURE_BORDER_COLOR 0x1004 +#define GL_CLAMP_TO_BORDER 0x812D +#define GL_TEXTURE_BUFFER 0x8C2A +#define GL_TEXTURE_BUFFER_BINDING 0x8C2A +#define GL_MAX_TEXTURE_BUFFER_SIZE 0x8C2B +#define GL_TEXTURE_BINDING_BUFFER 0x8C2C +#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING 0x8C2D +#define GL_TEXTURE_BUFFER_OFFSET_ALIGNMENT 0x919F +#define GL_SAMPLER_BUFFER 0x8DC2 +#define GL_INT_SAMPLER_BUFFER 0x8DD0 +#define GL_UNSIGNED_INT_SAMPLER_BUFFER 0x8DD8 +#define GL_IMAGE_BUFFER 0x9051 +#define GL_INT_IMAGE_BUFFER 0x905C +#define GL_UNSIGNED_INT_IMAGE_BUFFER 0x9067 +#define GL_TEXTURE_BUFFER_OFFSET 0x919D +#define GL_TEXTURE_BUFFER_SIZE 0x919E +#define GL_COMPRESSED_RGBA_ASTC_4x4 0x93B0 +#define GL_COMPRESSED_RGBA_ASTC_5x4 0x93B1 +#define GL_COMPRESSED_RGBA_ASTC_5x5 0x93B2 +#define GL_COMPRESSED_RGBA_ASTC_6x5 0x93B3 +#define GL_COMPRESSED_RGBA_ASTC_6x6 0x93B4 +#define GL_COMPRESSED_RGBA_ASTC_8x5 0x93B5 +#define GL_COMPRESSED_RGBA_ASTC_8x6 0x93B6 +#define GL_COMPRESSED_RGBA_ASTC_8x8 0x93B7 +#define GL_COMPRESSED_RGBA_ASTC_10x5 0x93B8 +#define GL_COMPRESSED_RGBA_ASTC_10x6 0x93B9 +#define GL_COMPRESSED_RGBA_ASTC_10x8 0x93BA +#define GL_COMPRESSED_RGBA_ASTC_10x10 0x93BB +#define GL_COMPRESSED_RGBA_ASTC_12x10 0x93BC +#define GL_COMPRESSED_RGBA_ASTC_12x12 0x93BD +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4 0x93D0 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4 0x93D1 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5 0x93D2 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5 0x93D3 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6 0x93D4 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5 0x93D5 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6 0x93D6 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8 0x93D7 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5 0x93D8 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6 0x93D9 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8 0x93DA +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10 0x93DB +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10 0x93DC +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12 0x93DD +#define GL_TEXTURE_CUBE_MAP_ARRAY 0x9009 +#define GL_TEXTURE_BINDING_CUBE_MAP_ARRAY 0x900A +#define GL_SAMPLER_CUBE_MAP_ARRAY 0x900C +#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW 0x900D +#define GL_INT_SAMPLER_CUBE_MAP_ARRAY 0x900E +#define GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY 0x900F +#define GL_IMAGE_CUBE_MAP_ARRAY 0x9054 +#define GL_INT_IMAGE_CUBE_MAP_ARRAY 0x905F +#define GL_UNSIGNED_INT_IMAGE_CUBE_MAP_ARRAY 0x906A +#define GL_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9102 +#define GL_TEXTURE_BINDING_2D_MULTISAMPLE_ARRAY 0x9105 +#define GL_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910B 
+#define GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910C +#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910D +typedef void (GL_APIENTRYP PFNGLBLENDBARRIERPROC) (void); +typedef void (GL_APIENTRYP PFNGLCOPYIMAGESUBDATAPROC) (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGECONTROLPROC) (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGEINSERTPROC) (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGECALLBACKPROC) (GLDEBUGPROC callback, const void *userParam); +typedef GLuint (GL_APIENTRYP PFNGLGETDEBUGMESSAGELOGPROC) (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +typedef void (GL_APIENTRYP PFNGLPUSHDEBUGGROUPPROC) (GLenum source, GLuint id, GLsizei length, const GLchar *message); +typedef void (GL_APIENTRYP PFNGLPOPDEBUGGROUPPROC) (void); +typedef void (GL_APIENTRYP PFNGLOBJECTLABELPROC) (GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETOBJECTLABELPROC) (GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (GL_APIENTRYP PFNGLOBJECTPTRLABELPROC) (const void *ptr, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETOBJECTPTRLABELPROC) (const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETPOINTERVPROC) (GLenum pname, void **params); +typedef void (GL_APIENTRYP PFNGLENABLEIPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLDISABLEIPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONIPROC) (GLuint buf, GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEIPROC) (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCIPROC) (GLuint buf, GLenum src, GLenum dst); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEIPROC) (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +typedef void (GL_APIENTRYP PFNGLCOLORMASKIPROC) (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDIPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSBASEVERTEXPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSBASEVERTEXPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTUREPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level); +typedef void (GL_APIENTRYP PFNGLPRIMITIVEBOUNDINGBOXPROC) (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +typedef GLenum (GL_APIENTRYP PFNGLGETGRAPHICSRESETSTATUSPROC) (void); +typedef void (GL_APIENTRYP PFNGLREADNPIXELSPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum 
format, GLenum type, GLsizei bufSize, void *data); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMFVPROC) (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMIVPROC) (GLuint program, GLint location, GLsizei bufSize, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMUIVPROC) (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +typedef void (GL_APIENTRYP PFNGLMINSAMPLESHADINGPROC) (GLfloat value); +typedef void (GL_APIENTRYP PFNGLPATCHPARAMETERIPROC) (GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIUIVPROC) (GLenum target, GLenum pname, const GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIUIVPROC) (GLenum target, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIIVPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIUIVPROC) (GLuint sampler, GLenum pname, const GLuint *param); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIIVPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVPROC) (GLuint sampler, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLTEXBUFFERPROC) (GLenum target, GLenum internalformat, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTEXBUFFERRANGEPROC) (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glBlendBarrier (void); +GL_APICALL void GL_APIENTRY glCopyImageSubData (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +GL_APICALL void GL_APIENTRY glDebugMessageControl (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +GL_APICALL void GL_APIENTRY glDebugMessageInsert (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +GL_APICALL void GL_APIENTRY glDebugMessageCallback (GLDEBUGPROC callback, const void *userParam); +GL_APICALL GLuint GL_APIENTRY glGetDebugMessageLog (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +GL_APICALL void GL_APIENTRY glPushDebugGroup (GLenum source, GLuint id, GLsizei length, const GLchar *message); +GL_APICALL void GL_APIENTRY glPopDebugGroup (void); +GL_APICALL void GL_APIENTRY glObjectLabel (GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glGetObjectLabel (GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +GL_APICALL void GL_APIENTRY glObjectPtrLabel (const void *ptr, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glGetObjectPtrLabel (const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +GL_APICALL void GL_APIENTRY glGetPointerv (GLenum pname, void **params); +GL_APICALL void GL_APIENTRY glEnablei (GLenum 
target, GLuint index); +GL_APICALL void GL_APIENTRY glDisablei (GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glBlendEquationi (GLuint buf, GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparatei (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFunci (GLuint buf, GLenum src, GLenum dst); +GL_APICALL void GL_APIENTRY glBlendFuncSeparatei (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +GL_APICALL void GL_APIENTRY glColorMaski (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +GL_APICALL GLboolean GL_APIENTRY glIsEnabledi (GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glDrawElementsBaseVertex (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawRangeElementsBaseVertex (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseVertex (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +GL_APICALL void GL_APIENTRY glFramebufferTexture (GLenum target, GLenum attachment, GLuint texture, GLint level); +GL_APICALL void GL_APIENTRY glPrimitiveBoundingBox (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +GL_APICALL GLenum GL_APIENTRY glGetGraphicsResetStatus (void); +GL_APICALL void GL_APIENTRY glReadnPixels (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GL_APICALL void GL_APIENTRY glGetnUniformfv (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetnUniformiv (GLuint program, GLint location, GLsizei bufSize, GLint *params); +GL_APICALL void GL_APIENTRY glGetnUniformuiv (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +GL_APICALL void GL_APIENTRY glMinSampleShading (GLfloat value); +GL_APICALL void GL_APIENTRY glPatchParameteri (GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glTexParameterIiv (GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexParameterIuiv (GLenum target, GLenum pname, const GLuint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIiv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIuiv (GLenum target, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glSamplerParameterIiv (GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterIuiv (GLuint sampler, GLenum pname, const GLuint *param); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIiv (GLuint sampler, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIuiv (GLuint sampler, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glTexBuffer (GLenum target, GLenum internalformat, GLuint buffer); +GL_APICALL void GL_APIENTRY glTexBufferRange (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glTexStorage3DMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +#endif +#endif /* GL_ES_VERSION_3_2 */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/angle/include/GLES3/gl32ext_explicit_context_autogen.inc 
b/vendor/angle/include/GLES3/gl32ext_explicit_context_autogen.inc new file mode 100644 index 00000000..d698e3c5 --- /dev/null +++ b/vendor/angle/include/GLES3/gl32ext_explicit_context_autogen.inc @@ -0,0 +1,100 @@ +// GENERATED FILE - DO NOT EDIT. +// Generated by generate_entry_points.py using data from gl.xml and gl_angle_ext.xml. +// +// Copyright 2020 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// gl32ext_explicit_context_autogen.inc: +// Function declarations for the EGL_ANGLE_explicit_context extension + +typedef void (GL_APIENTRYP PFNGLBLENDBARRIERCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEICONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONICONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEICONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCICONTEXTANGLEPROC)(GLeglContext ctx, GLuint buf, GLenum src, GLenum dst); +typedef void (GL_APIENTRYP PFNGLCOLORMASKICONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +typedef void (GL_APIENTRYP PFNGLCOPYIMAGESUBDATACONTEXTANGLEPROC)(GLeglContext ctx, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGECALLBACKCONTEXTANGLEPROC)(GLeglContext ctx, GLDEBUGPROC callback, const void *userParam); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGECONTROLCONTEXTANGLEPROC)(GLeglContext ctx, GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGEINSERTCONTEXTANGLEPROC)(GLeglContext ctx, GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +typedef void (GL_APIENTRYP PFNGLDISABLEICONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSBASEVERTEXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSBASEVERTEXCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLENABLEICONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLuint texture, GLint level); +typedef GLuint (GL_APIENTRYP PFNGLGETDEBUGMESSAGELOGCONTEXTANGLEPROC)(GLeglContext ctx, GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +typedef GLenum (GL_APIENTRYP PFNGLGETGRAPHICSRESETSTATUSCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLGETOBJECTLABELCONTEXTANGLEPROC)(GLeglContext ctx, GLenum 
identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETOBJECTPTRLABELCONTEXTANGLEPROC)(GLeglContext ctx, const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETPOINTERVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, void **params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIUIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMFVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMUIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLuint *params); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDICONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLMINSAMPLESHADINGCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat value); +typedef void (GL_APIENTRYP PFNGLOBJECTLABELCONTEXTANGLEPROC)(GLeglContext ctx, GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLOBJECTPTRLABELCONTEXTANGLEPROC)(GLeglContext ctx, const void *ptr, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLPATCHPARAMETERICONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLPOPDEBUGGROUPCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLPRIMITIVEBOUNDINGBOXCONTEXTANGLEPROC)(GLeglContext ctx, GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +typedef void (GL_APIENTRYP PFNGLPUSHDEBUGGROUPCONTEXTANGLEPROC)(GLeglContext ctx, GLenum source, GLuint id, GLsizei length, const GLchar *message); +typedef void (GL_APIENTRYP PFNGLREADNPIXELSCONTEXTANGLEPROC)(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIUIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, const GLuint *param); +typedef void (GL_APIENTRYP PFNGLTEXBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTEXBUFFERRANGECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIUIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, const GLuint *params); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DMULTISAMPLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei samples, GLenum 
internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBlendBarrierContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glBlendEquationSeparateiContextANGLE(GLeglContext ctx, GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendEquationiContextANGLE(GLeglContext ctx, GLuint buf, GLenum mode); +GL_APICALL void GL_APIENTRY glBlendFuncSeparateiContextANGLE(GLeglContext ctx, GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +GL_APICALL void GL_APIENTRY glBlendFunciContextANGLE(GLeglContext ctx, GLuint buf, GLenum src, GLenum dst); +GL_APICALL void GL_APIENTRY glColorMaskiContextANGLE(GLeglContext ctx, GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +GL_APICALL void GL_APIENTRY glCopyImageSubDataContextANGLE(GLeglContext ctx, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +GL_APICALL void GL_APIENTRY glDebugMessageCallbackContextANGLE(GLeglContext ctx, GLDEBUGPROC callback, const void *userParam); +GL_APICALL void GL_APIENTRY glDebugMessageControlContextANGLE(GLeglContext ctx, GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +GL_APICALL void GL_APIENTRY glDebugMessageInsertContextANGLE(GLeglContext ctx, GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +GL_APICALL void GL_APIENTRY glDisableiContextANGLE(GLeglContext ctx, GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glDrawElementsBaseVertexContextANGLE(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseVertexContextANGLE(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawRangeElementsBaseVertexContextANGLE(GLeglContext ctx, GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glEnableiContextANGLE(GLeglContext ctx, GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glFramebufferTextureContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLuint texture, GLint level); +GL_APICALL GLuint GL_APIENTRY glGetDebugMessageLogContextANGLE(GLeglContext ctx, GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +GL_APICALL GLenum GL_APIENTRY glGetGraphicsResetStatusContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glGetObjectLabelContextANGLE(GLeglContext ctx, GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +GL_APICALL void GL_APIENTRY glGetObjectPtrLabelContextANGLE(GLeglContext ctx, const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +GL_APICALL void GL_APIENTRY glGetPointervContextANGLE(GLeglContext ctx, GLenum pname, void **params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIivContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIuivContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY 
glGetTexParameterIivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIuivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glGetnUniformfvContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetnUniformivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLint *params); +GL_APICALL void GL_APIENTRY glGetnUniformuivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLsizei bufSize, GLuint *params); +GL_APICALL GLboolean GL_APIENTRY glIsEnablediContextANGLE(GLeglContext ctx, GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glMinSampleShadingContextANGLE(GLeglContext ctx, GLfloat value); +GL_APICALL void GL_APIENTRY glObjectLabelContextANGLE(GLeglContext ctx, GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glObjectPtrLabelContextANGLE(GLeglContext ctx, const void *ptr, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glPatchParameteriContextANGLE(GLeglContext ctx, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glPopDebugGroupContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glPrimitiveBoundingBoxContextANGLE(GLeglContext ctx, GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +GL_APICALL void GL_APIENTRY glPushDebugGroupContextANGLE(GLeglContext ctx, GLenum source, GLuint id, GLsizei length, const GLchar *message); +GL_APICALL void GL_APIENTRY glReadnPixelsContextANGLE(GLeglContext ctx, GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GL_APICALL void GL_APIENTRY glSamplerParameterIivContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterIuivContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, const GLuint *param); +GL_APICALL void GL_APIENTRY glTexBufferContextANGLE(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer); +GL_APICALL void GL_APIENTRY glTexBufferRangeContextANGLE(GLeglContext ctx, GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glTexParameterIivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexParameterIuivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, const GLuint *params); +GL_APICALL void GL_APIENTRY glTexStorage3DMultisampleContextANGLE(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +#endif diff --git a/vendor/angle/include/GLES3/gl3ext_explicit_context_autogen.inc b/vendor/angle/include/GLES3/gl3ext_explicit_context_autogen.inc new file mode 100644 index 00000000..8f36bc8f --- /dev/null +++ b/vendor/angle/include/GLES3/gl3ext_explicit_context_autogen.inc @@ -0,0 +1,220 @@ +// GENERATED FILE - DO NOT EDIT. +// Generated by generate_entry_points.py using data from gl.xml and gl_angle_ext.xml. +// +// Copyright 2020 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+// +// gl3ext_explicit_context_autogen.inc: +// Function declarations for the EGL_ANGLE_explicit_context extension + +typedef void (GL_APIENTRYP PFNGLBEGINQUERYCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLBEGINTRANSFORMFEEDBACKCONTEXTANGLEPROC)(GLeglContext ctx, GLenum primitiveMode); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERBASECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERRANGECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLBINDSAMPLERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint unit, GLuint sampler); +typedef void (GL_APIENTRYP PFNGLBINDTRANSFORMFEEDBACKCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLBINDVERTEXARRAYCONTEXTANGLEPROC)(GLeglContext ctx, GLuint array); +typedef void (GL_APIENTRYP PFNGLBLITFRAMEBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERFICONTEXTANGLEPROC)(GLeglContext ctx, GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERFVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum buffer, GLint drawbuffer, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum buffer, GLint drawbuffer, const GLint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERUIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum buffer, GLint drawbuffer, const GLuint *value); +typedef GLenum (GL_APIENTRYP PFNGLCLIENTWAITSYNCCONTEXTANGLEPROC)(GLeglContext ctx, GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOPYBUFFERSUBDATACONTEXTANGLEPROC)(GLeglContext ctx, GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE3DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLDELETEQUERIESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *ids); +typedef void (GL_APIENTRYP PFNGLDELETESAMPLERSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei count, const GLuint *samplers); +typedef void (GL_APIENTRYP PFNGLDELETESYNCCONTEXTANGLEPROC)(GLeglContext ctx, GLsync sync); +typedef void (GL_APIENTRYP PFNGLDELETETRANSFORMFEEDBACKSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *ids); +typedef void (GL_APIENTRYP PFNGLDELETEVERTEXARRAYSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +typedef void (GL_APIENTRYP 
PFNGLDRAWBUFFERSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, const GLenum *bufs); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSCONTEXTANGLEPROC)(GLeglContext ctx, GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLENDQUERYCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target); +typedef void (GL_APIENTRYP PFNGLENDTRANSFORMFEEDBACKCONTEXTANGLEPROC)(GLeglContext ctx); +typedef GLsync (GL_APIENTRYP PFNGLFENCESYNCCONTEXTANGLEPROC)(GLeglContext ctx, GLenum condition, GLbitfield flags); +typedef void (GL_APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr length); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void (GL_APIENTRYP PFNGLGENQUERIESCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *ids); +typedef void (GL_APIENTRYP PFNGLGENSAMPLERSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei count, GLuint *samplers); +typedef void (GL_APIENTRYP PFNGLGENTRANSFORMFEEDBACKSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *ids); +typedef void (GL_APIENTRYP PFNGLGENVERTEXARRAYSCONTEXTANGLEPROC)(GLeglContext ctx, GLsizei n, GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKNAMECONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMSIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERI64VCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, void **params); +typedef GLint (GL_APIENTRYP PFNGLGETFRAGDATALOCATIONCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64I_VCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VCONTEXTANGLEPROC)(GLeglContext ctx, GLenum pname, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETINTEGERI_VCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLuint index, GLint *data); +typedef void (GL_APIENTRYP PFNGLGETINTERNALFORMATIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMBINARYCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint id, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYIVCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERFVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLfloat *params); +typedef void 
(GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLint *params); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGICONTEXTANGLEPROC)(GLeglContext ctx, GLenum name, GLuint index); +typedef void (GL_APIENTRYP PFNGLGETSYNCIVCONTEXTANGLEPROC)(GLeglContext ctx, GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +typedef void (GL_APIENTRYP PFNGLGETTRANSFORMFEEDBACKVARYINGCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +typedef GLuint (GL_APIENTRYP PFNGLGETUNIFORMBLOCKINDEXCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, const GLchar *uniformBlockName); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMINDICESCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMUIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLint location, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIUIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLINVALIDATEFRAMEBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei numAttachments, const GLenum *attachments); +typedef void (GL_APIENTRYP PFNGLINVALIDATESUBFRAMEBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +typedef GLboolean (GL_APIENTRYP PFNGLISQUERYCONTEXTANGLEPROC)(GLeglContext ctx, GLuint id); +typedef GLboolean (GL_APIENTRYP PFNGLISSAMPLERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler); +typedef GLboolean (GL_APIENTRYP PFNGLISSYNCCONTEXTANGLEPROC)(GLeglContext ctx, GLsync sync); +typedef GLboolean (GL_APIENTRYP PFNGLISTRANSFORMFEEDBACKCONTEXTANGLEPROC)(GLeglContext ctx, GLuint id); +typedef GLboolean (GL_APIENTRYP PFNGLISVERTEXARRAYCONTEXTANGLEPROC)(GLeglContext ctx, GLuint array); +typedef void *(GL_APIENTRYP PFNGLMAPBUFFERRANGECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (GL_APIENTRYP PFNGLPAUSETRANSFORMFEEDBACKCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLPROGRAMBINARYCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLPROGRAMPARAMETERICONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLREADBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum src); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLECONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLRESUMETRANSFORMFEEDBACKCONTEXTANGLEPROC)(GLeglContext ctx); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, const GLfloat *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERICONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, GLint param); +typedef void (GL_APIENTRYP 
PFNGLSAMPLERPARAMETERIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE3DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE3DCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTRANSFORMFEEDBACKVARYINGSCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UICONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLuint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UIVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UICONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLuint v0, GLuint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UIVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UICONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UIVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UICONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UIVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMBLOCKBINDINGCONTEXTANGLEPROC)(GLeglContext ctx, GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X3FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X4FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X2FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X4FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X2FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X3FVCONTEXTANGLEPROC)(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef GLboolean (GL_APIENTRYP PFNGLUNMAPBUFFERCONTEXTANGLEPROC)(GLeglContext ctx, GLenum target); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISORCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLuint divisor); +typedef void (GL_APIENTRYP 
PFNGLVERTEXATTRIBI4ICONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLint x, GLint y, GLint z, GLint w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4IVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, const GLint *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4UICONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4UIVCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, const GLuint *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBIPOINTERCONTEXTANGLEPROC)(GLeglContext ctx, GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLWAITSYNCCONTEXTANGLEPROC)(GLeglContext ctx, GLsync sync, GLbitfield flags, GLuint64 timeout); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBeginQueryContextANGLE(GLeglContext ctx, GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glBeginTransformFeedbackContextANGLE(GLeglContext ctx, GLenum primitiveMode); +GL_APICALL void GL_APIENTRY glBindBufferBaseContextANGLE(GLeglContext ctx, GLenum target, GLuint index, GLuint buffer); +GL_APICALL void GL_APIENTRY glBindBufferRangeContextANGLE(GLeglContext ctx, GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glBindSamplerContextANGLE(GLeglContext ctx, GLuint unit, GLuint sampler); +GL_APICALL void GL_APIENTRY glBindTransformFeedbackContextANGLE(GLeglContext ctx, GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glBindVertexArrayContextANGLE(GLeglContext ctx, GLuint array); +GL_APICALL void GL_APIENTRY glBlitFramebufferContextANGLE(GLeglContext ctx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GL_APICALL void GL_APIENTRY glClearBufferfiContextANGLE(GLeglContext ctx, GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +GL_APICALL void GL_APIENTRY glClearBufferfvContextANGLE(GLeglContext ctx, GLenum buffer, GLint drawbuffer, const GLfloat *value); +GL_APICALL void GL_APIENTRY glClearBufferivContextANGLE(GLeglContext ctx, GLenum buffer, GLint drawbuffer, const GLint *value); +GL_APICALL void GL_APIENTRY glClearBufferuivContextANGLE(GLeglContext ctx, GLenum buffer, GLint drawbuffer, const GLuint *value); +GL_APICALL GLenum GL_APIENTRY glClientWaitSyncContextANGLE(GLeglContext ctx, GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glCompressedTexImage3DContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage3DContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCopyBufferSubDataContextANGLE(GLeglContext ctx, GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glCopyTexSubImage3DContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glDeleteQueriesContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *ids); +GL_APICALL void GL_APIENTRY glDeleteSamplersContextANGLE(GLeglContext ctx, GLsizei count, const GLuint *samplers); 
+GL_APICALL void GL_APIENTRY glDeleteSyncContextANGLE(GLeglContext ctx, GLsync sync); +GL_APICALL void GL_APIENTRY glDeleteTransformFeedbacksContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *ids); +GL_APICALL void GL_APIENTRY glDeleteVertexArraysContextANGLE(GLeglContext ctx, GLsizei n, const GLuint *arrays); +GL_APICALL void GL_APIENTRY glDrawArraysInstancedContextANGLE(GLeglContext ctx, GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +GL_APICALL void GL_APIENTRY glDrawBuffersContextANGLE(GLeglContext ctx, GLsizei n, const GLenum *bufs); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedContextANGLE(GLeglContext ctx, GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +GL_APICALL void GL_APIENTRY glDrawRangeElementsContextANGLE(GLeglContext ctx, GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glEndQueryContextANGLE(GLeglContext ctx, GLenum target); +GL_APICALL void GL_APIENTRY glEndTransformFeedbackContextANGLE(GLeglContext ctx); +GL_APICALL GLsync GL_APIENTRY glFenceSyncContextANGLE(GLeglContext ctx, GLenum condition, GLbitfield flags); +GL_APICALL void GL_APIENTRY glFlushMappedBufferRangeContextANGLE(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr length); +GL_APICALL void GL_APIENTRY glFramebufferTextureLayerContextANGLE(GLeglContext ctx, GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +GL_APICALL void GL_APIENTRY glGenQueriesContextANGLE(GLeglContext ctx, GLsizei n, GLuint *ids); +GL_APICALL void GL_APIENTRY glGenSamplersContextANGLE(GLeglContext ctx, GLsizei count, GLuint *samplers); +GL_APICALL void GL_APIENTRY glGenTransformFeedbacksContextANGLE(GLeglContext ctx, GLsizei n, GLuint *ids); +GL_APICALL void GL_APIENTRY glGenVertexArraysContextANGLE(GLeglContext ctx, GLsizei n, GLuint *arrays); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockNameContextANGLE(GLeglContext ctx, GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockivContextANGLE(GLeglContext ctx, GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetActiveUniformsivContextANGLE(GLeglContext ctx, GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetBufferParameteri64vContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint64 *params); +GL_APICALL void GL_APIENTRY glGetBufferPointervContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, void **params); +GL_APICALL GLint GL_APIENTRY glGetFragDataLocationContextANGLE(GLeglContext ctx, GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetInteger64i_vContextANGLE(GLeglContext ctx, GLenum target, GLuint index, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetInteger64vContextANGLE(GLeglContext ctx, GLenum pname, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetIntegeri_vContextANGLE(GLeglContext ctx, GLenum target, GLuint index, GLint *data); +GL_APICALL void GL_APIENTRY glGetInternalformativContextANGLE(GLeglContext ctx, GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +GL_APICALL void GL_APIENTRY glGetProgramBinaryContextANGLE(GLeglContext ctx, GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +GL_APICALL void GL_APIENTRY glGetQueryObjectuivContextANGLE(GLeglContext ctx, 
GLuint id, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glGetQueryivContextANGLE(GLeglContext ctx, GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterfvContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterivContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLint *params); +GL_APICALL const GLubyte *GL_APIENTRY glGetStringiContextANGLE(GLeglContext ctx, GLenum name, GLuint index); +GL_APICALL void GL_APIENTRY glGetSyncivContextANGLE(GLeglContext ctx, GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +GL_APICALL void GL_APIENTRY glGetTransformFeedbackVaryingContextANGLE(GLeglContext ctx, GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +GL_APICALL GLuint GL_APIENTRY glGetUniformBlockIndexContextANGLE(GLeglContext ctx, GLuint program, const GLchar *uniformBlockName); +GL_APICALL void GL_APIENTRY glGetUniformIndicesContextANGLE(GLeglContext ctx, GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +GL_APICALL void GL_APIENTRY glGetUniformuivContextANGLE(GLeglContext ctx, GLuint program, GLint location, GLuint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribIivContextANGLE(GLeglContext ctx, GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribIuivContextANGLE(GLeglContext ctx, GLuint index, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glInvalidateFramebufferContextANGLE(GLeglContext ctx, GLenum target, GLsizei numAttachments, const GLenum *attachments); +GL_APICALL void GL_APIENTRY glInvalidateSubFramebufferContextANGLE(GLeglContext ctx, GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL GLboolean GL_APIENTRY glIsQueryContextANGLE(GLeglContext ctx, GLuint id); +GL_APICALL GLboolean GL_APIENTRY glIsSamplerContextANGLE(GLeglContext ctx, GLuint sampler); +GL_APICALL GLboolean GL_APIENTRY glIsSyncContextANGLE(GLeglContext ctx, GLsync sync); +GL_APICALL GLboolean GL_APIENTRY glIsTransformFeedbackContextANGLE(GLeglContext ctx, GLuint id); +GL_APICALL GLboolean GL_APIENTRY glIsVertexArrayContextANGLE(GLeglContext ctx, GLuint array); +GL_APICALL void *GL_APIENTRY glMapBufferRangeContextANGLE(GLeglContext ctx, GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GL_APICALL void GL_APIENTRY glPauseTransformFeedbackContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glProgramBinaryContextANGLE(GLeglContext ctx, GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glProgramParameteriContextANGLE(GLeglContext ctx, GLuint program, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glReadBufferContextANGLE(GLeglContext ctx, GLenum src); +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleContextANGLE(GLeglContext ctx, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glResumeTransformFeedbackContextANGLE(GLeglContext ctx); +GL_APICALL void GL_APIENTRY glSamplerParameterfContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glSamplerParameterfvContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, const GLfloat *param); +GL_APICALL void GL_APIENTRY glSamplerParameteriContextANGLE(GLeglContext 
ctx, GLuint sampler, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glSamplerParameterivContextANGLE(GLeglContext ctx, GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glTexImage3DContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexStorage2DContextANGLE(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage3DContextANGLE(GLeglContext ctx, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GL_APICALL void GL_APIENTRY glTexSubImage3DContextANGLE(GLeglContext ctx, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTransformFeedbackVaryingsContextANGLE(GLeglContext ctx, GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +GL_APICALL void GL_APIENTRY glUniform1uiContextANGLE(GLeglContext ctx, GLint location, GLuint v0); +GL_APICALL void GL_APIENTRY glUniform1uivContextANGLE(GLeglContext ctx, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform2uiContextANGLE(GLeglContext ctx, GLint location, GLuint v0, GLuint v1); +GL_APICALL void GL_APIENTRY glUniform2uivContextANGLE(GLeglContext ctx, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform3uiContextANGLE(GLeglContext ctx, GLint location, GLuint v0, GLuint v1, GLuint v2); +GL_APICALL void GL_APIENTRY glUniform3uivContextANGLE(GLeglContext ctx, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform4uiContextANGLE(GLeglContext ctx, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GL_APICALL void GL_APIENTRY glUniform4uivContextANGLE(GLeglContext ctx, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniformBlockBindingContextANGLE(GLeglContext ctx, GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +GL_APICALL void GL_APIENTRY glUniformMatrix2x3fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2x4fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x2fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x4fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x2fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x3fvContextANGLE(GLeglContext ctx, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL GLboolean GL_APIENTRY glUnmapBufferContextANGLE(GLeglContext ctx, GLenum target); +GL_APICALL void GL_APIENTRY glVertexAttribDivisorContextANGLE(GLeglContext ctx, GLuint index, GLuint divisor); +GL_APICALL void GL_APIENTRY glVertexAttribI4iContextANGLE(GLeglContext ctx, GLuint index, GLint x, GLint y, GLint z, GLint w); +GL_APICALL 
void GL_APIENTRY glVertexAttribI4ivContextANGLE(GLeglContext ctx, GLuint index, const GLint *v); +GL_APICALL void GL_APIENTRY glVertexAttribI4uiContextANGLE(GLeglContext ctx, GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GL_APICALL void GL_APIENTRY glVertexAttribI4uivContextANGLE(GLeglContext ctx, GLuint index, const GLuint *v); +GL_APICALL void GL_APIENTRY glVertexAttribIPointerContextANGLE(GLeglContext ctx, GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_APICALL void GL_APIENTRY glWaitSyncContextANGLE(GLeglContext ctx, GLsync sync, GLbitfield flags, GLuint64 timeout); +#endif diff --git a/vendor/angle/include/GLES3/gl3platform.h b/vendor/angle/include/GLES3/gl3platform.h new file mode 100644 index 00000000..8699212d --- /dev/null +++ b/vendor/angle/include/GLES3/gl3platform.h @@ -0,0 +1,27 @@ +#ifndef __gl3platform_h_ +#define __gl3platform_h_ + +/* +** Copyright 2017-2020 The Khronos Group Inc. +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* Platform-specific types and definitions for OpenGL ES 3.X gl3.h + * + * Adopters may modify khrplatform.h and this file to suit their platform. + * Please contribute modifications back to Khronos as pull requests on the + * public github repository: + * https://github.com/KhronosGroup/OpenGL-Registry + */ + +#include <KHR/khrplatform.h> + +#ifndef GL_APICALL +#define GL_APICALL KHRONOS_APICALL +#endif + +#ifndef GL_APIENTRY +#define GL_APIENTRY KHRONOS_APIENTRY +#endif + +#endif /* __gl3platform_h_ */ diff --git a/vendor/angle/include/GLSLANG/ShaderLang.h b/vendor/angle/include/GLSLANG/ShaderLang.h new file mode 100644 index 00000000..5fceb184 --- /dev/null +++ b/vendor/angle/include/GLSLANG/ShaderLang.h @@ -0,0 +1,917 @@ +// +// Copyright 2002 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +#ifndef GLSLANG_SHADERLANG_H_ +#define GLSLANG_SHADERLANG_H_ + +#include <stddef.h> + +#include "KHR/khrplatform.h" + +#include <array> +#include <map> +#include <set> +#include <string> +#include <vector> + +// +// This is the platform independent interface between an OGL driver +// and the shading language compiler. +// + +// Note: make sure to increment ANGLE_SH_VERSION when changing ShaderVars.h +#include "ShaderVars.h" + +// Version number for shader translation API. +// It is incremented every time the API changes. +#define ANGLE_SH_VERSION 254 + +enum ShShaderSpec +{ + SH_GLES2_SPEC, + SH_WEBGL_SPEC, + + SH_GLES3_SPEC, + SH_WEBGL2_SPEC, + + SH_GLES3_1_SPEC, + SH_WEBGL3_SPEC, + + SH_GLES3_2_SPEC, + + SH_GL_CORE_SPEC, + SH_GL_COMPATIBILITY_SPEC, +}; + +enum ShShaderOutput +{ + // ESSL output only supported in some configurations. + SH_ESSL_OUTPUT = 0x8B45, + + // GLSL output only supported in some configurations. + SH_GLSL_COMPATIBILITY_OUTPUT = 0x8B46, + // Note: GL introduced core profiles in 1.5. + SH_GLSL_130_OUTPUT = 0x8B47, + SH_GLSL_140_OUTPUT = 0x8B80, + SH_GLSL_150_CORE_OUTPUT = 0x8B81, + SH_GLSL_330_CORE_OUTPUT = 0x8B82, + SH_GLSL_400_CORE_OUTPUT = 0x8B83, + SH_GLSL_410_CORE_OUTPUT = 0x8B84, + SH_GLSL_420_CORE_OUTPUT = 0x8B85, + SH_GLSL_430_CORE_OUTPUT = 0x8B86, + SH_GLSL_440_CORE_OUTPUT = 0x8B87, + SH_GLSL_450_CORE_OUTPUT = 0x8B88, + + // Prefer using these to specify HLSL output type: + SH_HLSL_3_0_OUTPUT = 0x8B48, // D3D 9 + SH_HLSL_4_1_OUTPUT = 0x8B49, // D3D 11 + SH_HLSL_4_0_FL9_3_OUTPUT = 0x8B4A, // D3D 11 feature level 9_3 + + // Output SPIR-V for the Vulkan backend. + SH_SPIRV_VULKAN_OUTPUT = 0x8B4B, + + // Output SPIR-V to be cross compiled to Metal.
+ SH_SPIRV_METAL_OUTPUT = 0x8B4C, +}; + +// Compile options. +// The Compile options type is defined in ShaderVars.h, to allow ANGLE to import the ShaderVars +// header without needing the ShaderLang header. This avoids some conflicts with glslang. + +const ShCompileOptions SH_VALIDATE = 0; +const ShCompileOptions SH_VALIDATE_LOOP_INDEXING = UINT64_C(1) << 0; +const ShCompileOptions SH_INTERMEDIATE_TREE = UINT64_C(1) << 1; +const ShCompileOptions SH_OBJECT_CODE = UINT64_C(1) << 2; +const ShCompileOptions SH_VARIABLES = UINT64_C(1) << 3; +const ShCompileOptions SH_LINE_DIRECTIVES = UINT64_C(1) << 4; +const ShCompileOptions SH_SOURCE_PATH = UINT64_C(1) << 5; + +// This flag will keep invariant declaration for input in fragment shader for GLSL >=4.20 on AMD. +// From GLSL >= 4.20, it's optional to add invariant for fragment input, but GPU vendors have +// different implementations about this. Some drivers forbid invariant in fragment for GLSL>= 4.20, +// e.g. Linux Mesa, some drivers treat that as optional, e.g. NVIDIA, some drivers require invariant +// must match between vertex and fragment shader, e.g. AMD. The behavior on AMD is obviously wrong. +// Remove invariant for input in fragment shader to workaround the restriction on Intel Mesa. +// But don't remove on AMD Linux to avoid triggering the bug on AMD. +const ShCompileOptions SH_DONT_REMOVE_INVARIANT_FOR_FRAGMENT_INPUT = UINT64_C(1) << 6; + +// Due to spec difference between GLSL 4.1 or lower and ESSL3, some platforms (for example, Mac OSX +// core profile) require a variable's "invariant"/"centroid" qualifiers to match between vertex and +// fragment shader. A simple solution to allow such shaders to link is to omit the two qualifiers. +// AMD driver in Linux requires invariant qualifier to match between vertex and fragment shaders, +// while ESSL3 disallows invariant qualifier in fragment shader and GLSL >= 4.2 doesn't require +// invariant qualifier to match between shaders. Remove invariant qualifier from vertex shader to +// workaround AMD driver bug. +// Note that the two flags take effect on ESSL3 input shaders translated to GLSL 4.1 or lower and to +// GLSL 4.2 or newer on Linux AMD. +// TODO(zmo): This is not a good long-term solution. Simply dropping these qualifiers may break some +// developers' content. A more complex workaround of dynamically generating, compiling, and +// re-linking shaders that use these qualifiers should be implemented. +const ShCompileOptions SH_REMOVE_INVARIANT_AND_CENTROID_FOR_ESSL3 = UINT64_C(1) << 7; + +// This flag works around bug in Intel Mac drivers related to abs(i) where +// i is an integer. +const ShCompileOptions SH_EMULATE_ABS_INT_FUNCTION = UINT64_C(1) << 8; + +// Enforce the GLSL 1.017 Appendix A section 7 packing restrictions. +// This flag only enforces (and can only enforce) the packing +// restrictions for uniform variables in both vertex and fragment +// shaders. ShCheckVariablesWithinPackingLimits() lets embedders +// enforce the packing restrictions for varying variables during +// program link time. +const ShCompileOptions SH_ENFORCE_PACKING_RESTRICTIONS = UINT64_C(1) << 9; + +// This flag ensures all indirect (expression-based) array indexing +// is clamped to the bounds of the array. This ensures, for example, +// that you cannot read off the end of a uniform, whether an array +// vec234, or mat234 type. The ShArrayIndexClampingStrategy enum, +// specified in the ShBuiltInResources when constructing the +// compiler, selects the strategy for the clamping implementation. 
+// TODO(http://anglebug.com/4361): fix for compute shaders. +const ShCompileOptions SH_CLAMP_INDIRECT_ARRAY_BOUNDS = UINT64_C(1) << 10; + +// This flag limits the complexity of an expression. +const ShCompileOptions SH_LIMIT_EXPRESSION_COMPLEXITY = UINT64_C(1) << 11; + +// This flag limits the depth of the call stack. +const ShCompileOptions SH_LIMIT_CALL_STACK_DEPTH = UINT64_C(1) << 12; + +// This flag initializes gl_Position to vec4(0,0,0,0) at the +// beginning of the vertex shader's main(), and has no effect in the +// fragment shader. It is intended as a workaround for drivers which +// incorrectly fail to link programs if gl_Position is not written. +const ShCompileOptions SH_INIT_GL_POSITION = UINT64_C(1) << 13; + +// This flag replaces +// "a && b" with "a ? b : false", +// "a || b" with "a ? true : b". +// This is to work around a MacOSX driver bug that |b| is executed +// independent of |a|'s value. +const ShCompileOptions SH_UNFOLD_SHORT_CIRCUIT = UINT64_C(1) << 14; + +// This flag initializes output variables to 0 at the beginning of main(). +// It is to avoid undefined behaviors. +const ShCompileOptions SH_INIT_OUTPUT_VARIABLES = UINT64_C(1) << 15; + +// This flag scalarizes vec/ivec/bvec/mat constructor args. +// It is intended as a workaround for Linux/Mac driver bugs. +const ShCompileOptions SH_SCALARIZE_VEC_AND_MAT_CONSTRUCTOR_ARGS = UINT64_C(1) << 16; + +// This flag overwrites a struct name with a unique prefix. +// It is intended as a workaround for drivers that do not handle +// struct scopes correctly, including all Mac drivers and Linux AMD. +const ShCompileOptions SH_REGENERATE_STRUCT_NAMES = UINT64_C(1) << 17; + +// This flag makes the compiler not prune unused function early in the +// compilation process. Pruning coupled with SH_LIMIT_CALL_STACK_DEPTH +// helps avoid bad shaders causing stack overflows. +const ShCompileOptions SH_DONT_PRUNE_UNUSED_FUNCTIONS = UINT64_C(1) << 18; + +// This flag works around a bug in NVIDIA 331 series drivers related +// to pow(x, y) where y is a constant vector. +const ShCompileOptions SH_REMOVE_POW_WITH_CONSTANT_EXPONENT = UINT64_C(1) << 19; + +// This flag works around bugs in Mac drivers related to do-while by +// transforming them into an other construct. +const ShCompileOptions SH_REWRITE_DO_WHILE_LOOPS = UINT64_C(1) << 20; + +// This flag works around a bug in the HLSL compiler optimizer that folds certain +// constant pow expressions incorrectly. Only applies to the HLSL back-end. It works +// by expanding the integer pow expressions into a series of multiplies. +const ShCompileOptions SH_EXPAND_SELECT_HLSL_INTEGER_POW_EXPRESSIONS = UINT64_C(1) << 21; + +// Flatten "#pragma STDGL invariant(all)" into the declarations of +// varying variables and built-in GLSL variables. This compiler +// option is enabled automatically when needed. +const ShCompileOptions SH_FLATTEN_PRAGMA_STDGL_INVARIANT_ALL = UINT64_C(1) << 22; + +// Some drivers do not take into account the base level of the texture in the results of the +// HLSL GetDimensions builtin. This flag instructs the compiler to manually add the base level +// offsetting. +const ShCompileOptions SH_HLSL_GET_DIMENSIONS_IGNORES_BASE_LEVEL = UINT64_C(1) << 23; + +// This flag works around an issue in translating GLSL function texelFetchOffset on +// INTEL drivers. It works by translating texelFetchOffset into texelFetch. 
+const ShCompileOptions SH_REWRITE_TEXELFETCHOFFSET_TO_TEXELFETCH = UINT64_C(1) << 24; + +// This flag works around condition bug of for and while loops in Intel Mac OSX drivers. +// Condition calculation is not correct. Rewrite it from "CONDITION" to "CONDITION && true". +const ShCompileOptions SH_ADD_AND_TRUE_TO_LOOP_CONDITION = UINT64_C(1) << 25; + +// This flag works around a bug in evaluating unary minus operator on integer on some INTEL +// drivers. It works by translating -(int) into ~(int) + 1. +const ShCompileOptions SH_REWRITE_INTEGER_UNARY_MINUS_OPERATOR = UINT64_C(1) << 26; + +// This flag works around a bug in evaluating isnan() on some INTEL D3D and Mac OSX drivers. +// It works by using an expression to emulate this function. +const ShCompileOptions SH_EMULATE_ISNAN_FLOAT_FUNCTION = UINT64_C(1) << 27; + +// This flag will use all uniforms of unused std140 and shared uniform blocks at the +// beginning of the vertex/fragment shader's main(). It is intended as a workaround for Mac +// drivers with shader version 4.10. In those drivers, they will treat unused +// std140 and shared uniform blocks' members as inactive. However, WebGL2.0 based on +// OpenGL ES3.0.4 requires all members of a named uniform block declared with a shared or std140 +// layout qualifier to be considered active. The uniform block itself is also considered active. +const ShCompileOptions SH_USE_UNUSED_STANDARD_SHARED_BLOCKS = UINT64_C(1) << 28; + +// This flag works around a bug in unary minus operator on float numbers on Intel +// Mac OSX 10.11 drivers. It works by translating -float into 0.0 - float. +const ShCompileOptions SH_REWRITE_FLOAT_UNARY_MINUS_OPERATOR = UINT64_C(1) << 29; + +// This flag works around a bug in evaluating atan(y, x) on some NVIDIA OpenGL drivers. +// It works by using an expression to emulate this function. +const ShCompileOptions SH_EMULATE_ATAN2_FLOAT_FUNCTION = UINT64_C(1) << 30; + +// Set to initialize uninitialized local and global temporary variables. Should only be used with +// GLSL output. In HLSL output variables are initialized regardless of if this flag is set. +const ShCompileOptions SH_INITIALIZE_UNINITIALIZED_LOCALS = UINT64_C(1) << 31; + +// The flag modifies the shader in the following way: +// Every occurrence of gl_InstanceID is replaced by the global temporary variable InstanceID. +// Every occurrence of gl_ViewID_OVR is replaced by the varying variable ViewID_OVR. +// At the beginning of the body of main() in a vertex shader the following initializers are added: +// ViewID_OVR = uint(gl_InstanceID) % num_views; +// InstanceID = gl_InstanceID / num_views; +// ViewID_OVR is added as a varying variable to both the vertex and fragment shaders. +const ShCompileOptions SH_INITIALIZE_BUILTINS_FOR_INSTANCED_MULTIVIEW = UINT64_C(1) << 32; + +// With the flag enabled the GLSL/ESSL vertex shader is modified to include code for viewport +// selection in the following way: +// - Code to enable the extension ARB_shader_viewport_layer_array/NV_viewport_array2 is included. +// - Code to select the viewport index or layer is inserted at the beginning of main after +// ViewID_OVR's initialization. +// - A declaration of the uniform multiviewBaseViewLayerIndex. +// Note: The SH_INITIALIZE_BUILTINS_FOR_INSTANCED_MULTIVIEW flag also has to be enabled to have the +// temporary variable ViewID_OVR declared and initialized. 
+const ShCompileOptions SH_SELECT_VIEW_IN_NV_GLSL_VERTEX_SHADER = UINT64_C(1) << 33; + +// If the flag is enabled, gl_PointSize is clamped to the maximum point size specified in +// ShBuiltInResources in vertex shaders. +const ShCompileOptions SH_CLAMP_POINT_SIZE = UINT64_C(1) << 34; + +// Turn some arithmetic operations that operate on a float vector-scalar pair into vector-vector +// operations. This is done recursively. Some scalar binary operations inside vector constructors +// are also turned into vector operations. +// +// This is targeted to work around a bug in NVIDIA OpenGL drivers that was reproducible on NVIDIA +// driver version 387.92. It works around the most common occurrences of the bug. +const ShCompileOptions SH_REWRITE_VECTOR_SCALAR_ARITHMETIC = UINT64_C(1) << 35; + +// Don't use loops to initialize uninitialized variables. Only has an effect if some kind of +// variable initialization is turned on. +const ShCompileOptions SH_DONT_USE_LOOPS_TO_INITIALIZE_VARIABLES = UINT64_C(1) << 36; + +// Don't use D3D constant register zero when allocating space for uniforms. This is targeted to work +// around a bug in NVIDIA D3D driver version 388.59 where in very specific cases the driver would +// not handle constant register zero correctly. Only has an effect on HLSL translation. +const ShCompileOptions SH_SKIP_D3D_CONSTANT_REGISTER_ZERO = UINT64_C(1) << 37; + +// Clamp gl_FragDepth to the range [0.0, 1.0] in case it is statically used. +const ShCompileOptions SH_CLAMP_FRAG_DEPTH = UINT64_C(1) << 38; + +// Rewrite expressions like "v.x = z = expression;". Works around a bug in NVIDIA OpenGL drivers +// prior to version 397.31. +const ShCompileOptions SH_REWRITE_REPEATED_ASSIGN_TO_SWIZZLED = UINT64_C(1) << 39; + +// Rewrite gl_DrawID as a uniform int +const ShCompileOptions SH_EMULATE_GL_DRAW_ID = UINT64_C(1) << 40; + +// This flag initializes shared variables to 0. +// It is to avoid compute shaders being able to read undefined values that could be coming from +// another webpage/application. +const ShCompileOptions SH_INIT_SHARED_VARIABLES = UINT64_C(1) << 41; + +// Forces the value returned from an atomic operation to always be resolved. This is targeted to +// workaround a bug in NVIDIA D3D driver where the return value from +// RWByteAddressBuffer.InterlockedAdd does not get resolved when used in the .yzw components of a +// RWByteAddressBuffer.Store operation. Only has an effect on HLSL translation. +// http://anglebug.com/3246 +const ShCompileOptions SH_FORCE_ATOMIC_VALUE_RESOLUTION = UINT64_C(1) << 42; + +// Rewrite gl_BaseVertex and gl_BaseInstance as uniform int +const ShCompileOptions SH_EMULATE_GL_BASE_VERTEX_BASE_INSTANCE = UINT64_C(1) << 43; + +// Emulate seamful cube map sampling for OpenGL ES2.0. Currently only applies to the Vulkan +// backend, as is done after samplers are moved out of structs. Can likely be made to work on +// the other backends as well. +const ShCompileOptions SH_EMULATE_SEAMFUL_CUBE_MAP_SAMPLING = UINT64_C(1) << 44; + +// This flag controls how to translate WEBGL_video_texture sampling function. +const ShCompileOptions SH_TAKE_VIDEO_TEXTURE_AS_EXTERNAL_OES = UINT64_C(1) << 45; + +// If requested, validates the AST after every transformation. Useful for debugging. +const ShCompileOptions SH_VALIDATE_AST = UINT64_C(1) << 46; + +// Bit 47 is available + +// This flag works around an inconsistent behavior in Mac AMD driver where gl_VertexID doesn't +// include base vertex value.
It replaces gl_VertexID with (gl_VertexID + angle_BaseVertex) +// when angle_BaseVertex is available. +const ShCompileOptions SH_ADD_BASE_VERTEX_TO_VERTEX_ID = UINT64_C(1) << 48; + +// This works around the dynamic lvalue indexing of swizzled vectors on various platforms. +const ShCompileOptions SH_REMOVE_DYNAMIC_INDEXING_OF_SWIZZLED_VECTOR = UINT64_C(1) << 49; + +// This flag works around a slow fxc compile performance issue with dynamic uniform indexing. +const ShCompileOptions SH_ALLOW_TRANSLATE_UNIFORM_BLOCK_TO_STRUCTUREDBUFFER = UINT64_C(1) << 50; + +// This flag indicates whether Bresenham line raster emulation code should be generated. This +// emulation is necessary if the backend uses a different algorithm to draw lines. Currently only +// implemented for the Vulkan backend. +const ShCompileOptions SH_ADD_BRESENHAM_LINE_RASTER_EMULATION = UINT64_C(1) << 51; + +// This flag allows disabling ARB_texture_rectangle on a per-compile basis. This is necessary +// for WebGL contexts because ARB_texture_rectangle may be necessary for the WebGL implementation +// internally but shouldn't be exposed to WebGL user code. +const ShCompileOptions SH_DISABLE_ARB_TEXTURE_RECTANGLE = UINT64_C(1) << 52; + +// This flag works around a driver bug by rewriting uses of row-major matrices +// as column-major in ESSL 3.00 and greater shaders. +const ShCompileOptions SH_REWRITE_ROW_MAJOR_MATRICES = UINT64_C(1) << 53; + +// Drop any explicit precision qualifiers from shader. +const ShCompileOptions SH_IGNORE_PRECISION_QUALIFIERS = UINT64_C(1) << 54; + +// Allow compiler to do early fragment tests as an optimization. +const ShCompileOptions SH_EARLY_FRAGMENT_TESTS_OPTIMIZATION = UINT64_C(1) << 55; + +// Allow compiler to insert Android pre-rotation code. +const ShCompileOptions SH_ADD_PRE_ROTATION = UINT64_C(1) << 56; + +const ShCompileOptions SH_FORCE_SHADER_PRECISION_HIGHP_TO_MEDIUMP = UINT64_C(1) << 57; + +// Allow compiler to use specialization constant to do pre-rotation and y flip. +const ShCompileOptions SH_USE_SPECIALIZATION_CONSTANT = UINT64_C(1) << 58; + +// Ask compiler to generate Vulkan transform feedback emulation support code. +const ShCompileOptions SH_ADD_VULKAN_XFB_EMULATION_SUPPORT_CODE = UINT64_C(1) << 59; + +// Ask compiler to generate Vulkan transform feedback support code when using the +// VK_EXT_transform_feedback extension. +const ShCompileOptions SH_ADD_VULKAN_XFB_EXTENSION_SUPPORT_CODE = UINT64_C(1) << 60; + +// This flag initializes fragment shader's output variables to zero at the beginning of the fragment +// shader's main(). It is intended as a workaround for drivers which get context lost if +// gl_FragColor is not written. +const ShCompileOptions SH_INIT_FRAGMENT_OUTPUT_VARIABLES = UINT64_C(1) << 61; + +// Defines alternate strategies for implementing array index clamping. +enum ShArrayIndexClampingStrategy +{ + // Use the clamp intrinsic for array index clamping. + SH_CLAMP_WITH_CLAMP_INTRINSIC = 1, + + // Use a user-defined function for array index clamping. + SH_CLAMP_WITH_USER_DEFINED_INT_CLAMP_FUNCTION +}; + +// The 64 bits hash function. The first parameter is the input string; the +// second parameter is the string length. +using ShHashFunction64 = khronos_uint64_t (*)(const char *, size_t); + +// +// Implementation dependent built-in resources (constants and extensions). +// The names for these resources have been obtained by stripping gl_/GL_. +// +struct ShBuiltInResources +{ + // Constants.
+ int MaxVertexAttribs; + int MaxVertexUniformVectors; + int MaxVaryingVectors; + int MaxVertexTextureImageUnits; + int MaxCombinedTextureImageUnits; + int MaxTextureImageUnits; + int MaxFragmentUniformVectors; + int MaxDrawBuffers; + + // Extensions. + // Set to 1 to enable the extension, else 0. + int OES_standard_derivatives; + int OES_EGL_image_external; + int OES_EGL_image_external_essl3; + int NV_EGL_stream_consumer_external; + int ARB_texture_rectangle; + int EXT_blend_func_extended; + int EXT_draw_buffers; + int EXT_frag_depth; + int EXT_shader_texture_lod; + int WEBGL_debug_shader_precision; + int EXT_shader_framebuffer_fetch; + int EXT_shader_framebuffer_fetch_non_coherent; + int NV_shader_framebuffer_fetch; + int NV_shader_noperspective_interpolation; + int ARM_shader_framebuffer_fetch; + int OVR_multiview; + int OVR_multiview2; + int EXT_multisampled_render_to_texture; + int EXT_multisampled_render_to_texture2; + int EXT_YUV_target; + int EXT_geometry_shader; + int OES_shader_io_blocks; + int EXT_shader_io_blocks; + int EXT_gpu_shader5; + int EXT_shader_non_constant_global_initializers; + int OES_texture_storage_multisample_2d_array; + int OES_texture_3D; + int ANGLE_texture_multisample; + int ANGLE_multi_draw; + int ANGLE_base_vertex_base_instance; + int WEBGL_video_texture; + int APPLE_clip_distance; + int OES_texture_cube_map_array; + int EXT_texture_cube_map_array; + int EXT_shadow_samplers; + int OES_shader_multisample_interpolation; + int OES_shader_image_atomic; + int EXT_tessellation_shader; + int OES_texture_buffer; + int EXT_texture_buffer; + int OES_sample_variables; + int EXT_clip_cull_distance; + + // Set to 1 to enable replacing GL_EXT_draw_buffers #extension directives + // with GL_NV_draw_buffers in ESSL output. This flag can be used to emulate + // EXT_draw_buffers by using it in combination with GLES3.0 glDrawBuffers + // function. This applies to Tegra K1 devices. + int NV_draw_buffers; + + // Set to 1 if highp precision is supported in the ESSL 1.00 version of the + // fragment language. Does not affect versions of the language where highp + // support is mandatory. + // Default is 0. + int FragmentPrecisionHigh; + + // GLSL ES 3.0 constants. + int MaxVertexOutputVectors; + int MaxFragmentInputVectors; + int MinProgramTexelOffset; + int MaxProgramTexelOffset; + + // Extension constants. + + // Value of GL_MAX_DUAL_SOURCE_DRAW_BUFFERS_EXT for OpenGL ES output context. + // Value of GL_MAX_DUAL_SOURCE_DRAW_BUFFERS for OpenGL output context. + // GLES SL version 100 gl_MaxDualSourceDrawBuffersEXT value for EXT_blend_func_extended. + int MaxDualSourceDrawBuffers; + + // Value of GL_MAX_VIEWS_OVR. + int MaxViewsOVR; + + // Name Hashing. + // Set a 64 bit hash function to enable user-defined name hashing. + // Default is NULL. + ShHashFunction64 HashFunction; + + // Selects a strategy to use when implementing array index clamping. + // Default is SH_CLAMP_WITH_CLAMP_INTRINSIC. + ShArrayIndexClampingStrategy ArrayIndexClampingStrategy; + + // The maximum complexity an expression can be when SH_LIMIT_EXPRESSION_COMPLEXITY is turned on. + int MaxExpressionComplexity; + + // The maximum depth a call stack can be. + int MaxCallStackDepth; + + // The maximum number of parameters a function can have when SH_LIMIT_EXPRESSION_COMPLEXITY is + // turned on. + int MaxFunctionParameters; + + // GLES 3.1 constants + + // texture gather offset constraints. 
+ int MinProgramTextureGatherOffset; + int MaxProgramTextureGatherOffset; + + // maximum number of available image units + int MaxImageUnits; + + // OES_sample_variables constant + // maximum number of available samples + int MaxSamples; + + // maximum number of image uniforms in a vertex shader + int MaxVertexImageUniforms; + + // maximum number of image uniforms in a fragment shader + int MaxFragmentImageUniforms; + + // maximum number of image uniforms in a compute shader + int MaxComputeImageUniforms; + + // maximum total number of image uniforms in a program + int MaxCombinedImageUniforms; + + // maximum number of uniform locations + int MaxUniformLocations; + + // maximum number of ssbos and images in a shader + int MaxCombinedShaderOutputResources; + + // maximum number of groups in each dimension + std::array<int, 3> MaxComputeWorkGroupCount; + // maximum number of threads per work group in each dimension + std::array<int, 3> MaxComputeWorkGroupSize; + + // maximum number of total uniform components + int MaxComputeUniformComponents; + + // maximum number of texture image units in a compute shader + int MaxComputeTextureImageUnits; + + // maximum number of atomic counters in a compute shader + int MaxComputeAtomicCounters; + + // maximum number of atomic counter buffers in a compute shader + int MaxComputeAtomicCounterBuffers; + + // maximum number of atomic counters in a vertex shader + int MaxVertexAtomicCounters; + + // maximum number of atomic counters in a fragment shader + int MaxFragmentAtomicCounters; + + // maximum number of atomic counters in a program + int MaxCombinedAtomicCounters; + + // maximum binding for an atomic counter + int MaxAtomicCounterBindings; + + // maximum number of atomic counter buffers in a vertex shader + int MaxVertexAtomicCounterBuffers; + + // maximum number of atomic counter buffers in a fragment shader + int MaxFragmentAtomicCounterBuffers; + + // maximum number of atomic counter buffers in a program + int MaxCombinedAtomicCounterBuffers; + + // maximum number of buffer object storage in machine units + int MaxAtomicCounterBufferSize; + + // maximum number of uniform block bindings + int MaxUniformBufferBindings; + + // maximum number of shader storage buffer bindings + int MaxShaderStorageBufferBindings; + + // maximum point size (higher limit from ALIASED_POINT_SIZE_RANGE) + float MaxPointSize; + + // EXT_geometry_shader constants + int MaxGeometryUniformComponents; + int MaxGeometryUniformBlocks; + int MaxGeometryInputComponents; + int MaxGeometryOutputComponents; + int MaxGeometryOutputVertices; + int MaxGeometryTotalOutputComponents; + int MaxGeometryTextureImageUnits; + int MaxGeometryAtomicCounterBuffers; + int MaxGeometryAtomicCounters; + int MaxGeometryShaderStorageBlocks; + int MaxGeometryShaderInvocations; + int MaxGeometryImageUniforms; + + // EXT_tessellation_shader constants + int MaxTessControlInputComponents; + int MaxTessControlOutputComponents; + int MaxTessControlTextureImageUnits; + int MaxTessControlUniformComponents; + int MaxTessControlTotalOutputComponents; + int MaxTessControlImageUniforms; + int MaxTessControlAtomicCounters; + int MaxTessControlAtomicCounterBuffers; + + int MaxTessPatchComponents; + int MaxPatchVertices; + int MaxTessGenLevel; + + int MaxTessEvaluationInputComponents; + int MaxTessEvaluationOutputComponents; + int MaxTessEvaluationTextureImageUnits; + int MaxTessEvaluationUniformComponents; + int MaxTessEvaluationImageUniforms; + int MaxTessEvaluationAtomicCounters; + int MaxTessEvaluationAtomicCounterBuffers; + + //
Subpixel bits used in rasterization. + int SubPixelBits; + + // APPLE_clip_distance/EXT_clip_cull_distance constant + int MaxClipDistances; + int MaxCullDistances; + int MaxCombinedClipAndCullDistances; +}; + +// +// ShHandle held by but opaque to the driver. It is allocated, +// managed, and de-allocated by the compiler. Its contents +// are defined by and used by the compiler. +// +// If handle creation fails, 0 will be returned. +// +using ShHandle = void *; + +namespace sh +{ + +// +// Driver must call this first, once, before doing any other compiler operations. +// If the function succeeds, the return value is true, else false. +// +bool Initialize(); +// +// Driver should call this at shutdown. +// If the function succeeds, the return value is true, else false. +// +bool Finalize(); + +// +// Initialize built-in resources with minimum expected values. +// Parameters: +// resources: The object to initialize. Will be comparable with memcmp. +// +void InitBuiltInResources(ShBuiltInResources *resources); + +// +// Returns the a concatenated list of the items in ShBuiltInResources as a null-terminated string. +// This function must be updated whenever ShBuiltInResources is changed. +// Parameters: +// handle: Specifies the handle of the compiler to be used. +const std::string &GetBuiltInResourcesString(const ShHandle handle); + +// +// Driver calls these to create and destroy compiler objects. +// +// Returns the handle of constructed compiler, null if the requested compiler is not supported. +// Parameters: +// type: Specifies the type of shader - GL_FRAGMENT_SHADER or GL_VERTEX_SHADER. +// spec: Specifies the language spec the compiler must conform to - SH_GLES2_SPEC or SH_WEBGL_SPEC. +// output: Specifies the output code type - for example SH_ESSL_OUTPUT, SH_GLSL_OUTPUT, +// SH_HLSL_3_0_OUTPUT or SH_HLSL_4_1_OUTPUT. Note: Each output type may only +// be supported in some configurations. +// resources: Specifies the built-in resources. +ShHandle ConstructCompiler(sh::GLenum type, + ShShaderSpec spec, + ShShaderOutput output, + const ShBuiltInResources *resources); +void Destruct(ShHandle handle); + +// +// Compiles the given shader source. +// If the function succeeds, the return value is true, else false. +// Parameters: +// handle: Specifies the handle of compiler to be used. +// shaderStrings: Specifies an array of pointers to null-terminated strings containing the shader +// source code. +// numStrings: Specifies the number of elements in shaderStrings array. +// compileOptions: A mask containing the following parameters: +// SH_VALIDATE: Validates shader to ensure that it conforms to the spec +// specified during compiler construction. +// SH_VALIDATE_LOOP_INDEXING: Validates loop and indexing in the shader to +// ensure that they do not exceed the minimum +// functionality mandated in GLSL 1.0 spec, +// Appendix A, Section 4 and 5. +// There is no need to specify this parameter when +// compiling for WebGL - it is implied. +// SH_INTERMEDIATE_TREE: Writes intermediate tree to info log. +// Can be queried by calling sh::GetInfoLog(). +// SH_OBJECT_CODE: Translates intermediate tree to glsl or hlsl shader. +// Can be queried by calling sh::GetObjectCode(). +// SH_VARIABLES: Extracts attributes, uniforms, and varyings. +// Can be queried by calling ShGetVariableInfo(). +// +bool Compile(const ShHandle handle, + const char *const shaderStrings[], + size_t numStrings, + ShCompileOptions compileOptions); + +// Clears the results from the previous compilation. 
+void ClearResults(const ShHandle handle); + +// Return the version of the shader language. +int GetShaderVersion(const ShHandle handle); + +// Return the currently set language output type. +ShShaderOutput GetShaderOutputType(const ShHandle handle); + +// Returns null-terminated information log for a compiled shader. +// Parameters: +// handle: Specifies the compiler +const std::string &GetInfoLog(const ShHandle handle); + +// Returns null-terminated object code for a compiled shader. +// Parameters: +// handle: Specifies the compiler +const std::string &GetObjectCode(const ShHandle handle); + +// Returns a (original_name, hash) map containing all the user defined names in the shader, +// including variable names, function names, struct names, and struct field names. +// Parameters: +// handle: Specifies the compiler +const std::map<std::string, std::string> *GetNameHashingMap(const ShHandle handle); + +// Shader variable inspection. +// Returns a pointer to a list of variables of the designated type. +// (See ShaderVars.h for type definitions, included above) +// Returns NULL on failure. +// Parameters: +// handle: Specifies the compiler +const std::vector<sh::ShaderVariable> *GetUniforms(const ShHandle handle); +const std::vector<sh::ShaderVariable> *GetVaryings(const ShHandle handle); +const std::vector<sh::ShaderVariable> *GetInputVaryings(const ShHandle handle); +const std::vector<sh::ShaderVariable> *GetOutputVaryings(const ShHandle handle); +const std::vector<sh::ShaderVariable> *GetAttributes(const ShHandle handle); +const std::vector<sh::ShaderVariable> *GetOutputVariables(const ShHandle handle); +const std::vector<sh::InterfaceBlock> *GetInterfaceBlocks(const ShHandle handle); +const std::vector<sh::InterfaceBlock> *GetUniformBlocks(const ShHandle handle); +const std::vector<sh::InterfaceBlock> *GetShaderStorageBlocks(const ShHandle handle); +sh::WorkGroupSize GetComputeShaderLocalGroupSize(const ShHandle handle); +// Returns the number of views specified through the num_views layout qualifier. If num_views is +// not set, the function returns -1. +int GetVertexShaderNumViews(const ShHandle handle); +// Returns true if compiler has injected instructions for early fragment tests as an optimization +bool HasEarlyFragmentTestsOptimization(const ShHandle handle); + +// Returns specialization constant usage bits +uint32_t GetShaderSpecConstUsageBits(const ShHandle handle); + +// Returns true if the passed in variables pack in maxVectors following the packing rules from the +// GLSL 1.017 spec, Appendix A, section 7. +// Returns false otherwise. Also look at the SH_ENFORCE_PACKING_RESTRICTIONS +// flag above. +// Parameters: +// maxVectors: the available rows of registers. +// variables: an array of variables. +bool CheckVariablesWithinPackingLimits(int maxVectors, + const std::vector<sh::ShaderVariable> &variables); + +// Gives the compiler-assigned register for a shader storage block. +// The method writes the value to the output variable "indexOut". +// Returns true if it found a valid shader storage block, false otherwise. +// Parameters: +// handle: Specifies the compiler +// shaderStorageBlockName: Specifies the shader storage block +// indexOut: output variable that stores the assigned register +bool GetShaderStorageBlockRegister(const ShHandle handle, + const std::string &shaderStorageBlockName, + unsigned int *indexOut); + +// Gives the compiler-assigned register for a uniform block. +// The method writes the value to the output variable "indexOut". +// Returns true if it found a valid uniform block, false otherwise.
+// Parameters: +// handle: Specifies the compiler +// uniformBlockName: Specifies the uniform block +// indexOut: output variable that stores the assigned register +bool GetUniformBlockRegister(const ShHandle handle, + const std::string &uniformBlockName, + unsigned int *indexOut); + +bool ShouldUniformBlockUseStructuredBuffer(const ShHandle handle, + const std::string &uniformBlockName); +const std::set<std::string> *GetSlowCompilingUniformBlockSet(const ShHandle handle); + +// Gives a map from uniform names to compiler-assigned registers in the default uniform block. +// Note that the map contains also registers of samplers that have been extracted from structs. +const std::map<std::string, unsigned int> *GetUniformRegisterMap(const ShHandle handle); + +// Sampler, image and atomic counters share registers (t type and u type), +// GetReadonlyImage2DRegisterIndex and GetImage2DRegisterIndex return the first index into +// a range of reserved registers for image2D/iimage2D/uimage2D variables. +// Parameters: handle: Specifies the compiler +unsigned int GetReadonlyImage2DRegisterIndex(const ShHandle handle); +unsigned int GetImage2DRegisterIndex(const ShHandle handle); + +// The method records these used function names related with image2D/iimage2D/uimage2D, these +// functions will be dynamically generated. +// Parameters: +// handle: Specifies the compiler +const std::set<std::string> *GetUsedImage2DFunctionNames(const ShHandle handle); + +bool HasValidGeometryShaderInputPrimitiveType(const ShHandle handle); +bool HasValidGeometryShaderOutputPrimitiveType(const ShHandle handle); +bool HasValidGeometryShaderMaxVertices(const ShHandle handle); +bool HasValidTessGenMode(const ShHandle handle); +bool HasValidTessGenSpacing(const ShHandle handle); +bool HasValidTessGenVertexOrder(const ShHandle handle); +bool HasValidTessGenPointMode(const ShHandle handle); +GLenum GetGeometryShaderInputPrimitiveType(const ShHandle handle); +GLenum GetGeometryShaderOutputPrimitiveType(const ShHandle handle); +int GetGeometryShaderInvocations(const ShHandle handle); +int GetGeometryShaderMaxVertices(const ShHandle handle); +unsigned int GetShaderSharedMemorySize(const ShHandle handle); +int GetTessControlShaderVertices(const ShHandle handle); +GLenum GetTessGenMode(const ShHandle handle); +GLenum GetTessGenSpacing(const ShHandle handle); +GLenum GetTessGenVertexOrder(const ShHandle handle); +GLenum GetTessGenPointMode(const ShHandle handle); + +// +// Helper function to identify specs that are based on the WebGL spec. +// +inline bool IsWebGLBasedSpec(ShShaderSpec spec) +{ + return (spec == SH_WEBGL_SPEC || spec == SH_WEBGL2_SPEC || spec == SH_WEBGL3_SPEC); +} + +// +// Helper function to identify DesktopGL specs +// +inline bool IsDesktopGLSpec(ShShaderSpec spec) +{ + return spec == SH_GL_CORE_SPEC || spec == SH_GL_COMPATIBILITY_SPEC; +} + +// Can't prefix with just _ because then we might introduce a double underscore, which is not safe +// in GLSL (ESSL 3.00.6 section 3.8: All identifiers containing a double underscore are reserved for +// use by the underlying implementation). u is short for user-defined.
+extern const char kUserDefinedNamePrefix[]; + +namespace vk +{ + +// Specialization constant ids +enum class SpecializationConstantId : uint32_t +{ + LineRasterEmulation = 0, + SurfaceRotation = 1, + DrawableWidth = 2, + DrawableHeight = 3, + + InvalidEnum = 4, + EnumCount = InvalidEnum, +}; + +enum class SurfaceRotation : uint32_t +{ + Identity, + Rotated90Degrees, + Rotated180Degrees, + Rotated270Degrees, + FlippedIdentity, + FlippedRotated90Degrees, + FlippedRotated180Degrees, + FlippedRotated270Degrees, + + InvalidEnum, + EnumCount = InvalidEnum, +}; + +enum class SpecConstUsage : uint32_t +{ + LineRasterEmulation = 0, + YFlip = 1, + Rotation = 2, + DrawableSize = 3, + + InvalidEnum = 4, + EnumCount = InvalidEnum, +}; + +// Interface block name containing the aggregate default uniforms +extern const char kDefaultUniformsNameVS[]; +extern const char kDefaultUniformsNameTCS[]; +extern const char kDefaultUniformsNameTES[]; +extern const char kDefaultUniformsNameGS[]; +extern const char kDefaultUniformsNameFS[]; +extern const char kDefaultUniformsNameCS[]; + +// Interface block and variable names containing driver uniforms +extern const char kDriverUniformsBlockName[]; +extern const char kDriverUniformsVarName[]; + +// Interface block array name used for atomic counter emulation +extern const char kAtomicCountersBlockName[]; + +// Line raster emulation varying +extern const char kLineRasterEmulationPosition[]; + +// Transform feedback emulation support +extern const char kXfbEmulationGetOffsetsFunctionName[]; +extern const char kXfbEmulationCaptureFunctionName[]; +extern const char kXfbEmulationBufferBlockName[]; +extern const char kXfbEmulationBufferName[]; +extern const char kXfbEmulationBufferFieldName[]; + +// Transform feedback extension support +extern const char kXfbExtensionPositionOutName[]; + +// EXT_shader_framebuffer_fetch and EXT_shader_framebuffer_fetch_non_coherent +extern const char kInputAttachmentName[]; + +} // namespace vk + +namespace mtl +{ +// Specialization constant to enable GL_SAMPLE_COVERAGE_VALUE emulation. +extern const char kCoverageMaskEnabledConstName[]; + +// Specialization constant to emulate rasterizer discard. +extern const char kRasterizerDiscardEnabledConstName[]; +} // namespace mtl +} // namespace sh + +#endif // GLSLANG_SHADERLANG_H_ diff --git a/vendor/angle/include/GLSLANG/ShaderVars.h b/vendor/angle/include/GLSLANG/ShaderVars.h new file mode 100644 index 00000000..a8f3bb28 --- /dev/null +++ b/vendor/angle/include/GLSLANG/ShaderVars.h @@ -0,0 +1,317 @@ +// +// Copyright 2013 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// ShaderVars.h: +// Types to represent GL variables (varyings, uniforms, etc) +// + +#ifndef GLSLANG_SHADERVARS_H_ +#define GLSLANG_SHADERVARS_H_ + +#include +#include +#include +#include + +// This type is defined here to simplify ANGLE's integration with glslang for SPIRv. 
+using ShCompileOptions = uint64_t; + +namespace sh +{ +// GLenum alias +typedef unsigned int GLenum; + +// Varying interpolation qualifier, see section 4.3.9 of the ESSL 3.00.4 spec +enum InterpolationType +{ + INTERPOLATION_SMOOTH, + INTERPOLATION_CENTROID, + INTERPOLATION_SAMPLE, + INTERPOLATION_FLAT, + INTERPOLATION_NOPERSPECTIVE +}; + +// Validate link & SSO consistency of interpolation qualifiers +bool InterpolationTypesMatch(InterpolationType a, InterpolationType b); + +// Uniform block layout qualifier, see section 4.3.8.3 of the ESSL 3.00.4 spec +enum BlockLayoutType +{ + BLOCKLAYOUT_STANDARD, + BLOCKLAYOUT_STD140 = BLOCKLAYOUT_STANDARD, + BLOCKLAYOUT_STD430, // Shader storage block layout qualifier + BLOCKLAYOUT_PACKED, + BLOCKLAYOUT_SHARED +}; + +// Interface Blocks, see section 4.3.9 of the ESSL 3.10 spec +enum class BlockType +{ + BLOCK_UNIFORM, + BLOCK_BUFFER, +}; + +// Base class for all variables defined in shaders, including Varyings, Uniforms, etc +// Note: we must override the copy constructor and assignment operator so we can +// work around excessive GCC binary bloating: +// See https://code.google.com/p/angleproject/issues/detail?id=697 +struct ShaderVariable +{ + ShaderVariable(); + ShaderVariable(GLenum typeIn); + ShaderVariable(GLenum typeIn, unsigned int arraySizeIn); + ~ShaderVariable(); + ShaderVariable(const ShaderVariable &other); + ShaderVariable &operator=(const ShaderVariable &other); + bool operator==(const ShaderVariable &other) const; + bool operator!=(const ShaderVariable &other) const { return !operator==(other); } + + bool isArrayOfArrays() const { return arraySizes.size() >= 2u; } + bool isArray() const { return !arraySizes.empty(); } + unsigned int getArraySizeProduct() const; + // Return the inner array size product. + // For example, if there's a variable declared as size 3 array of size 4 array of size 5 array + // of int: + // int a[3][4][5]; + // then getInnerArraySizeProduct of a would be 4*5. + unsigned int getInnerArraySizeProduct() const; + + // Array size 0 means not an array when passed to or returned from these functions. + // Note that setArraySize() is deprecated and should not be used inside ANGLE. + unsigned int getOutermostArraySize() const { return isArray() ? arraySizes.back() : 0; } + void setArraySize(unsigned int size); + + // Turn this ShaderVariable from an array into a specific element in that array. Will update + // flattenedOffsetInParentArrays. + void indexIntoArray(unsigned int arrayIndex); + + // Get the nth nested array size from the top. Caller is responsible for range checking + // arrayNestingIndex. + unsigned int getNestedArraySize(unsigned int arrayNestingIndex) const; + + // This function should only be used with variables that are of a basic type or an array of a + // basic type. Shader interface variables that are enumerated according to rules in GLES 3.1 + // spec section 7.3.1.1 page 77 are fine. For those variables the return value should match the + // ARRAY_SIZE value that can be queried through the API. + unsigned int getBasicTypeElementCount() const; + + unsigned int getExternalSize() const; + + bool isStruct() const { return !fields.empty(); } + const std::string &getStructName() const { return structOrBlockName; } + void setStructName(const std::string &newName) { structOrBlockName = newName; } + + // All of the shader's variables are described using nested data + // structures. 
This is needed in order to disambiguate similar looking + // types, such as two structs containing the same fields, but in + // different orders. "findInfoByMappedName" provides an easy query for + // users to dive into the data structure and fetch the unique variable + // instance corresponding to a dereferencing chain of the top-level + // variable. + // Given a mapped name like 'a[0].b.c[0]', return the ShaderVariable + // that defines 'c' in |leafVar|, and the original name 'A[0].B.C[0]' + // in |originalName|, based on the assumption that |this| defines 'a'. + // If no match is found, return false. + bool findInfoByMappedName(const std::string &mappedFullName, + const ShaderVariable **leafVar, + std::string *originalFullName) const; + + // Find the child field which matches 'fullName' == var.name + "." + field.name. + // Return nullptr if not found. + const sh::ShaderVariable *findField(const std::string &fullName, uint32_t *fieldIndexOut) const; + + bool isBuiltIn() const; + bool isEmulatedBuiltIn() const; + + GLenum type; + GLenum precision; + std::string name; + std::string mappedName; + + // Used to make an array type. Outermost array size is stored at the end of the vector. + std::vector<unsigned int> arraySizes; + + // Offset of this variable in parent arrays. In case the parent is an array of arrays, the + // offset is outerArrayElement * innerArraySize + innerArrayElement. + // For example, if there's a variable declared as size 3 array of size 4 array of int: + // int a[3][4]; + // then the flattenedOffsetInParentArrays of a[2] would be 2. + // and flattenedOffsetInParentArrays of a[2][1] would be 2*4 + 1 = 9. + int parentArrayIndex() const + { + return hasParentArrayIndex() ? flattenedOffsetInParentArrays : 0; + } + + void setParentArrayIndex(int indexIn) { flattenedOffsetInParentArrays = indexIn; } + + bool hasParentArrayIndex() const { return flattenedOffsetInParentArrays != -1; } + + // Static use means that the variable is accessed somewhere in the shader source. + bool staticUse; + // A variable is active unless the compiler determined that it is not accessed by the shader. + // All active variables are statically used, but not all statically used variables are + // necessarily active. GLES 3.0.5 section 2.12.6. GLES 3.1 section 7.3.1. + bool active; + std::vector<ShaderVariable> fields; + // structOrBlockName is used for: + // + // - varyings of struct type, in which case it contains the struct name. + // - shader I/O blocks, in which case it contains the block name. + std::string structOrBlockName; + std::string mappedStructOrBlockName; + + // Only applies to interface block fields. Kept here for simplicity. + bool isRowMajorLayout; + + // VariableWithLocation + int location; + + // The location of inputs or outputs without location layout qualifier will be updated to '-1'. + // GLES Spec 3.1, Section 7.3. PROGRAM OBJECTS + // Not all active variables are assigned valid locations; + // the following variables will have an effective location of -1: + bool hasImplicitLocation; + void resetEffectiveLocation(); + void updateEffectiveLocation(const sh::ShaderVariable &parent); + + // Uniform + int binding; + // Decide whether two uniforms are the same at shader link time, + // assuming they are from consecutive shader stages. + // GLSL ES Spec 3.00.3, section 4.3.5.
+ // GLSL ES Spec 3.10.4, section 4.4.5 + bool isSameUniformAtLinkTime(const ShaderVariable &other) const; + GLenum imageUnitFormat; + int offset; + bool readonly; + bool writeonly; + + // From EXT_shader_framebuffer_fetch + bool isFragmentInOut; + + // OutputVariable + // From EXT_blend_func_extended. + int index; + + // From EXT_YUV_target + bool yuv; + + // InterfaceBlockField + // Decide whether two InterfaceBlock fields are the same at shader + // link time, assuming they are from consecutive shader stages. + // See GLSL ES Spec 3.00.3, sec 4.3.7. + bool isSameInterfaceBlockFieldAtLinkTime(const ShaderVariable &other) const; + + // Varying + InterpolationType interpolation; + bool isInvariant; + bool isShaderIOBlock; + bool isPatch; + + // Decide whether two varyings are the same at shader link time, + // assuming they are from consecutive shader stages. + // Invariance needs to match only in ESSL1. Relevant spec sections: + // GLSL ES 3.00.4, sections 4.6.1 and 4.3.9. + // GLSL ES 1.00.17, section 4.6.4. + bool isSameVaryingAtLinkTime(const ShaderVariable &other, int shaderVersion) const; + // Deprecated version of isSameVaryingAtLinkTime, which assumes ESSL1. + bool isSameVaryingAtLinkTime(const ShaderVariable &other) const; + + // Shader I/O blocks may match by block name or instance, based on whether both stages have an + // instance name or not. + bool isSameNameAtLinkTime(const ShaderVariable &other) const; + + // If the variable is a sampler that has ever been statically used with texelFetch + bool texelFetchStaticUse; + + protected: + bool isSameVariableAtLinkTime(const ShaderVariable &other, + bool matchPrecision, + bool matchName) const; + + int flattenedOffsetInParentArrays; +}; + +// TODO: anglebug.com/3899 +// For backwards compatibility for other codebases (e.g., chromium/src/gpu/command_buffer/service) +using Uniform = ShaderVariable; +using Attribute = ShaderVariable; +using OutputVariable = ShaderVariable; +using InterfaceBlockField = ShaderVariable; +using Varying = ShaderVariable; + +struct InterfaceBlock +{ + InterfaceBlock(); + ~InterfaceBlock(); + InterfaceBlock(const InterfaceBlock &other); + InterfaceBlock &operator=(const InterfaceBlock &other); + + // Fields from blocks with non-empty instance names are prefixed with the block name. + std::string fieldPrefix() const; + std::string fieldMappedPrefix() const; + + // Decide whether two interface blocks are the same at shader link time. + bool isSameInterfaceBlockAtLinkTime(const InterfaceBlock &other) const; + + bool isBuiltIn() const; + + bool isArray() const { return arraySize > 0; } + unsigned int elementCount() const { return std::max(1u, arraySize); } + + std::string name; + std::string mappedName; + std::string instanceName; + unsigned int arraySize; + BlockLayoutType layout; + + // Deprecated. Matrix packing should only be queried from individual fields of the block. + // TODO(oetuaho): Remove this once it is no longer used in Chromium. + bool isRowMajorLayout; + + int binding; + bool staticUse; + bool active; + BlockType blockType; + std::vector<ShaderVariable> fields; +}; + +struct WorkGroupSize +{ + // Must have a trivial default constructor since it is used in YYSTYPE. + inline WorkGroupSize() = default; + inline explicit constexpr WorkGroupSize(int initialSize); + + void fill(int fillValue); + void setLocalSize(int localSizeX, int localSizeY, int localSizeZ); + + int &operator[](size_t index); + int operator[](size_t index) const; + size_t size() const; + + // Checks whether two work group size declarations match.
+ // Two work group size declarations are the same if the explicitly specified elements are the + // same or if one of them is specified as one and the other one is not specified + bool isWorkGroupSizeMatching(const WorkGroupSize &right) const; + + // Checks whether any of the values are set. + bool isAnyValueSet() const; + + // Checks whether all of the values are set. + bool isDeclared() const; + + // Checks whether either all of the values are set, or none of them are. + bool isLocalSizeValid() const; + + int localSizeQualifiers[3]; +}; + +inline constexpr WorkGroupSize::WorkGroupSize(int initialSize) + : localSizeQualifiers{initialSize, initialSize, initialSize} +{} + +} // namespace sh + +#endif // GLSLANG_SHADERVARS_H_ diff --git a/vendor/angle/include/KHR/.clang-format b/vendor/angle/include/KHR/.clang-format new file mode 100644 index 00000000..06147100 --- /dev/null +++ b/vendor/angle/include/KHR/.clang-format @@ -0,0 +1,3 @@ + + +DisableFormat: true diff --git a/vendor/angle/include/KHR/khrplatform.h b/vendor/angle/include/KHR/khrplatform.h new file mode 100644 index 00000000..dd22d927 --- /dev/null +++ b/vendor/angle/include/KHR/khrplatform.h @@ -0,0 +1,290 @@ +#ifndef __khrplatform_h_ +#define __khrplatform_h_ + +/* +** Copyright (c) 2008-2018 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ + +/* Khronos platform-specific types and definitions. + * + * The master copy of khrplatform.h is maintained in the Khronos EGL + * Registry repository at https://github.com/KhronosGroup/EGL-Registry + * The last semantic modification to khrplatform.h was at commit ID: + * 67a3e0864c2d75ea5287b9f3d2eb74a745936692 + * + * Adopters may modify this file to suit their platform. Adopters are + * encouraged to submit platform specific modifications to the Khronos + * group so that they can be included in future versions of this file. + * Please submit changes by filing pull requests or issues on + * the EGL Registry repository linked above. + * + * + * See the Implementer's Guidelines for information about where this file + * should be located on your system and for more details of its use: + * http://www.khronos.org/registry/implementers_guide.pdf + * + * This file should be included as + * #include + * by Khronos client API header files that use its types and defines. + * + * The types in khrplatform.h should only be used to define API-specific types. 
+ * + * Types defined in khrplatform.h: + * khronos_int8_t signed 8 bit + * khronos_uint8_t unsigned 8 bit + * khronos_int16_t signed 16 bit + * khronos_uint16_t unsigned 16 bit + * khronos_int32_t signed 32 bit + * khronos_uint32_t unsigned 32 bit + * khronos_int64_t signed 64 bit + * khronos_uint64_t unsigned 64 bit + * khronos_intptr_t signed same number of bits as a pointer + * khronos_uintptr_t unsigned same number of bits as a pointer + * khronos_ssize_t signed size + * khronos_usize_t unsigned size + * khronos_float_t signed 32 bit floating point + * khronos_time_ns_t unsigned 64 bit time in nanoseconds + * khronos_utime_nanoseconds_t unsigned time interval or absolute time in + * nanoseconds + * khronos_stime_nanoseconds_t signed time interval in nanoseconds + * khronos_boolean_enum_t enumerated boolean type. This should + * only be used as a base type when a client API's boolean type is + * an enum. Client APIs which use an integer or other type for + * booleans cannot use this as the base type for their boolean. + * + * Tokens defined in khrplatform.h: + * + * KHRONOS_FALSE, KHRONOS_TRUE Enumerated boolean false/true values. + * + * KHRONOS_SUPPORT_INT64 is 1 if 64 bit integers are supported; otherwise 0. + * KHRONOS_SUPPORT_FLOAT is 1 if floats are supported; otherwise 0. + * + * Calling convention macros defined in this file: + * KHRONOS_APICALL + * KHRONOS_APIENTRY + * KHRONOS_APIATTRIBUTES + * + * These may be used in function prototypes as: + * + * KHRONOS_APICALL void KHRONOS_APIENTRY funcname( + * int arg1, + * int arg2) KHRONOS_APIATTRIBUTES; + */ + +#if defined(__SCITECH_SNAP__) && !defined(KHRONOS_STATIC) +# define KHRONOS_STATIC 1 +#endif + +/*------------------------------------------------------------------------- + * Definition of KHRONOS_APICALL + *------------------------------------------------------------------------- + * This precedes the return type of the function in the function prototype. + */ +#if defined(KHRONOS_STATIC) + /* If the preprocessor constant KHRONOS_STATIC is defined, make the + * header compatible with static linking. */ +# define KHRONOS_APICALL +#elif defined(_WIN32) +# define KHRONOS_APICALL __declspec(dllimport) +#elif defined (__SYMBIAN32__) +# define KHRONOS_APICALL IMPORT_C +#elif defined(__ANDROID__) +# define KHRONOS_APICALL __attribute__((visibility("default"))) +#else +# define KHRONOS_APICALL +#endif + +/*------------------------------------------------------------------------- + * Definition of KHRONOS_APIENTRY + *------------------------------------------------------------------------- + * This follows the return type of the function and precedes the function + * name in the function prototype. + */ +#if defined(_WIN32) && !defined(_WIN32_WCE) && !defined(__SCITECH_SNAP__) + /* Win32 but not WinCE */ +# define KHRONOS_APIENTRY __stdcall +#else +# define KHRONOS_APIENTRY +#endif + +/*------------------------------------------------------------------------- + * Definition of KHRONOS_APIATTRIBUTES + *------------------------------------------------------------------------- + * This follows the closing parenthesis of the function prototype arguments. 
+ */ +#if defined (__ARMCC_2__) +#define KHRONOS_APIATTRIBUTES __softfp +#else +#define KHRONOS_APIATTRIBUTES +#endif + +/*------------------------------------------------------------------------- + * basic type definitions + *-----------------------------------------------------------------------*/ +#if (defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L) || defined(__GNUC__) || defined(__SCO__) || defined(__USLC__) + + +/* + * Using + */ +#include +typedef int32_t khronos_int32_t; +typedef uint32_t khronos_uint32_t; +typedef int64_t khronos_int64_t; +typedef uint64_t khronos_uint64_t; +#define KHRONOS_SUPPORT_INT64 1 +#define KHRONOS_SUPPORT_FLOAT 1 + +#elif defined(__VMS ) || defined(__sgi) + +/* + * Using + */ +#include +typedef int32_t khronos_int32_t; +typedef uint32_t khronos_uint32_t; +typedef int64_t khronos_int64_t; +typedef uint64_t khronos_uint64_t; +#define KHRONOS_SUPPORT_INT64 1 +#define KHRONOS_SUPPORT_FLOAT 1 + +#elif defined(_WIN32) && !defined(__SCITECH_SNAP__) + +/* + * Win32 + */ +typedef __int32 khronos_int32_t; +typedef unsigned __int32 khronos_uint32_t; +typedef __int64 khronos_int64_t; +typedef unsigned __int64 khronos_uint64_t; +#define KHRONOS_SUPPORT_INT64 1 +#define KHRONOS_SUPPORT_FLOAT 1 + +#elif defined(__sun__) || defined(__digital__) + +/* + * Sun or Digital + */ +typedef int khronos_int32_t; +typedef unsigned int khronos_uint32_t; +#if defined(__arch64__) || defined(_LP64) +typedef long int khronos_int64_t; +typedef unsigned long int khronos_uint64_t; +#else +typedef long long int khronos_int64_t; +typedef unsigned long long int khronos_uint64_t; +#endif /* __arch64__ */ +#define KHRONOS_SUPPORT_INT64 1 +#define KHRONOS_SUPPORT_FLOAT 1 + +#elif 0 + +/* + * Hypothetical platform with no float or int64 support + */ +typedef int khronos_int32_t; +typedef unsigned int khronos_uint32_t; +#define KHRONOS_SUPPORT_INT64 0 +#define KHRONOS_SUPPORT_FLOAT 0 + +#else + +/* + * Generic fallback + */ +#include +typedef int32_t khronos_int32_t; +typedef uint32_t khronos_uint32_t; +typedef int64_t khronos_int64_t; +typedef uint64_t khronos_uint64_t; +#define KHRONOS_SUPPORT_INT64 1 +#define KHRONOS_SUPPORT_FLOAT 1 + +#endif + + +/* + * Types that are (so far) the same on all platforms + */ +typedef signed char khronos_int8_t; +typedef unsigned char khronos_uint8_t; +typedef signed short int khronos_int16_t; +typedef unsigned short int khronos_uint16_t; + +/* + * Types that differ between LLP64 and LP64 architectures - in LLP64, + * pointers are 64 bits, but 'long' is still 32 bits. Win64 appears + * to be the only LLP64 architecture in current use. + */ +#ifdef _WIN64 +typedef signed long long int khronos_intptr_t; +typedef unsigned long long int khronos_uintptr_t; +typedef signed long long int khronos_ssize_t; +typedef unsigned long long int khronos_usize_t; +#else +typedef signed long int khronos_intptr_t; +typedef unsigned long int khronos_uintptr_t; +typedef signed long int khronos_ssize_t; +typedef unsigned long int khronos_usize_t; +#endif + +#if KHRONOS_SUPPORT_FLOAT +/* + * Float type + */ +typedef float khronos_float_t; +#endif + +#if KHRONOS_SUPPORT_INT64 +/* Time types + * + * These types can be used to represent a time interval in nanoseconds or + * an absolute Unadjusted System Time. Unadjusted System Time is the number + * of nanoseconds since some arbitrary system event (e.g. since the last + * time the system booted). The Unadjusted System Time is an unsigned + * 64 bit value that wraps back to 0 every 584 years. 
Time intervals + * may be either signed or unsigned. + */ +typedef khronos_uint64_t khronos_utime_nanoseconds_t; +typedef khronos_int64_t khronos_stime_nanoseconds_t; +#endif + +/* + * Dummy value used to pad enum types to 32 bits. + */ +#ifndef KHRONOS_MAX_ENUM +#define KHRONOS_MAX_ENUM 0x7FFFFFFF +#endif + +/* + * Enumerated boolean type + * + * Values other than zero should be considered to be true. Therefore + * comparisons should not be made against KHRONOS_TRUE. + */ +typedef enum { + KHRONOS_FALSE = 0, + KHRONOS_TRUE = 1, + KHRONOS_BOOLEAN_ENUM_FORCE_SIZE = KHRONOS_MAX_ENUM +} khronos_boolean_enum_t; + +#endif /* __khrplatform_h_ */ diff --git a/vendor/angle/include/WGL/.clang-format b/vendor/angle/include/WGL/.clang-format new file mode 100644 index 00000000..06147100 --- /dev/null +++ b/vendor/angle/include/WGL/.clang-format @@ -0,0 +1,3 @@ + + +DisableFormat: true diff --git a/vendor/angle/include/WGL/wgl.h b/vendor/angle/include/WGL/wgl.h new file mode 100644 index 00000000..0bae52bd --- /dev/null +++ b/vendor/angle/include/WGL/wgl.h @@ -0,0 +1,946 @@ +#ifndef __wgl_wgl_h_ +#define __wgl_wgl_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2018 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. 
The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#if defined(_WIN32) && !defined(APIENTRY) && !defined(__CYGWIN__) && !defined(__SCITECH_SNAP__) +#define WIN32_LEAN_AND_MEAN 1 +#include +#endif + +/* Generated on date 20181206 */ + +/* Generated C header for: + * API: wgl + * Versions considered: .* + * Versions emitted: .* + * Default extensions included: wgl + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef WGL_VERSION_1_0 +#define WGL_VERSION_1_0 1 +#define WGL_FONT_LINES 0 +#define WGL_FONT_POLYGONS 1 +#define WGL_SWAP_MAIN_PLANE 0x00000001 +#define WGL_SWAP_OVERLAY1 0x00000002 +#define WGL_SWAP_OVERLAY2 0x00000004 +#define WGL_SWAP_OVERLAY3 0x00000008 +#define WGL_SWAP_OVERLAY4 0x00000010 +#define WGL_SWAP_OVERLAY5 0x00000020 +#define WGL_SWAP_OVERLAY6 0x00000040 +#define WGL_SWAP_OVERLAY7 0x00000080 +#define WGL_SWAP_OVERLAY8 0x00000100 +#define WGL_SWAP_OVERLAY9 0x00000200 +#define WGL_SWAP_OVERLAY10 0x00000400 +#define WGL_SWAP_OVERLAY11 0x00000800 +#define WGL_SWAP_OVERLAY12 0x00001000 +#define WGL_SWAP_OVERLAY13 0x00002000 +#define WGL_SWAP_OVERLAY14 0x00004000 +#define WGL_SWAP_OVERLAY15 0x00008000 +#define WGL_SWAP_UNDERLAY1 0x00010000 +#define WGL_SWAP_UNDERLAY2 0x00020000 +#define WGL_SWAP_UNDERLAY3 0x00040000 +#define WGL_SWAP_UNDERLAY4 0x00080000 +#define WGL_SWAP_UNDERLAY5 0x00100000 +#define WGL_SWAP_UNDERLAY6 0x00200000 +#define WGL_SWAP_UNDERLAY7 0x00400000 +#define WGL_SWAP_UNDERLAY8 0x00800000 +#define WGL_SWAP_UNDERLAY9 0x01000000 +#define WGL_SWAP_UNDERLAY10 0x02000000 +#define WGL_SWAP_UNDERLAY11 0x04000000 +#define WGL_SWAP_UNDERLAY12 0x08000000 +#define WGL_SWAP_UNDERLAY13 0x10000000 +#define WGL_SWAP_UNDERLAY14 0x20000000 +#define WGL_SWAP_UNDERLAY15 0x40000000 +typedef int (WINAPI * PFNCHOOSEPIXELFORMATPROC) (HDC hDc, const PIXELFORMATDESCRIPTOR *pPfd); +typedef int (WINAPI * PFNDESCRIBEPIXELFORMATPROC) (HDC hdc, int ipfd, UINT cjpfd, const PIXELFORMATDESCRIPTOR *ppfd); +typedef UINT (WINAPI * PFNGETENHMETAFILEPIXELFORMATPROC) (HENHMETAFILE hemf, const PIXELFORMATDESCRIPTOR *ppfd); +typedef int (WINAPI * PFNGETPIXELFORMATPROC) (HDC hdc); +typedef BOOL (WINAPI * PFNSETPIXELFORMATPROC) (HDC hdc, int ipfd, const PIXELFORMATDESCRIPTOR *ppfd); +typedef BOOL (WINAPI * PFNSWAPBUFFERSPROC) (HDC hdc); +typedef BOOL (WINAPI * PFNWGLCOPYCONTEXTPROC) (HGLRC hglrcSrc, HGLRC hglrcDst, UINT mask); +typedef HGLRC (WINAPI * PFNWGLCREATECONTEXTPROC) (HDC hDc); +typedef HGLRC (WINAPI * PFNWGLCREATELAYERCONTEXTPROC) (HDC hDc, int level); +typedef BOOL (WINAPI * PFNWGLDELETECONTEXTPROC) (HGLRC oldContext); +typedef BOOL (WINAPI * PFNWGLDESCRIBELAYERPLANEPROC) (HDC hDc, int pixelFormat, int layerPlane, UINT nBytes, const LAYERPLANEDESCRIPTOR *plpd); +typedef HGLRC (WINAPI * PFNWGLGETCURRENTCONTEXTPROC) (void); +typedef HDC (WINAPI * PFNWGLGETCURRENTDCPROC) (void); +typedef int (WINAPI * PFNWGLGETLAYERPALETTEENTRIESPROC) (HDC hdc, int iLayerPlane, int iStart, int cEntries, const COLORREF *pcr); +typedef PROC (WINAPI * PFNWGLGETPROCADDRESSPROC) (LPCSTR lpszProc); +typedef BOOL (WINAPI * PFNWGLMAKECURRENTPROC) (HDC hDc, HGLRC newContext); +typedef BOOL (WINAPI * PFNWGLREALIZELAYERPALETTEPROC) (HDC hdc, int iLayerPlane, BOOL bRealize); +typedef int (WINAPI * PFNWGLSETLAYERPALETTEENTRIESPROC) (HDC hdc, int iLayerPlane, int iStart, int cEntries, const COLORREF *pcr); +typedef BOOL (WINAPI * PFNWGLSHARELISTSPROC) (HGLRC 
hrcSrvShare, HGLRC hrcSrvSource); +typedef BOOL (WINAPI * PFNWGLSWAPLAYERBUFFERSPROC) (HDC hdc, UINT fuFlags); +typedef BOOL (WINAPI * PFNWGLUSEFONTBITMAPSPROC) (HDC hDC, DWORD first, DWORD count, DWORD listBase); +typedef BOOL (WINAPI * PFNWGLUSEFONTBITMAPSAPROC) (HDC hDC, DWORD first, DWORD count, DWORD listBase); +typedef BOOL (WINAPI * PFNWGLUSEFONTBITMAPSWPROC) (HDC hDC, DWORD first, DWORD count, DWORD listBase); +typedef BOOL (WINAPI * PFNWGLUSEFONTOUTLINESPROC) (HDC hDC, DWORD first, DWORD count, DWORD listBase, FLOAT deviation, FLOAT extrusion, int format, LPGLYPHMETRICSFLOAT lpgmf); +typedef BOOL (WINAPI * PFNWGLUSEFONTOUTLINESAPROC) (HDC hDC, DWORD first, DWORD count, DWORD listBase, FLOAT deviation, FLOAT extrusion, int format, LPGLYPHMETRICSFLOAT lpgmf); +typedef BOOL (WINAPI * PFNWGLUSEFONTOUTLINESWPROC) (HDC hDC, DWORD first, DWORD count, DWORD listBase, FLOAT deviation, FLOAT extrusion, int format, LPGLYPHMETRICSFLOAT lpgmf); +#ifdef WGL_WGLEXT_PROTOTYPES +int WINAPI ChoosePixelFormat (HDC hDc, const PIXELFORMATDESCRIPTOR *pPfd); +int WINAPI DescribePixelFormat (HDC hdc, int ipfd, UINT cjpfd, const PIXELFORMATDESCRIPTOR *ppfd); +UINT WINAPI GetEnhMetaFilePixelFormat (HENHMETAFILE hemf, const PIXELFORMATDESCRIPTOR *ppfd); +int WINAPI GetPixelFormat (HDC hdc); +BOOL WINAPI SetPixelFormat (HDC hdc, int ipfd, const PIXELFORMATDESCRIPTOR *ppfd); +BOOL WINAPI SwapBuffers (HDC hdc); +BOOL WINAPI wglCopyContext (HGLRC hglrcSrc, HGLRC hglrcDst, UINT mask); +HGLRC WINAPI wglCreateContext (HDC hDc); +HGLRC WINAPI wglCreateLayerContext (HDC hDc, int level); +BOOL WINAPI wglDeleteContext (HGLRC oldContext); +BOOL WINAPI wglDescribeLayerPlane (HDC hDc, int pixelFormat, int layerPlane, UINT nBytes, const LAYERPLANEDESCRIPTOR *plpd); +HGLRC WINAPI wglGetCurrentContext (void); +HDC WINAPI wglGetCurrentDC (void); +int WINAPI wglGetLayerPaletteEntries (HDC hdc, int iLayerPlane, int iStart, int cEntries, const COLORREF *pcr); +PROC WINAPI wglGetProcAddress (LPCSTR lpszProc); +BOOL WINAPI wglMakeCurrent (HDC hDc, HGLRC newContext); +BOOL WINAPI wglRealizeLayerPalette (HDC hdc, int iLayerPlane, BOOL bRealize); +int WINAPI wglSetLayerPaletteEntries (HDC hdc, int iLayerPlane, int iStart, int cEntries, const COLORREF *pcr); +BOOL WINAPI wglShareLists (HGLRC hrcSrvShare, HGLRC hrcSrvSource); +BOOL WINAPI wglSwapLayerBuffers (HDC hdc, UINT fuFlags); +BOOL WINAPI wglUseFontBitmaps (HDC hDC, DWORD first, DWORD count, DWORD listBase); +BOOL WINAPI wglUseFontBitmapsA (HDC hDC, DWORD first, DWORD count, DWORD listBase); +BOOL WINAPI wglUseFontBitmapsW (HDC hDC, DWORD first, DWORD count, DWORD listBase); +BOOL WINAPI wglUseFontOutlines (HDC hDC, DWORD first, DWORD count, DWORD listBase, FLOAT deviation, FLOAT extrusion, int format, LPGLYPHMETRICSFLOAT lpgmf); +BOOL WINAPI wglUseFontOutlinesA (HDC hDC, DWORD first, DWORD count, DWORD listBase, FLOAT deviation, FLOAT extrusion, int format, LPGLYPHMETRICSFLOAT lpgmf); +BOOL WINAPI wglUseFontOutlinesW (HDC hDC, DWORD first, DWORD count, DWORD listBase, FLOAT deviation, FLOAT extrusion, int format, LPGLYPHMETRICSFLOAT lpgmf); +#endif +#endif /* WGL_VERSION_1_0 */ + +#ifndef WGL_ARB_buffer_region +#define WGL_ARB_buffer_region 1 +#define WGL_FRONT_COLOR_BUFFER_BIT_ARB 0x00000001 +#define WGL_BACK_COLOR_BUFFER_BIT_ARB 0x00000002 +#define WGL_DEPTH_BUFFER_BIT_ARB 0x00000004 +#define WGL_STENCIL_BUFFER_BIT_ARB 0x00000008 +typedef HANDLE (WINAPI * PFNWGLCREATEBUFFERREGIONARBPROC) (HDC hDC, int iLayerPlane, UINT uType); +typedef VOID (WINAPI * 
PFNWGLDELETEBUFFERREGIONARBPROC) (HANDLE hRegion); +typedef BOOL (WINAPI * PFNWGLSAVEBUFFERREGIONARBPROC) (HANDLE hRegion, int x, int y, int width, int height); +typedef BOOL (WINAPI * PFNWGLRESTOREBUFFERREGIONARBPROC) (HANDLE hRegion, int x, int y, int width, int height, int xSrc, int ySrc); +#ifdef WGL_WGLEXT_PROTOTYPES +HANDLE WINAPI wglCreateBufferRegionARB (HDC hDC, int iLayerPlane, UINT uType); +VOID WINAPI wglDeleteBufferRegionARB (HANDLE hRegion); +BOOL WINAPI wglSaveBufferRegionARB (HANDLE hRegion, int x, int y, int width, int height); +BOOL WINAPI wglRestoreBufferRegionARB (HANDLE hRegion, int x, int y, int width, int height, int xSrc, int ySrc); +#endif +#endif /* WGL_ARB_buffer_region */ + +#ifndef WGL_ARB_context_flush_control +#define WGL_ARB_context_flush_control 1 +#define WGL_CONTEXT_RELEASE_BEHAVIOR_ARB 0x2097 +#define WGL_CONTEXT_RELEASE_BEHAVIOR_NONE_ARB 0 +#define WGL_CONTEXT_RELEASE_BEHAVIOR_FLUSH_ARB 0x2098 +#endif /* WGL_ARB_context_flush_control */ + +#ifndef WGL_ARB_create_context +#define WGL_ARB_create_context 1 +#define WGL_CONTEXT_DEBUG_BIT_ARB 0x00000001 +#define WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB 0x00000002 +#define WGL_CONTEXT_MAJOR_VERSION_ARB 0x2091 +#define WGL_CONTEXT_MINOR_VERSION_ARB 0x2092 +#define WGL_CONTEXT_LAYER_PLANE_ARB 0x2093 +#define WGL_CONTEXT_FLAGS_ARB 0x2094 +#define ERROR_INVALID_VERSION_ARB 0x2095 +typedef HGLRC (WINAPI * PFNWGLCREATECONTEXTATTRIBSARBPROC) (HDC hDC, HGLRC hShareContext, const int *attribList); +#ifdef WGL_WGLEXT_PROTOTYPES +HGLRC WINAPI wglCreateContextAttribsARB (HDC hDC, HGLRC hShareContext, const int *attribList); +#endif +#endif /* WGL_ARB_create_context */ + +#ifndef WGL_ARB_create_context_no_error +#define WGL_ARB_create_context_no_error 1 +#define WGL_CONTEXT_OPENGL_NO_ERROR_ARB 0x31B3 +#endif /* WGL_ARB_create_context_no_error */ + +#ifndef WGL_ARB_create_context_profile +#define WGL_ARB_create_context_profile 1 +#define WGL_CONTEXT_PROFILE_MASK_ARB 0x9126 +#define WGL_CONTEXT_CORE_PROFILE_BIT_ARB 0x00000001 +#define WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB 0x00000002 +#define ERROR_INVALID_PROFILE_ARB 0x2096 +#endif /* WGL_ARB_create_context_profile */ + +#ifndef WGL_ARB_create_context_robustness +#define WGL_ARB_create_context_robustness 1 +#define WGL_CONTEXT_ROBUST_ACCESS_BIT_ARB 0x00000004 +#define WGL_LOSE_CONTEXT_ON_RESET_ARB 0x8252 +#define WGL_CONTEXT_RESET_NOTIFICATION_STRATEGY_ARB 0x8256 +#define WGL_NO_RESET_NOTIFICATION_ARB 0x8261 +#endif /* WGL_ARB_create_context_robustness */ + +#ifndef WGL_ARB_extensions_string +#define WGL_ARB_extensions_string 1 +typedef const char *(WINAPI * PFNWGLGETEXTENSIONSSTRINGARBPROC) (HDC hdc); +#ifdef WGL_WGLEXT_PROTOTYPES +const char *WINAPI wglGetExtensionsStringARB (HDC hdc); +#endif +#endif /* WGL_ARB_extensions_string */ + +#ifndef WGL_ARB_framebuffer_sRGB +#define WGL_ARB_framebuffer_sRGB 1 +#define WGL_FRAMEBUFFER_SRGB_CAPABLE_ARB 0x20A9 +#endif /* WGL_ARB_framebuffer_sRGB */ + +#ifndef WGL_ARB_make_current_read +#define WGL_ARB_make_current_read 1 +#define ERROR_INVALID_PIXEL_TYPE_ARB 0x2043 +#define ERROR_INCOMPATIBLE_DEVICE_CONTEXTS_ARB 0x2054 +typedef BOOL (WINAPI * PFNWGLMAKECONTEXTCURRENTARBPROC) (HDC hDrawDC, HDC hReadDC, HGLRC hglrc); +typedef HDC (WINAPI * PFNWGLGETCURRENTREADDCARBPROC) (void); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglMakeContextCurrentARB (HDC hDrawDC, HDC hReadDC, HGLRC hglrc); +HDC WINAPI wglGetCurrentReadDCARB (void); +#endif +#endif /* WGL_ARB_make_current_read */ + +#ifndef WGL_ARB_multisample +#define 
WGL_ARB_multisample 1 +#define WGL_SAMPLE_BUFFERS_ARB 0x2041 +#define WGL_SAMPLES_ARB 0x2042 +#endif /* WGL_ARB_multisample */ + +#ifndef WGL_ARB_pbuffer +#define WGL_ARB_pbuffer 1 +DECLARE_HANDLE(HPBUFFERARB); +#define WGL_DRAW_TO_PBUFFER_ARB 0x202D +#define WGL_MAX_PBUFFER_PIXELS_ARB 0x202E +#define WGL_MAX_PBUFFER_WIDTH_ARB 0x202F +#define WGL_MAX_PBUFFER_HEIGHT_ARB 0x2030 +#define WGL_PBUFFER_LARGEST_ARB 0x2033 +#define WGL_PBUFFER_WIDTH_ARB 0x2034 +#define WGL_PBUFFER_HEIGHT_ARB 0x2035 +#define WGL_PBUFFER_LOST_ARB 0x2036 +typedef HPBUFFERARB (WINAPI * PFNWGLCREATEPBUFFERARBPROC) (HDC hDC, int iPixelFormat, int iWidth, int iHeight, const int *piAttribList); +typedef HDC (WINAPI * PFNWGLGETPBUFFERDCARBPROC) (HPBUFFERARB hPbuffer); +typedef int (WINAPI * PFNWGLRELEASEPBUFFERDCARBPROC) (HPBUFFERARB hPbuffer, HDC hDC); +typedef BOOL (WINAPI * PFNWGLDESTROYPBUFFERARBPROC) (HPBUFFERARB hPbuffer); +typedef BOOL (WINAPI * PFNWGLQUERYPBUFFERARBPROC) (HPBUFFERARB hPbuffer, int iAttribute, int *piValue); +#ifdef WGL_WGLEXT_PROTOTYPES +HPBUFFERARB WINAPI wglCreatePbufferARB (HDC hDC, int iPixelFormat, int iWidth, int iHeight, const int *piAttribList); +HDC WINAPI wglGetPbufferDCARB (HPBUFFERARB hPbuffer); +int WINAPI wglReleasePbufferDCARB (HPBUFFERARB hPbuffer, HDC hDC); +BOOL WINAPI wglDestroyPbufferARB (HPBUFFERARB hPbuffer); +BOOL WINAPI wglQueryPbufferARB (HPBUFFERARB hPbuffer, int iAttribute, int *piValue); +#endif +#endif /* WGL_ARB_pbuffer */ + +#ifndef WGL_ARB_pixel_format +#define WGL_ARB_pixel_format 1 +#define WGL_NUMBER_PIXEL_FORMATS_ARB 0x2000 +#define WGL_DRAW_TO_WINDOW_ARB 0x2001 +#define WGL_DRAW_TO_BITMAP_ARB 0x2002 +#define WGL_ACCELERATION_ARB 0x2003 +#define WGL_NEED_PALETTE_ARB 0x2004 +#define WGL_NEED_SYSTEM_PALETTE_ARB 0x2005 +#define WGL_SWAP_LAYER_BUFFERS_ARB 0x2006 +#define WGL_SWAP_METHOD_ARB 0x2007 +#define WGL_NUMBER_OVERLAYS_ARB 0x2008 +#define WGL_NUMBER_UNDERLAYS_ARB 0x2009 +#define WGL_TRANSPARENT_ARB 0x200A +#define WGL_TRANSPARENT_RED_VALUE_ARB 0x2037 +#define WGL_TRANSPARENT_GREEN_VALUE_ARB 0x2038 +#define WGL_TRANSPARENT_BLUE_VALUE_ARB 0x2039 +#define WGL_TRANSPARENT_ALPHA_VALUE_ARB 0x203A +#define WGL_TRANSPARENT_INDEX_VALUE_ARB 0x203B +#define WGL_SHARE_DEPTH_ARB 0x200C +#define WGL_SHARE_STENCIL_ARB 0x200D +#define WGL_SHARE_ACCUM_ARB 0x200E +#define WGL_SUPPORT_GDI_ARB 0x200F +#define WGL_SUPPORT_OPENGL_ARB 0x2010 +#define WGL_DOUBLE_BUFFER_ARB 0x2011 +#define WGL_STEREO_ARB 0x2012 +#define WGL_PIXEL_TYPE_ARB 0x2013 +#define WGL_COLOR_BITS_ARB 0x2014 +#define WGL_RED_BITS_ARB 0x2015 +#define WGL_RED_SHIFT_ARB 0x2016 +#define WGL_GREEN_BITS_ARB 0x2017 +#define WGL_GREEN_SHIFT_ARB 0x2018 +#define WGL_BLUE_BITS_ARB 0x2019 +#define WGL_BLUE_SHIFT_ARB 0x201A +#define WGL_ALPHA_BITS_ARB 0x201B +#define WGL_ALPHA_SHIFT_ARB 0x201C +#define WGL_ACCUM_BITS_ARB 0x201D +#define WGL_ACCUM_RED_BITS_ARB 0x201E +#define WGL_ACCUM_GREEN_BITS_ARB 0x201F +#define WGL_ACCUM_BLUE_BITS_ARB 0x2020 +#define WGL_ACCUM_ALPHA_BITS_ARB 0x2021 +#define WGL_DEPTH_BITS_ARB 0x2022 +#define WGL_STENCIL_BITS_ARB 0x2023 +#define WGL_AUX_BUFFERS_ARB 0x2024 +#define WGL_NO_ACCELERATION_ARB 0x2025 +#define WGL_GENERIC_ACCELERATION_ARB 0x2026 +#define WGL_FULL_ACCELERATION_ARB 0x2027 +#define WGL_SWAP_EXCHANGE_ARB 0x2028 +#define WGL_SWAP_COPY_ARB 0x2029 +#define WGL_SWAP_UNDEFINED_ARB 0x202A +#define WGL_TYPE_RGBA_ARB 0x202B +#define WGL_TYPE_COLORINDEX_ARB 0x202C +typedef BOOL (WINAPI * PFNWGLGETPIXELFORMATATTRIBIVARBPROC) (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, const 
int *piAttributes, int *piValues); +typedef BOOL (WINAPI * PFNWGLGETPIXELFORMATATTRIBFVARBPROC) (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, const int *piAttributes, FLOAT *pfValues); +typedef BOOL (WINAPI * PFNWGLCHOOSEPIXELFORMATARBPROC) (HDC hdc, const int *piAttribIList, const FLOAT *pfAttribFList, UINT nMaxFormats, int *piFormats, UINT *nNumFormats); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetPixelFormatAttribivARB (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, const int *piAttributes, int *piValues); +BOOL WINAPI wglGetPixelFormatAttribfvARB (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, const int *piAttributes, FLOAT *pfValues); +BOOL WINAPI wglChoosePixelFormatARB (HDC hdc, const int *piAttribIList, const FLOAT *pfAttribFList, UINT nMaxFormats, int *piFormats, UINT *nNumFormats); +#endif +#endif /* WGL_ARB_pixel_format */ + +#ifndef WGL_ARB_pixel_format_float +#define WGL_ARB_pixel_format_float 1 +#define WGL_TYPE_RGBA_FLOAT_ARB 0x21A0 +#endif /* WGL_ARB_pixel_format_float */ + +#ifndef WGL_ARB_render_texture +#define WGL_ARB_render_texture 1 +#define WGL_BIND_TO_TEXTURE_RGB_ARB 0x2070 +#define WGL_BIND_TO_TEXTURE_RGBA_ARB 0x2071 +#define WGL_TEXTURE_FORMAT_ARB 0x2072 +#define WGL_TEXTURE_TARGET_ARB 0x2073 +#define WGL_MIPMAP_TEXTURE_ARB 0x2074 +#define WGL_TEXTURE_RGB_ARB 0x2075 +#define WGL_TEXTURE_RGBA_ARB 0x2076 +#define WGL_NO_TEXTURE_ARB 0x2077 +#define WGL_TEXTURE_CUBE_MAP_ARB 0x2078 +#define WGL_TEXTURE_1D_ARB 0x2079 +#define WGL_TEXTURE_2D_ARB 0x207A +#define WGL_MIPMAP_LEVEL_ARB 0x207B +#define WGL_CUBE_MAP_FACE_ARB 0x207C +#define WGL_TEXTURE_CUBE_MAP_POSITIVE_X_ARB 0x207D +#define WGL_TEXTURE_CUBE_MAP_NEGATIVE_X_ARB 0x207E +#define WGL_TEXTURE_CUBE_MAP_POSITIVE_Y_ARB 0x207F +#define WGL_TEXTURE_CUBE_MAP_NEGATIVE_Y_ARB 0x2080 +#define WGL_TEXTURE_CUBE_MAP_POSITIVE_Z_ARB 0x2081 +#define WGL_TEXTURE_CUBE_MAP_NEGATIVE_Z_ARB 0x2082 +#define WGL_FRONT_LEFT_ARB 0x2083 +#define WGL_FRONT_RIGHT_ARB 0x2084 +#define WGL_BACK_LEFT_ARB 0x2085 +#define WGL_BACK_RIGHT_ARB 0x2086 +#define WGL_AUX0_ARB 0x2087 +#define WGL_AUX1_ARB 0x2088 +#define WGL_AUX2_ARB 0x2089 +#define WGL_AUX3_ARB 0x208A +#define WGL_AUX4_ARB 0x208B +#define WGL_AUX5_ARB 0x208C +#define WGL_AUX6_ARB 0x208D +#define WGL_AUX7_ARB 0x208E +#define WGL_AUX8_ARB 0x208F +#define WGL_AUX9_ARB 0x2090 +typedef BOOL (WINAPI * PFNWGLBINDTEXIMAGEARBPROC) (HPBUFFERARB hPbuffer, int iBuffer); +typedef BOOL (WINAPI * PFNWGLRELEASETEXIMAGEARBPROC) (HPBUFFERARB hPbuffer, int iBuffer); +typedef BOOL (WINAPI * PFNWGLSETPBUFFERATTRIBARBPROC) (HPBUFFERARB hPbuffer, const int *piAttribList); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglBindTexImageARB (HPBUFFERARB hPbuffer, int iBuffer); +BOOL WINAPI wglReleaseTexImageARB (HPBUFFERARB hPbuffer, int iBuffer); +BOOL WINAPI wglSetPbufferAttribARB (HPBUFFERARB hPbuffer, const int *piAttribList); +#endif +#endif /* WGL_ARB_render_texture */ + +#ifndef WGL_ARB_robustness_application_isolation +#define WGL_ARB_robustness_application_isolation 1 +#define WGL_CONTEXT_RESET_ISOLATION_BIT_ARB 0x00000008 +#endif /* WGL_ARB_robustness_application_isolation */ + +#ifndef WGL_ARB_robustness_share_group_isolation +#define WGL_ARB_robustness_share_group_isolation 1 +#endif /* WGL_ARB_robustness_share_group_isolation */ + +#ifndef WGL_3DFX_multisample +#define WGL_3DFX_multisample 1 +#define WGL_SAMPLE_BUFFERS_3DFX 0x2060 +#define WGL_SAMPLES_3DFX 0x2061 +#endif /* WGL_3DFX_multisample */ + +#ifndef WGL_3DL_stereo_control +#define 
WGL_3DL_stereo_control 1 +#define WGL_STEREO_EMITTER_ENABLE_3DL 0x2055 +#define WGL_STEREO_EMITTER_DISABLE_3DL 0x2056 +#define WGL_STEREO_POLARITY_NORMAL_3DL 0x2057 +#define WGL_STEREO_POLARITY_INVERT_3DL 0x2058 +typedef BOOL (WINAPI * PFNWGLSETSTEREOEMITTERSTATE3DLPROC) (HDC hDC, UINT uState); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglSetStereoEmitterState3DL (HDC hDC, UINT uState); +#endif +#endif /* WGL_3DL_stereo_control */ + +#ifndef WGL_AMD_gpu_association +#define WGL_AMD_gpu_association 1 +#define WGL_GPU_VENDOR_AMD 0x1F00 +#define WGL_GPU_RENDERER_STRING_AMD 0x1F01 +#define WGL_GPU_OPENGL_VERSION_STRING_AMD 0x1F02 +#define WGL_GPU_FASTEST_TARGET_GPUS_AMD 0x21A2 +#define WGL_GPU_RAM_AMD 0x21A3 +#define WGL_GPU_CLOCK_AMD 0x21A4 +#define WGL_GPU_NUM_PIPES_AMD 0x21A5 +#define WGL_GPU_NUM_SIMD_AMD 0x21A6 +#define WGL_GPU_NUM_RB_AMD 0x21A7 +#define WGL_GPU_NUM_SPI_AMD 0x21A8 +typedef UINT (WINAPI * PFNWGLGETGPUIDSAMDPROC) (UINT maxCount, UINT *ids); +typedef INT (WINAPI * PFNWGLGETGPUINFOAMDPROC) (UINT id, int property, GLenum dataType, UINT size, void *data); +typedef UINT (WINAPI * PFNWGLGETCONTEXTGPUIDAMDPROC) (HGLRC hglrc); +typedef HGLRC (WINAPI * PFNWGLCREATEASSOCIATEDCONTEXTAMDPROC) (UINT id); +typedef HGLRC (WINAPI * PFNWGLCREATEASSOCIATEDCONTEXTATTRIBSAMDPROC) (UINT id, HGLRC hShareContext, const int *attribList); +typedef BOOL (WINAPI * PFNWGLDELETEASSOCIATEDCONTEXTAMDPROC) (HGLRC hglrc); +typedef BOOL (WINAPI * PFNWGLMAKEASSOCIATEDCONTEXTCURRENTAMDPROC) (HGLRC hglrc); +typedef HGLRC (WINAPI * PFNWGLGETCURRENTASSOCIATEDCONTEXTAMDPROC) (void); +typedef VOID (WINAPI * PFNWGLBLITCONTEXTFRAMEBUFFERAMDPROC) (HGLRC dstCtx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#ifdef WGL_WGLEXT_PROTOTYPES +UINT WINAPI wglGetGPUIDsAMD (UINT maxCount, UINT *ids); +INT WINAPI wglGetGPUInfoAMD (UINT id, int property, GLenum dataType, UINT size, void *data); +UINT WINAPI wglGetContextGPUIDAMD (HGLRC hglrc); +HGLRC WINAPI wglCreateAssociatedContextAMD (UINT id); +HGLRC WINAPI wglCreateAssociatedContextAttribsAMD (UINT id, HGLRC hShareContext, const int *attribList); +BOOL WINAPI wglDeleteAssociatedContextAMD (HGLRC hglrc); +BOOL WINAPI wglMakeAssociatedContextCurrentAMD (HGLRC hglrc); +HGLRC WINAPI wglGetCurrentAssociatedContextAMD (void); +VOID WINAPI wglBlitContextFramebufferAMD (HGLRC dstCtx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#endif +#endif /* WGL_AMD_gpu_association */ + +#ifndef WGL_ATI_pixel_format_float +#define WGL_ATI_pixel_format_float 1 +#define WGL_TYPE_RGBA_FLOAT_ATI 0x21A0 +#endif /* WGL_ATI_pixel_format_float */ + +#ifndef WGL_ATI_render_texture_rectangle +#define WGL_ATI_render_texture_rectangle 1 +#define WGL_TEXTURE_RECTANGLE_ATI 0x21A5 +#endif /* WGL_ATI_render_texture_rectangle */ + +#ifndef WGL_EXT_colorspace +#define WGL_EXT_colorspace 1 +#define WGL_COLORSPACE_EXT 0x309D +#define WGL_COLORSPACE_SRGB_EXT 0x3089 +#define WGL_COLORSPACE_LINEAR_EXT 0x308A +#endif /* WGL_EXT_colorspace */ + +#ifndef WGL_EXT_create_context_es2_profile +#define WGL_EXT_create_context_es2_profile 1 +#define WGL_CONTEXT_ES2_PROFILE_BIT_EXT 0x00000004 +#endif /* WGL_EXT_create_context_es2_profile */ + +#ifndef WGL_EXT_create_context_es_profile +#define WGL_EXT_create_context_es_profile 1 +#define WGL_CONTEXT_ES_PROFILE_BIT_EXT 0x00000004 +#endif /* WGL_EXT_create_context_es_profile */ + +#ifndef 
WGL_EXT_depth_float +#define WGL_EXT_depth_float 1 +#define WGL_DEPTH_FLOAT_EXT 0x2040 +#endif /* WGL_EXT_depth_float */ + +#ifndef WGL_EXT_display_color_table +#define WGL_EXT_display_color_table 1 +typedef GLboolean (WINAPI * PFNWGLCREATEDISPLAYCOLORTABLEEXTPROC) (GLushort id); +typedef GLboolean (WINAPI * PFNWGLLOADDISPLAYCOLORTABLEEXTPROC) (const GLushort *table, GLuint length); +typedef GLboolean (WINAPI * PFNWGLBINDDISPLAYCOLORTABLEEXTPROC) (GLushort id); +typedef VOID (WINAPI * PFNWGLDESTROYDISPLAYCOLORTABLEEXTPROC) (GLushort id); +#ifdef WGL_WGLEXT_PROTOTYPES +GLboolean WINAPI wglCreateDisplayColorTableEXT (GLushort id); +GLboolean WINAPI wglLoadDisplayColorTableEXT (const GLushort *table, GLuint length); +GLboolean WINAPI wglBindDisplayColorTableEXT (GLushort id); +VOID WINAPI wglDestroyDisplayColorTableEXT (GLushort id); +#endif +#endif /* WGL_EXT_display_color_table */ + +#ifndef WGL_EXT_extensions_string +#define WGL_EXT_extensions_string 1 +typedef const char *(WINAPI * PFNWGLGETEXTENSIONSSTRINGEXTPROC) (void); +#ifdef WGL_WGLEXT_PROTOTYPES +const char *WINAPI wglGetExtensionsStringEXT (void); +#endif +#endif /* WGL_EXT_extensions_string */ + +#ifndef WGL_EXT_framebuffer_sRGB +#define WGL_EXT_framebuffer_sRGB 1 +#define WGL_FRAMEBUFFER_SRGB_CAPABLE_EXT 0x20A9 +#endif /* WGL_EXT_framebuffer_sRGB */ + +#ifndef WGL_EXT_make_current_read +#define WGL_EXT_make_current_read 1 +#define ERROR_INVALID_PIXEL_TYPE_EXT 0x2043 +typedef BOOL (WINAPI * PFNWGLMAKECONTEXTCURRENTEXTPROC) (HDC hDrawDC, HDC hReadDC, HGLRC hglrc); +typedef HDC (WINAPI * PFNWGLGETCURRENTREADDCEXTPROC) (void); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglMakeContextCurrentEXT (HDC hDrawDC, HDC hReadDC, HGLRC hglrc); +HDC WINAPI wglGetCurrentReadDCEXT (void); +#endif +#endif /* WGL_EXT_make_current_read */ + +#ifndef WGL_EXT_multisample +#define WGL_EXT_multisample 1 +#define WGL_SAMPLE_BUFFERS_EXT 0x2041 +#define WGL_SAMPLES_EXT 0x2042 +#endif /* WGL_EXT_multisample */ + +#ifndef WGL_EXT_pbuffer +#define WGL_EXT_pbuffer 1 +DECLARE_HANDLE(HPBUFFEREXT); +#define WGL_DRAW_TO_PBUFFER_EXT 0x202D +#define WGL_MAX_PBUFFER_PIXELS_EXT 0x202E +#define WGL_MAX_PBUFFER_WIDTH_EXT 0x202F +#define WGL_MAX_PBUFFER_HEIGHT_EXT 0x2030 +#define WGL_OPTIMAL_PBUFFER_WIDTH_EXT 0x2031 +#define WGL_OPTIMAL_PBUFFER_HEIGHT_EXT 0x2032 +#define WGL_PBUFFER_LARGEST_EXT 0x2033 +#define WGL_PBUFFER_WIDTH_EXT 0x2034 +#define WGL_PBUFFER_HEIGHT_EXT 0x2035 +typedef HPBUFFEREXT (WINAPI * PFNWGLCREATEPBUFFEREXTPROC) (HDC hDC, int iPixelFormat, int iWidth, int iHeight, const int *piAttribList); +typedef HDC (WINAPI * PFNWGLGETPBUFFERDCEXTPROC) (HPBUFFEREXT hPbuffer); +typedef int (WINAPI * PFNWGLRELEASEPBUFFERDCEXTPROC) (HPBUFFEREXT hPbuffer, HDC hDC); +typedef BOOL (WINAPI * PFNWGLDESTROYPBUFFEREXTPROC) (HPBUFFEREXT hPbuffer); +typedef BOOL (WINAPI * PFNWGLQUERYPBUFFEREXTPROC) (HPBUFFEREXT hPbuffer, int iAttribute, int *piValue); +#ifdef WGL_WGLEXT_PROTOTYPES +HPBUFFEREXT WINAPI wglCreatePbufferEXT (HDC hDC, int iPixelFormat, int iWidth, int iHeight, const int *piAttribList); +HDC WINAPI wglGetPbufferDCEXT (HPBUFFEREXT hPbuffer); +int WINAPI wglReleasePbufferDCEXT (HPBUFFEREXT hPbuffer, HDC hDC); +BOOL WINAPI wglDestroyPbufferEXT (HPBUFFEREXT hPbuffer); +BOOL WINAPI wglQueryPbufferEXT (HPBUFFEREXT hPbuffer, int iAttribute, int *piValue); +#endif +#endif /* WGL_EXT_pbuffer */ + +#ifndef WGL_EXT_pixel_format +#define WGL_EXT_pixel_format 1 +#define WGL_NUMBER_PIXEL_FORMATS_EXT 0x2000 +#define WGL_DRAW_TO_WINDOW_EXT 0x2001 +#define WGL_DRAW_TO_BITMAP_EXT 
0x2002 +#define WGL_ACCELERATION_EXT 0x2003 +#define WGL_NEED_PALETTE_EXT 0x2004 +#define WGL_NEED_SYSTEM_PALETTE_EXT 0x2005 +#define WGL_SWAP_LAYER_BUFFERS_EXT 0x2006 +#define WGL_SWAP_METHOD_EXT 0x2007 +#define WGL_NUMBER_OVERLAYS_EXT 0x2008 +#define WGL_NUMBER_UNDERLAYS_EXT 0x2009 +#define WGL_TRANSPARENT_EXT 0x200A +#define WGL_TRANSPARENT_VALUE_EXT 0x200B +#define WGL_SHARE_DEPTH_EXT 0x200C +#define WGL_SHARE_STENCIL_EXT 0x200D +#define WGL_SHARE_ACCUM_EXT 0x200E +#define WGL_SUPPORT_GDI_EXT 0x200F +#define WGL_SUPPORT_OPENGL_EXT 0x2010 +#define WGL_DOUBLE_BUFFER_EXT 0x2011 +#define WGL_STEREO_EXT 0x2012 +#define WGL_PIXEL_TYPE_EXT 0x2013 +#define WGL_COLOR_BITS_EXT 0x2014 +#define WGL_RED_BITS_EXT 0x2015 +#define WGL_RED_SHIFT_EXT 0x2016 +#define WGL_GREEN_BITS_EXT 0x2017 +#define WGL_GREEN_SHIFT_EXT 0x2018 +#define WGL_BLUE_BITS_EXT 0x2019 +#define WGL_BLUE_SHIFT_EXT 0x201A +#define WGL_ALPHA_BITS_EXT 0x201B +#define WGL_ALPHA_SHIFT_EXT 0x201C +#define WGL_ACCUM_BITS_EXT 0x201D +#define WGL_ACCUM_RED_BITS_EXT 0x201E +#define WGL_ACCUM_GREEN_BITS_EXT 0x201F +#define WGL_ACCUM_BLUE_BITS_EXT 0x2020 +#define WGL_ACCUM_ALPHA_BITS_EXT 0x2021 +#define WGL_DEPTH_BITS_EXT 0x2022 +#define WGL_STENCIL_BITS_EXT 0x2023 +#define WGL_AUX_BUFFERS_EXT 0x2024 +#define WGL_NO_ACCELERATION_EXT 0x2025 +#define WGL_GENERIC_ACCELERATION_EXT 0x2026 +#define WGL_FULL_ACCELERATION_EXT 0x2027 +#define WGL_SWAP_EXCHANGE_EXT 0x2028 +#define WGL_SWAP_COPY_EXT 0x2029 +#define WGL_SWAP_UNDEFINED_EXT 0x202A +#define WGL_TYPE_RGBA_EXT 0x202B +#define WGL_TYPE_COLORINDEX_EXT 0x202C +typedef BOOL (WINAPI * PFNWGLGETPIXELFORMATATTRIBIVEXTPROC) (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, int *piAttributes, int *piValues); +typedef BOOL (WINAPI * PFNWGLGETPIXELFORMATATTRIBFVEXTPROC) (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, int *piAttributes, FLOAT *pfValues); +typedef BOOL (WINAPI * PFNWGLCHOOSEPIXELFORMATEXTPROC) (HDC hdc, const int *piAttribIList, const FLOAT *pfAttribFList, UINT nMaxFormats, int *piFormats, UINT *nNumFormats); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetPixelFormatAttribivEXT (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, int *piAttributes, int *piValues); +BOOL WINAPI wglGetPixelFormatAttribfvEXT (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, int *piAttributes, FLOAT *pfValues); +BOOL WINAPI wglChoosePixelFormatEXT (HDC hdc, const int *piAttribIList, const FLOAT *pfAttribFList, UINT nMaxFormats, int *piFormats, UINT *nNumFormats); +#endif +#endif /* WGL_EXT_pixel_format */ + +#ifndef WGL_EXT_pixel_format_packed_float +#define WGL_EXT_pixel_format_packed_float 1 +#define WGL_TYPE_RGBA_UNSIGNED_FLOAT_EXT 0x20A8 +#endif /* WGL_EXT_pixel_format_packed_float */ + +#ifndef WGL_EXT_swap_control +#define WGL_EXT_swap_control 1 +typedef BOOL (WINAPI * PFNWGLSWAPINTERVALEXTPROC) (int interval); +typedef int (WINAPI * PFNWGLGETSWAPINTERVALEXTPROC) (void); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglSwapIntervalEXT (int interval); +int WINAPI wglGetSwapIntervalEXT (void); +#endif +#endif /* WGL_EXT_swap_control */ + +#ifndef WGL_EXT_swap_control_tear +#define WGL_EXT_swap_control_tear 1 +#endif /* WGL_EXT_swap_control_tear */ + +#ifndef WGL_I3D_digital_video_control +#define WGL_I3D_digital_video_control 1 +#define WGL_DIGITAL_VIDEO_CURSOR_ALPHA_FRAMEBUFFER_I3D 0x2050 +#define WGL_DIGITAL_VIDEO_CURSOR_ALPHA_VALUE_I3D 0x2051 +#define WGL_DIGITAL_VIDEO_CURSOR_INCLUDED_I3D 0x2052 +#define WGL_DIGITAL_VIDEO_GAMMA_CORRECTED_I3D 0x2053 
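Editor's note, not part of the patch: every extension in this header follows the same pattern — enum values, a PFN...PROC function-pointer typedef, and a prototype that is only visible when WGL_WGLEXT_PROTOTYPES is defined. The entry points are not exported by opengl32.dll, so applications resolve them at run time with wglGetProcAddress. A minimal sketch of that pattern using the WGL_EXT_swap_control declarations above; the include path for this header and the assumption that a WGL context is already current are the only details not taken from the file itself (link against opengl32.lib):

#include <windows.h>
#include <GL/gl.h>
#include <cstring>
#include "wglext.h"  // this header; adjust the path to your project layout

// Enables vsync if WGL_EXT_swap_control is available. Requires a current WGL
// context, because wglGetProcAddress only resolves against the current context.
static bool EnableVSync() {
  auto getExtensions = reinterpret_cast<PFNWGLGETEXTENSIONSSTRINGEXTPROC>(
      wglGetProcAddress("wglGetExtensionsStringEXT"));
  if (getExtensions == nullptr) {
    return false;
  }
  const char* extensions = getExtensions();
  if (extensions == nullptr || std::strstr(extensions, "WGL_EXT_swap_control") == nullptr) {
    return false;
  }
  auto swapInterval = reinterpret_cast<PFNWGLSWAPINTERVALEXTPROC>(
      wglGetProcAddress("wglSwapIntervalEXT"));
  return swapInterval != nullptr && swapInterval(1) != FALSE;  // 1 = wait for one vblank
}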
+typedef BOOL (WINAPI * PFNWGLGETDIGITALVIDEOPARAMETERSI3DPROC) (HDC hDC, int iAttribute, int *piValue); +typedef BOOL (WINAPI * PFNWGLSETDIGITALVIDEOPARAMETERSI3DPROC) (HDC hDC, int iAttribute, const int *piValue); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetDigitalVideoParametersI3D (HDC hDC, int iAttribute, int *piValue); +BOOL WINAPI wglSetDigitalVideoParametersI3D (HDC hDC, int iAttribute, const int *piValue); +#endif +#endif /* WGL_I3D_digital_video_control */ + +#ifndef WGL_I3D_gamma +#define WGL_I3D_gamma 1 +#define WGL_GAMMA_TABLE_SIZE_I3D 0x204E +#define WGL_GAMMA_EXCLUDE_DESKTOP_I3D 0x204F +typedef BOOL (WINAPI * PFNWGLGETGAMMATABLEPARAMETERSI3DPROC) (HDC hDC, int iAttribute, int *piValue); +typedef BOOL (WINAPI * PFNWGLSETGAMMATABLEPARAMETERSI3DPROC) (HDC hDC, int iAttribute, const int *piValue); +typedef BOOL (WINAPI * PFNWGLGETGAMMATABLEI3DPROC) (HDC hDC, int iEntries, USHORT *puRed, USHORT *puGreen, USHORT *puBlue); +typedef BOOL (WINAPI * PFNWGLSETGAMMATABLEI3DPROC) (HDC hDC, int iEntries, const USHORT *puRed, const USHORT *puGreen, const USHORT *puBlue); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetGammaTableParametersI3D (HDC hDC, int iAttribute, int *piValue); +BOOL WINAPI wglSetGammaTableParametersI3D (HDC hDC, int iAttribute, const int *piValue); +BOOL WINAPI wglGetGammaTableI3D (HDC hDC, int iEntries, USHORT *puRed, USHORT *puGreen, USHORT *puBlue); +BOOL WINAPI wglSetGammaTableI3D (HDC hDC, int iEntries, const USHORT *puRed, const USHORT *puGreen, const USHORT *puBlue); +#endif +#endif /* WGL_I3D_gamma */ + +#ifndef WGL_I3D_genlock +#define WGL_I3D_genlock 1 +#define WGL_GENLOCK_SOURCE_MULTIVIEW_I3D 0x2044 +#define WGL_GENLOCK_SOURCE_EXTERNAL_SYNC_I3D 0x2045 +#define WGL_GENLOCK_SOURCE_EXTERNAL_FIELD_I3D 0x2046 +#define WGL_GENLOCK_SOURCE_EXTERNAL_TTL_I3D 0x2047 +#define WGL_GENLOCK_SOURCE_DIGITAL_SYNC_I3D 0x2048 +#define WGL_GENLOCK_SOURCE_DIGITAL_FIELD_I3D 0x2049 +#define WGL_GENLOCK_SOURCE_EDGE_FALLING_I3D 0x204A +#define WGL_GENLOCK_SOURCE_EDGE_RISING_I3D 0x204B +#define WGL_GENLOCK_SOURCE_EDGE_BOTH_I3D 0x204C +typedef BOOL (WINAPI * PFNWGLENABLEGENLOCKI3DPROC) (HDC hDC); +typedef BOOL (WINAPI * PFNWGLDISABLEGENLOCKI3DPROC) (HDC hDC); +typedef BOOL (WINAPI * PFNWGLISENABLEDGENLOCKI3DPROC) (HDC hDC, BOOL *pFlag); +typedef BOOL (WINAPI * PFNWGLGENLOCKSOURCEI3DPROC) (HDC hDC, UINT uSource); +typedef BOOL (WINAPI * PFNWGLGETGENLOCKSOURCEI3DPROC) (HDC hDC, UINT *uSource); +typedef BOOL (WINAPI * PFNWGLGENLOCKSOURCEEDGEI3DPROC) (HDC hDC, UINT uEdge); +typedef BOOL (WINAPI * PFNWGLGETGENLOCKSOURCEEDGEI3DPROC) (HDC hDC, UINT *uEdge); +typedef BOOL (WINAPI * PFNWGLGENLOCKSAMPLERATEI3DPROC) (HDC hDC, UINT uRate); +typedef BOOL (WINAPI * PFNWGLGETGENLOCKSAMPLERATEI3DPROC) (HDC hDC, UINT *uRate); +typedef BOOL (WINAPI * PFNWGLGENLOCKSOURCEDELAYI3DPROC) (HDC hDC, UINT uDelay); +typedef BOOL (WINAPI * PFNWGLGETGENLOCKSOURCEDELAYI3DPROC) (HDC hDC, UINT *uDelay); +typedef BOOL (WINAPI * PFNWGLQUERYGENLOCKMAXSOURCEDELAYI3DPROC) (HDC hDC, UINT *uMaxLineDelay, UINT *uMaxPixelDelay); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglEnableGenlockI3D (HDC hDC); +BOOL WINAPI wglDisableGenlockI3D (HDC hDC); +BOOL WINAPI wglIsEnabledGenlockI3D (HDC hDC, BOOL *pFlag); +BOOL WINAPI wglGenlockSourceI3D (HDC hDC, UINT uSource); +BOOL WINAPI wglGetGenlockSourceI3D (HDC hDC, UINT *uSource); +BOOL WINAPI wglGenlockSourceEdgeI3D (HDC hDC, UINT uEdge); +BOOL WINAPI wglGetGenlockSourceEdgeI3D (HDC hDC, UINT *uEdge); +BOOL WINAPI wglGenlockSampleRateI3D (HDC hDC, UINT uRate); +BOOL WINAPI 
wglGetGenlockSampleRateI3D (HDC hDC, UINT *uRate); +BOOL WINAPI wglGenlockSourceDelayI3D (HDC hDC, UINT uDelay); +BOOL WINAPI wglGetGenlockSourceDelayI3D (HDC hDC, UINT *uDelay); +BOOL WINAPI wglQueryGenlockMaxSourceDelayI3D (HDC hDC, UINT *uMaxLineDelay, UINT *uMaxPixelDelay); +#endif +#endif /* WGL_I3D_genlock */ + +#ifndef WGL_I3D_image_buffer +#define WGL_I3D_image_buffer 1 +#define WGL_IMAGE_BUFFER_MIN_ACCESS_I3D 0x00000001 +#define WGL_IMAGE_BUFFER_LOCK_I3D 0x00000002 +typedef LPVOID (WINAPI * PFNWGLCREATEIMAGEBUFFERI3DPROC) (HDC hDC, DWORD dwSize, UINT uFlags); +typedef BOOL (WINAPI * PFNWGLDESTROYIMAGEBUFFERI3DPROC) (HDC hDC, LPVOID pAddress); +typedef BOOL (WINAPI * PFNWGLASSOCIATEIMAGEBUFFEREVENTSI3DPROC) (HDC hDC, const HANDLE *pEvent, const LPVOID *pAddress, const DWORD *pSize, UINT count); +typedef BOOL (WINAPI * PFNWGLRELEASEIMAGEBUFFEREVENTSI3DPROC) (HDC hDC, const LPVOID *pAddress, UINT count); +#ifdef WGL_WGLEXT_PROTOTYPES +LPVOID WINAPI wglCreateImageBufferI3D (HDC hDC, DWORD dwSize, UINT uFlags); +BOOL WINAPI wglDestroyImageBufferI3D (HDC hDC, LPVOID pAddress); +BOOL WINAPI wglAssociateImageBufferEventsI3D (HDC hDC, const HANDLE *pEvent, const LPVOID *pAddress, const DWORD *pSize, UINT count); +BOOL WINAPI wglReleaseImageBufferEventsI3D (HDC hDC, const LPVOID *pAddress, UINT count); +#endif +#endif /* WGL_I3D_image_buffer */ + +#ifndef WGL_I3D_swap_frame_lock +#define WGL_I3D_swap_frame_lock 1 +typedef BOOL (WINAPI * PFNWGLENABLEFRAMELOCKI3DPROC) (void); +typedef BOOL (WINAPI * PFNWGLDISABLEFRAMELOCKI3DPROC) (void); +typedef BOOL (WINAPI * PFNWGLISENABLEDFRAMELOCKI3DPROC) (BOOL *pFlag); +typedef BOOL (WINAPI * PFNWGLQUERYFRAMELOCKMASTERI3DPROC) (BOOL *pFlag); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglEnableFrameLockI3D (void); +BOOL WINAPI wglDisableFrameLockI3D (void); +BOOL WINAPI wglIsEnabledFrameLockI3D (BOOL *pFlag); +BOOL WINAPI wglQueryFrameLockMasterI3D (BOOL *pFlag); +#endif +#endif /* WGL_I3D_swap_frame_lock */ + +#ifndef WGL_I3D_swap_frame_usage +#define WGL_I3D_swap_frame_usage 1 +typedef BOOL (WINAPI * PFNWGLGETFRAMEUSAGEI3DPROC) (float *pUsage); +typedef BOOL (WINAPI * PFNWGLBEGINFRAMETRACKINGI3DPROC) (void); +typedef BOOL (WINAPI * PFNWGLENDFRAMETRACKINGI3DPROC) (void); +typedef BOOL (WINAPI * PFNWGLQUERYFRAMETRACKINGI3DPROC) (DWORD *pFrameCount, DWORD *pMissedFrames, float *pLastMissedUsage); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetFrameUsageI3D (float *pUsage); +BOOL WINAPI wglBeginFrameTrackingI3D (void); +BOOL WINAPI wglEndFrameTrackingI3D (void); +BOOL WINAPI wglQueryFrameTrackingI3D (DWORD *pFrameCount, DWORD *pMissedFrames, float *pLastMissedUsage); +#endif +#endif /* WGL_I3D_swap_frame_usage */ + +#ifndef WGL_NV_DX_interop +#define WGL_NV_DX_interop 1 +#define WGL_ACCESS_READ_ONLY_NV 0x00000000 +#define WGL_ACCESS_READ_WRITE_NV 0x00000001 +#define WGL_ACCESS_WRITE_DISCARD_NV 0x00000002 +typedef BOOL (WINAPI * PFNWGLDXSETRESOURCESHAREHANDLENVPROC) (void *dxObject, HANDLE shareHandle); +typedef HANDLE (WINAPI * PFNWGLDXOPENDEVICENVPROC) (void *dxDevice); +typedef BOOL (WINAPI * PFNWGLDXCLOSEDEVICENVPROC) (HANDLE hDevice); +typedef HANDLE (WINAPI * PFNWGLDXREGISTEROBJECTNVPROC) (HANDLE hDevice, void *dxObject, GLuint name, GLenum type, GLenum access); +typedef BOOL (WINAPI * PFNWGLDXUNREGISTEROBJECTNVPROC) (HANDLE hDevice, HANDLE hObject); +typedef BOOL (WINAPI * PFNWGLDXOBJECTACCESSNVPROC) (HANDLE hObject, GLenum access); +typedef BOOL (WINAPI * PFNWGLDXLOCKOBJECTSNVPROC) (HANDLE hDevice, GLint count, HANDLE *hObjects); +typedef BOOL 
(WINAPI * PFNWGLDXUNLOCKOBJECTSNVPROC) (HANDLE hDevice, GLint count, HANDLE *hObjects); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglDXSetResourceShareHandleNV (void *dxObject, HANDLE shareHandle); +HANDLE WINAPI wglDXOpenDeviceNV (void *dxDevice); +BOOL WINAPI wglDXCloseDeviceNV (HANDLE hDevice); +HANDLE WINAPI wglDXRegisterObjectNV (HANDLE hDevice, void *dxObject, GLuint name, GLenum type, GLenum access); +BOOL WINAPI wglDXUnregisterObjectNV (HANDLE hDevice, HANDLE hObject); +BOOL WINAPI wglDXObjectAccessNV (HANDLE hObject, GLenum access); +BOOL WINAPI wglDXLockObjectsNV (HANDLE hDevice, GLint count, HANDLE *hObjects); +BOOL WINAPI wglDXUnlockObjectsNV (HANDLE hDevice, GLint count, HANDLE *hObjects); +#endif +#endif /* WGL_NV_DX_interop */ + +#ifndef WGL_NV_DX_interop2 +#define WGL_NV_DX_interop2 1 +#endif /* WGL_NV_DX_interop2 */ + +#ifndef WGL_NV_copy_image +#define WGL_NV_copy_image 1 +typedef BOOL (WINAPI * PFNWGLCOPYIMAGESUBDATANVPROC) (HGLRC hSrcRC, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, HGLRC hDstRC, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei width, GLsizei height, GLsizei depth); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglCopyImageSubDataNV (HGLRC hSrcRC, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, HGLRC hDstRC, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei width, GLsizei height, GLsizei depth); +#endif +#endif /* WGL_NV_copy_image */ + +#ifndef WGL_NV_delay_before_swap +#define WGL_NV_delay_before_swap 1 +typedef BOOL (WINAPI * PFNWGLDELAYBEFORESWAPNVPROC) (HDC hDC, GLfloat seconds); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglDelayBeforeSwapNV (HDC hDC, GLfloat seconds); +#endif +#endif /* WGL_NV_delay_before_swap */ + +#ifndef WGL_NV_float_buffer +#define WGL_NV_float_buffer 1 +#define WGL_FLOAT_COMPONENTS_NV 0x20B0 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_FLOAT_R_NV 0x20B1 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_FLOAT_RG_NV 0x20B2 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_FLOAT_RGB_NV 0x20B3 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_FLOAT_RGBA_NV 0x20B4 +#define WGL_TEXTURE_FLOAT_R_NV 0x20B5 +#define WGL_TEXTURE_FLOAT_RG_NV 0x20B6 +#define WGL_TEXTURE_FLOAT_RGB_NV 0x20B7 +#define WGL_TEXTURE_FLOAT_RGBA_NV 0x20B8 +#endif /* WGL_NV_float_buffer */ + +#ifndef WGL_NV_gpu_affinity +#define WGL_NV_gpu_affinity 1 +DECLARE_HANDLE(HGPUNV); +struct _GPU_DEVICE { + DWORD cb; + CHAR DeviceName[32]; + CHAR DeviceString[128]; + DWORD Flags; + RECT rcVirtualScreen; +}; +typedef struct _GPU_DEVICE *PGPU_DEVICE; +#define ERROR_INCOMPATIBLE_AFFINITY_MASKS_NV 0x20D0 +#define ERROR_MISSING_AFFINITY_MASK_NV 0x20D1 +typedef BOOL (WINAPI * PFNWGLENUMGPUSNVPROC) (UINT iGpuIndex, HGPUNV *phGpu); +typedef BOOL (WINAPI * PFNWGLENUMGPUDEVICESNVPROC) (HGPUNV hGpu, UINT iDeviceIndex, PGPU_DEVICE lpGpuDevice); +typedef HDC (WINAPI * PFNWGLCREATEAFFINITYDCNVPROC) (const HGPUNV *phGpuList); +typedef BOOL (WINAPI * PFNWGLENUMGPUSFROMAFFINITYDCNVPROC) (HDC hAffinityDC, UINT iGpuIndex, HGPUNV *hGpu); +typedef BOOL (WINAPI * PFNWGLDELETEDCNVPROC) (HDC hdc); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglEnumGpusNV (UINT iGpuIndex, HGPUNV *phGpu); +BOOL WINAPI wglEnumGpuDevicesNV (HGPUNV hGpu, UINT iDeviceIndex, PGPU_DEVICE lpGpuDevice); +HDC WINAPI wglCreateAffinityDCNV (const HGPUNV *phGpuList); +BOOL WINAPI wglEnumGpusFromAffinityDCNV (HDC hAffinityDC, UINT iGpuIndex, HGPUNV *hGpu); +BOOL WINAPI wglDeleteDCNV (HDC hdc); +#endif 
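Editor's note, not part of the patch: the WGL_ARB_create_context and WGL_ARB_create_context_profile tokens declared earlier in this header are consumed the same way. A sketch of requesting a core-profile context; it assumes the same includes as the previous sketch, that hdc already has a pixel format set, and that a temporary legacy context is current so wglGetProcAddress can resolve the entry point:

// Creates an OpenGL 3.3 core-profile context, or returns nullptr if the
// extension is unavailable (callers can fall back to wglCreateContext).
static HGLRC CreateCoreProfileContext(HDC hdc) {
  auto createContextAttribs = reinterpret_cast<PFNWGLCREATECONTEXTATTRIBSARBPROC>(
      wglGetProcAddress("wglCreateContextAttribsARB"));
  if (createContextAttribs == nullptr) {
    return nullptr;
  }
  const int attribs[] = {
      WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
      WGL_CONTEXT_MINOR_VERSION_ARB, 3,
      WGL_CONTEXT_PROFILE_MASK_ARB,  WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
      0  // the attribute list is zero-terminated
  };
  return createContextAttribs(hdc, nullptr, attribs);  // nullptr = no share context
}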
+#endif /* WGL_NV_gpu_affinity */ + +#ifndef WGL_NV_multisample_coverage +#define WGL_NV_multisample_coverage 1 +#define WGL_COVERAGE_SAMPLES_NV 0x2042 +#define WGL_COLOR_SAMPLES_NV 0x20B9 +#endif /* WGL_NV_multisample_coverage */ + +#ifndef WGL_NV_present_video +#define WGL_NV_present_video 1 +DECLARE_HANDLE(HVIDEOOUTPUTDEVICENV); +#define WGL_NUM_VIDEO_SLOTS_NV 0x20F0 +typedef int (WINAPI * PFNWGLENUMERATEVIDEODEVICESNVPROC) (HDC hDC, HVIDEOOUTPUTDEVICENV *phDeviceList); +typedef BOOL (WINAPI * PFNWGLBINDVIDEODEVICENVPROC) (HDC hDC, unsigned int uVideoSlot, HVIDEOOUTPUTDEVICENV hVideoDevice, const int *piAttribList); +typedef BOOL (WINAPI * PFNWGLQUERYCURRENTCONTEXTNVPROC) (int iAttribute, int *piValue); +#ifdef WGL_WGLEXT_PROTOTYPES +int WINAPI wglEnumerateVideoDevicesNV (HDC hDC, HVIDEOOUTPUTDEVICENV *phDeviceList); +BOOL WINAPI wglBindVideoDeviceNV (HDC hDC, unsigned int uVideoSlot, HVIDEOOUTPUTDEVICENV hVideoDevice, const int *piAttribList); +BOOL WINAPI wglQueryCurrentContextNV (int iAttribute, int *piValue); +#endif +#endif /* WGL_NV_present_video */ + +#ifndef WGL_NV_render_depth_texture +#define WGL_NV_render_depth_texture 1 +#define WGL_BIND_TO_TEXTURE_DEPTH_NV 0x20A3 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_DEPTH_NV 0x20A4 +#define WGL_DEPTH_TEXTURE_FORMAT_NV 0x20A5 +#define WGL_TEXTURE_DEPTH_COMPONENT_NV 0x20A6 +#define WGL_DEPTH_COMPONENT_NV 0x20A7 +#endif /* WGL_NV_render_depth_texture */ + +#ifndef WGL_NV_render_texture_rectangle +#define WGL_NV_render_texture_rectangle 1 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_RGB_NV 0x20A0 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_RGBA_NV 0x20A1 +#define WGL_TEXTURE_RECTANGLE_NV 0x20A2 +#endif /* WGL_NV_render_texture_rectangle */ + +#ifndef WGL_NV_swap_group +#define WGL_NV_swap_group 1 +typedef BOOL (WINAPI * PFNWGLJOINSWAPGROUPNVPROC) (HDC hDC, GLuint group); +typedef BOOL (WINAPI * PFNWGLBINDSWAPBARRIERNVPROC) (GLuint group, GLuint barrier); +typedef BOOL (WINAPI * PFNWGLQUERYSWAPGROUPNVPROC) (HDC hDC, GLuint *group, GLuint *barrier); +typedef BOOL (WINAPI * PFNWGLQUERYMAXSWAPGROUPSNVPROC) (HDC hDC, GLuint *maxGroups, GLuint *maxBarriers); +typedef BOOL (WINAPI * PFNWGLQUERYFRAMECOUNTNVPROC) (HDC hDC, GLuint *count); +typedef BOOL (WINAPI * PFNWGLRESETFRAMECOUNTNVPROC) (HDC hDC); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglJoinSwapGroupNV (HDC hDC, GLuint group); +BOOL WINAPI wglBindSwapBarrierNV (GLuint group, GLuint barrier); +BOOL WINAPI wglQuerySwapGroupNV (HDC hDC, GLuint *group, GLuint *barrier); +BOOL WINAPI wglQueryMaxSwapGroupsNV (HDC hDC, GLuint *maxGroups, GLuint *maxBarriers); +BOOL WINAPI wglQueryFrameCountNV (HDC hDC, GLuint *count); +BOOL WINAPI wglResetFrameCountNV (HDC hDC); +#endif +#endif /* WGL_NV_swap_group */ + +#ifndef WGL_NV_vertex_array_range +#define WGL_NV_vertex_array_range 1 +typedef void *(WINAPI * PFNWGLALLOCATEMEMORYNVPROC) (GLsizei size, GLfloat readfreq, GLfloat writefreq, GLfloat priority); +typedef void (WINAPI * PFNWGLFREEMEMORYNVPROC) (void *pointer); +#ifdef WGL_WGLEXT_PROTOTYPES +void *WINAPI wglAllocateMemoryNV (GLsizei size, GLfloat readfreq, GLfloat writefreq, GLfloat priority); +void WINAPI wglFreeMemoryNV (void *pointer); +#endif +#endif /* WGL_NV_vertex_array_range */ + +#ifndef WGL_NV_video_capture +#define WGL_NV_video_capture 1 +DECLARE_HANDLE(HVIDEOINPUTDEVICENV); +#define WGL_UNIQUE_ID_NV 0x20CE +#define WGL_NUM_VIDEO_CAPTURE_SLOTS_NV 0x20CF +typedef BOOL (WINAPI * PFNWGLBINDVIDEOCAPTUREDEVICENVPROC) (UINT uVideoSlot, HVIDEOINPUTDEVICENV hDevice); +typedef UINT (WINAPI * 
PFNWGLENUMERATEVIDEOCAPTUREDEVICESNVPROC) (HDC hDc, HVIDEOINPUTDEVICENV *phDeviceList); +typedef BOOL (WINAPI * PFNWGLLOCKVIDEOCAPTUREDEVICENVPROC) (HDC hDc, HVIDEOINPUTDEVICENV hDevice); +typedef BOOL (WINAPI * PFNWGLQUERYVIDEOCAPTUREDEVICENVPROC) (HDC hDc, HVIDEOINPUTDEVICENV hDevice, int iAttribute, int *piValue); +typedef BOOL (WINAPI * PFNWGLRELEASEVIDEOCAPTUREDEVICENVPROC) (HDC hDc, HVIDEOINPUTDEVICENV hDevice); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglBindVideoCaptureDeviceNV (UINT uVideoSlot, HVIDEOINPUTDEVICENV hDevice); +UINT WINAPI wglEnumerateVideoCaptureDevicesNV (HDC hDc, HVIDEOINPUTDEVICENV *phDeviceList); +BOOL WINAPI wglLockVideoCaptureDeviceNV (HDC hDc, HVIDEOINPUTDEVICENV hDevice); +BOOL WINAPI wglQueryVideoCaptureDeviceNV (HDC hDc, HVIDEOINPUTDEVICENV hDevice, int iAttribute, int *piValue); +BOOL WINAPI wglReleaseVideoCaptureDeviceNV (HDC hDc, HVIDEOINPUTDEVICENV hDevice); +#endif +#endif /* WGL_NV_video_capture */ + +#ifndef WGL_NV_video_output +#define WGL_NV_video_output 1 +DECLARE_HANDLE(HPVIDEODEV); +#define WGL_BIND_TO_VIDEO_RGB_NV 0x20C0 +#define WGL_BIND_TO_VIDEO_RGBA_NV 0x20C1 +#define WGL_BIND_TO_VIDEO_RGB_AND_DEPTH_NV 0x20C2 +#define WGL_VIDEO_OUT_COLOR_NV 0x20C3 +#define WGL_VIDEO_OUT_ALPHA_NV 0x20C4 +#define WGL_VIDEO_OUT_DEPTH_NV 0x20C5 +#define WGL_VIDEO_OUT_COLOR_AND_ALPHA_NV 0x20C6 +#define WGL_VIDEO_OUT_COLOR_AND_DEPTH_NV 0x20C7 +#define WGL_VIDEO_OUT_FRAME 0x20C8 +#define WGL_VIDEO_OUT_FIELD_1 0x20C9 +#define WGL_VIDEO_OUT_FIELD_2 0x20CA +#define WGL_VIDEO_OUT_STACKED_FIELDS_1_2 0x20CB +#define WGL_VIDEO_OUT_STACKED_FIELDS_2_1 0x20CC +typedef BOOL (WINAPI * PFNWGLGETVIDEODEVICENVPROC) (HDC hDC, int numDevices, HPVIDEODEV *hVideoDevice); +typedef BOOL (WINAPI * PFNWGLRELEASEVIDEODEVICENVPROC) (HPVIDEODEV hVideoDevice); +typedef BOOL (WINAPI * PFNWGLBINDVIDEOIMAGENVPROC) (HPVIDEODEV hVideoDevice, HPBUFFERARB hPbuffer, int iVideoBuffer); +typedef BOOL (WINAPI * PFNWGLRELEASEVIDEOIMAGENVPROC) (HPBUFFERARB hPbuffer, int iVideoBuffer); +typedef BOOL (WINAPI * PFNWGLSENDPBUFFERTOVIDEONVPROC) (HPBUFFERARB hPbuffer, int iBufferType, unsigned long *pulCounterPbuffer, BOOL bBlock); +typedef BOOL (WINAPI * PFNWGLGETVIDEOINFONVPROC) (HPVIDEODEV hpVideoDevice, unsigned long *pulCounterOutputPbuffer, unsigned long *pulCounterOutputVideo); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetVideoDeviceNV (HDC hDC, int numDevices, HPVIDEODEV *hVideoDevice); +BOOL WINAPI wglReleaseVideoDeviceNV (HPVIDEODEV hVideoDevice); +BOOL WINAPI wglBindVideoImageNV (HPVIDEODEV hVideoDevice, HPBUFFERARB hPbuffer, int iVideoBuffer); +BOOL WINAPI wglReleaseVideoImageNV (HPBUFFERARB hPbuffer, int iVideoBuffer); +BOOL WINAPI wglSendPbufferToVideoNV (HPBUFFERARB hPbuffer, int iBufferType, unsigned long *pulCounterPbuffer, BOOL bBlock); +BOOL WINAPI wglGetVideoInfoNV (HPVIDEODEV hpVideoDevice, unsigned long *pulCounterOutputPbuffer, unsigned long *pulCounterOutputVideo); +#endif +#endif /* WGL_NV_video_output */ + +#ifndef WGL_OML_sync_control +#define WGL_OML_sync_control 1 +typedef BOOL (WINAPI * PFNWGLGETSYNCVALUESOMLPROC) (HDC hdc, INT64 *ust, INT64 *msc, INT64 *sbc); +typedef BOOL (WINAPI * PFNWGLGETMSCRATEOMLPROC) (HDC hdc, INT32 *numerator, INT32 *denominator); +typedef INT64 (WINAPI * PFNWGLSWAPBUFFERSMSCOMLPROC) (HDC hdc, INT64 target_msc, INT64 divisor, INT64 remainder); +typedef INT64 (WINAPI * PFNWGLSWAPLAYERBUFFERSMSCOMLPROC) (HDC hdc, int fuPlanes, INT64 target_msc, INT64 divisor, INT64 remainder); +typedef BOOL (WINAPI * PFNWGLWAITFORMSCOMLPROC) (HDC hdc, INT64 
target_msc, INT64 divisor, INT64 remainder, INT64 *ust, INT64 *msc, INT64 *sbc); +typedef BOOL (WINAPI * PFNWGLWAITFORSBCOMLPROC) (HDC hdc, INT64 target_sbc, INT64 *ust, INT64 *msc, INT64 *sbc); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetSyncValuesOML (HDC hdc, INT64 *ust, INT64 *msc, INT64 *sbc); +BOOL WINAPI wglGetMscRateOML (HDC hdc, INT32 *numerator, INT32 *denominator); +INT64 WINAPI wglSwapBuffersMscOML (HDC hdc, INT64 target_msc, INT64 divisor, INT64 remainder); +INT64 WINAPI wglSwapLayerBuffersMscOML (HDC hdc, int fuPlanes, INT64 target_msc, INT64 divisor, INT64 remainder); +BOOL WINAPI wglWaitForMscOML (HDC hdc, INT64 target_msc, INT64 divisor, INT64 remainder, INT64 *ust, INT64 *msc, INT64 *sbc); +BOOL WINAPI wglWaitForSbcOML (HDC hdc, INT64 target_sbc, INT64 *ust, INT64 *msc, INT64 *sbc); +#endif +#endif /* WGL_OML_sync_control */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/angle/include/angle_gl.h b/vendor/angle/include/angle_gl.h new file mode 100644 index 00000000..4ff294bf --- /dev/null +++ b/vendor/angle/include/angle_gl.h @@ -0,0 +1,27 @@ +// +// Copyright 2014 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// angle_gl.h: +// Includes all necessary GL headers and definitions for ANGLE. +// + +#ifndef ANGLEGL_H_ +#define ANGLEGL_H_ + +#include "GLES/gl.h" +#include "GLES/glext.h" +#include "GLES2/gl2.h" +#include "GLES2/gl2ext.h" +#include "GLES3/gl3.h" +#include "GLES3/gl31.h" +#include "GLES3/gl32.h" + +// TODO(http://anglebug.com/3730): Autogenerate these enums from gl.xml +// HACK: Defines for queries that are not in GLES +#define GL_CONTEXT_PROFILE_MASK 0x9126 +#define GL_CONTEXT_COMPATIBILITY_PROFILE_BIT 0x00000002 +#define GL_CONTEXT_CORE_PROFILE_BIT 0x00000001 + +#endif // ANGLEGL_H_ diff --git a/vendor/angle/include/angle_windowsstore.h b/vendor/angle/include/angle_windowsstore.h new file mode 100644 index 00000000..a0d0fda6 --- /dev/null +++ b/vendor/angle/include/angle_windowsstore.h @@ -0,0 +1,51 @@ +// +// Copyright 2014 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// angle_windowsstore.h: + +#ifndef ANGLE_WINDOWSSTORE_H_ +#define ANGLE_WINDOWSSTORE_H_ + +// The following properties can be set on the CoreApplication to support additional +// ANGLE configuration options. +// +// The Visual Studio sample templates provided with this version of ANGLE have examples +// of how to set these property values. + +// +// Property: EGLNativeWindowTypeProperty +// Type: IInspectable +// Description: Set this property to specify the window type to use for creating a surface. +// If this property is missing, surface creation will fail. +// +const wchar_t EGLNativeWindowTypeProperty[] = L"EGLNativeWindowTypeProperty"; + +// +// Property: EGLRenderSurfaceSizeProperty +// Type: Size +// Description: Set this property to specify a preferred size in pixels of the render surface. +// The render surface size width and height must be greater than 0. +// If this property is set, then the render surface size is fixed. +// The render surface will then be scaled to the window dimensions. +// If this property is missing, a default behavior will be provided. +// The default behavior uses the window size if a CoreWindow is specified or +// the size of the SwapChainPanel control if one is specified. 
+// +const wchar_t EGLRenderSurfaceSizeProperty[] = L"EGLRenderSurfaceSizeProperty"; + +// +// Property: EGLRenderResolutionScaleProperty +// Type: Single +// Description: Use this to specify a preferred scale for the render surface compared to the window. +// For example, if the window is 800x480, and: +// - scale is set to 0.5f then the surface will be 400x240 +// - scale is set to 1.2f then the surface will be 960x576 +// If the window resizes or rotates then the surface will resize accordingly. +// EGLRenderResolutionScaleProperty and EGLRenderSurfaceSizeProperty cannot both be set. +// The scale factor should be > 0.0f. +// +const wchar_t EGLRenderResolutionScaleProperty[] = L"EGLRenderResolutionScaleProperty"; + +#endif // ANGLE_WINDOWSSTORE_H_ diff --git a/vendor/angle/include/export.h b/vendor/angle/include/export.h new file mode 100644 index 00000000..6a8e30fd --- /dev/null +++ b/vendor/angle/include/export.h @@ -0,0 +1,40 @@ +// +// Copyright 2014 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// + +// export.h : Defines ANGLE_EXPORT, a macro for exporting functions from the DLL + +#ifndef LIBGLESV2_EXPORT_H_ +#define LIBGLESV2_EXPORT_H_ + +#if !defined(ANGLE_EXPORT) +# if defined(_WIN32) +# if defined(LIBGLESV2_IMPLEMENTATION) || defined(LIBANGLE_IMPLEMENTATION) || \ + defined(LIBFEATURE_SUPPORT_IMPLEMENTATION) || defined(LIBCL_IMPLEMENTATION) +# define ANGLE_EXPORT __declspec(dllexport) +# else +# define ANGLE_EXPORT __declspec(dllimport) +# endif +# elif defined(__GNUC__) +# if defined(LIBGLESV2_IMPLEMENTATION) || defined(LIBANGLE_IMPLEMENTATION) || \ + defined(LIBFEATURE_SUPPORT_IMPLEMENTATION) || defined(LIBCL_IMPLEMENTATION) +# define ANGLE_EXPORT __attribute__((visibility("default"))) +# else +# define ANGLE_EXPORT +# endif +# else +# define ANGLE_EXPORT +# endif +#endif // !defined(ANGLE_EXPORT) + +#if !defined(ANGLE_NO_EXPORT) +# if defined(__GNUC__) +# define ANGLE_NO_EXPORT __attribute__((visibility("hidden"))) +# else +# define ANGLE_NO_EXPORT +# endif +#endif // !defined(ANGLE_NO_EXPORT) + +#endif // LIBGLESV2_EXPORT_H_ diff --git a/vendor/angle/include/platform/Feature.h b/vendor/angle/include/platform/Feature.h new file mode 100644 index 00000000..3ac263d6 --- /dev/null +++ b/vendor/angle/include/platform/Feature.h @@ -0,0 +1,200 @@ +// +// Copyright 2019 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// Feature.h: Definition of structs to hold feature/workaround information. 
+//
+
+#ifndef ANGLE_PLATFORM_FEATURE_H_
+#define ANGLE_PLATFORM_FEATURE_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#define ANGLE_FEATURE_CONDITION(set, feature, cond) \
+    do \
+    { \
+        (set)->feature.enabled = cond; \
+        (set)->feature.condition = ANGLE_STRINGIFY(cond); \
+    } while (0)
+
+namespace angle
+{
+
+enum class FeatureCategory
+{
+    FrontendWorkarounds,
+    FrontendFeatures,
+    OpenGLWorkarounds,
+    D3DWorkarounds,
+    D3DCompilerWorkarounds,
+    VulkanWorkarounds,
+    VulkanFeatures,
+    MetalFeatures,
+};
+
+constexpr char kFeatureCategoryFrontendWorkarounds[] = "Frontend workarounds";
+constexpr char kFeatureCategoryFrontendFeatures[] = "Frontend features";
+constexpr char kFeatureCategoryOpenGLWorkarounds[] = "OpenGL workarounds";
+constexpr char kFeatureCategoryD3DWorkarounds[] = "D3D workarounds";
+constexpr char kFeatureCategoryD3DCompilerWorkarounds[] = "D3D compiler workarounds";
+constexpr char kFeatureCategoryVulkanWorkarounds[] = "Vulkan workarounds";
+constexpr char kFeatureCategoryVulkanFeatures[] = "Vulkan features";
+constexpr char kFeatureCategoryMetalFeatures[] = "Metal features";
+constexpr char kFeatureCategoryUnknown[] = "Unknown";
+
+inline const char *FeatureCategoryToString(const FeatureCategory &fc)
+{
+    switch (fc)
+    {
+        case FeatureCategory::FrontendWorkarounds:
+            return kFeatureCategoryFrontendWorkarounds;
+            break;
+
+        case FeatureCategory::FrontendFeatures:
+            return kFeatureCategoryFrontendFeatures;
+            break;
+
+        case FeatureCategory::OpenGLWorkarounds:
+            return kFeatureCategoryOpenGLWorkarounds;
+            break;
+
+        case FeatureCategory::D3DWorkarounds:
+            return kFeatureCategoryD3DWorkarounds;
+            break;
+
+        case FeatureCategory::D3DCompilerWorkarounds:
+            return kFeatureCategoryD3DCompilerWorkarounds;
+            break;
+
+        case FeatureCategory::VulkanWorkarounds:
+            return kFeatureCategoryVulkanWorkarounds;
+            break;
+
+        case FeatureCategory::VulkanFeatures:
+            return kFeatureCategoryVulkanFeatures;
+            break;
+
+        case FeatureCategory::MetalFeatures:
+            return kFeatureCategoryMetalFeatures;
+            break;
+
+        default:
+            return kFeatureCategoryUnknown;
+            break;
+    }
+}
+
+constexpr char kFeatureStatusEnabled[] = "enabled";
+constexpr char kFeatureStatusDisabled[] = "disabled";
+
+inline const char *FeatureStatusToString(const bool &status)
+{
+    if (status)
+    {
+        return kFeatureStatusEnabled;
+    }
+    return kFeatureStatusDisabled;
+}
+
+struct Feature;
+
+using FeatureMap = std::map<std::string, Feature *>;
+using FeatureList = std::vector<const Feature *>;
+
+struct Feature
+{
+    Feature(const Feature &other);
+    Feature(const char *name,
+            const FeatureCategory &category,
+            const char *description,
+            FeatureMap *const mapPtr,
+            const char *bug);
+    ~Feature();
+
+    // The name of the workaround, lowercase, camel_case
+    const char *const name;
+
+    // The category that the workaround belongs to. Eg. "Vulkan workarounds"
+    const FeatureCategory category;
+
+    // A short description to be read by the user.
+    const char *const description;
+
+    // A link to the bug, if any
+    const char *const bug;
+
+    // Whether the workaround is enabled or not. Determined by heuristics like vendor ID and
+    // version, but may be overridden to any value.
+    bool enabled = false;
+
+    // A stringified version of the condition used to set 'enabled', e.g. "IsNvidia() && IsApple()"
+    const char *condition;
+};
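Editor's note, not part of the patch: the pieces declared in Feature.h are meant to be combined roughly as follows. A backend derives a struct from FeatureSetBase (defined just below), each Feature member registers itself in the inherited protected `members` map through the mapPtr constructor argument, and helpers such as populateFeatureList, FeatureCategoryToString, and FeatureStatusToString are used for reporting. The struct name, feature name, and bug URL in this sketch are invented for illustration:

#include <cstdio>
#include "platform/Feature.h"

// A minimal feature set; &members refers to the FeatureMap inherited from FeatureSetBase.
struct ExampleFeatures : angle::FeatureSetBase
{
    angle::Feature exampleWorkaround = {"example_workaround",
                                        angle::FeatureCategory::OpenGLWorkarounds,
                                        "Illustrative only", &members, "http://example.com/bug"};
};

void DumpFeatures(const ExampleFeatures &features)
{
    angle::FeatureList list;
    features.populateFeatureList(&list);
    for (const angle::Feature *feature : list)
    {
        std::printf("%s [%s]: %s\n", feature->name,
                    angle::FeatureCategoryToString(feature->category),
                    angle::FeatureStatusToString(feature->enabled));
    }
}

Command-line style overrides would then go through overrideFeatures({"example_workaround"}, false) on a non-const instance.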
+
+inline Feature::Feature(const Feature &other) = default;
+inline Feature::Feature(const char *name,
+                        const FeatureCategory &category,
+                        const char *description,
+                        FeatureMap *const mapPtr,
+                        const char *bug = "")
+    : name(name),
+      category(category),
+      description(description),
+      bug(bug),
+      enabled(false),
+      condition("")
+{
+    if (mapPtr != nullptr)
+    {
+        (*mapPtr)[std::string(name)] = this;
+    }
+}
+
+inline Feature::~Feature() = default;
+
+struct FeatureSetBase
+{
+  public:
+    FeatureSetBase();
+    ~FeatureSetBase();
+
+  private:
+    // Non-copyable
+    FeatureSetBase(const FeatureSetBase &other) = delete;
+    FeatureSetBase &operator=(const FeatureSetBase &other) = delete;
+
+  protected:
+    FeatureMap members = FeatureMap();
+
+  public:
+    void overrideFeatures(const std::vector<std::string> &featureNames, bool enabled)
+    {
+        for (const std::string &name : featureNames)
+        {
+            if (members.find(name) != members.end())
+            {
+                members[name]->enabled = enabled;
+            }
+        }
+    }
+
+    void populateFeatureList(FeatureList *features) const
+    {
+        for (FeatureMap::const_iterator it = members.begin(); it != members.end(); it++)
+        {
+            features->push_back(it->second);
+        }
+    }
+
+    const FeatureMap &getFeatures() const { return members; }
+};
+
+inline FeatureSetBase::FeatureSetBase() = default;
+inline FeatureSetBase::~FeatureSetBase() = default;
+
+}  // namespace angle
+
+#endif  // ANGLE_PLATFORM_FEATURE_H_
diff --git a/vendor/angle/include/platform/FeaturesD3D.h b/vendor/angle/include/platform/FeaturesD3D.h
new file mode 100644
index 00000000..4e458624
--- /dev/null
+++ b/vendor/angle/include/platform/FeaturesD3D.h
@@ -0,0 +1,224 @@
+//
+// Copyright 2015 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+// FeaturesD3D.h: Features and workarounds for D3D driver bugs and other issues.
+
+#ifndef ANGLE_PLATFORM_FEATURESD3D_H_
+#define ANGLE_PLATFORM_FEATURESD3D_H_
+
+#include "platform/Feature.h"
+
+namespace angle
+{
+
+// Workarounds attached to each shader. There is no need to expose information about these
+// workarounds, so a simple bool struct suffices.
+struct CompilerWorkaroundsD3D
+{
+    bool skipOptimization = false;
+
+    bool useMaxOptimization = false;
+
+    // IEEE strictness needs to be enabled for NANs to work.
+    bool enableIEEEStrictness = false;
+};
+
+struct FeaturesD3D : FeatureSetBase
+{
+    FeaturesD3D();
+    ~FeaturesD3D();
+
+    // On some systems, having more rendertargets than necessary slows down the shader.
+    // We can fix this by optimizing those out of the shader. At the same time, we can
+    // work around a bug in some nVidia drivers where they ignore "null" render targets
+    // in D3D11, by compacting the active color attachments list to omit null entries.
+    Feature mrtPerfWorkaround = {"mrt_perf_workaround", FeatureCategory::D3DWorkarounds,
+                                 "Some drivers have a bug where they ignore null render targets",
+                                 &members};
+
+    Feature setDataFasterThanImageUpload = {"set_data_faster_than_image_upload",
+                                            FeatureCategory::D3DWorkarounds,
+                                            "Set data faster than image upload", &members};
+
+    // Some renderers can't disable mipmaps on a mipmapped texture (i.e. solely sample from level
+    // zero, and ignore the other levels). D3D11 Feature Level 10+ does this by setting MaxLOD to
+    // 0.0f in the Sampler state. D3D9 sets D3DSAMP_MIPFILTER to D3DTEXF_NONE. There is no
+    // equivalent to this in D3D11 Feature Level 9_3.
This causes problems when (for example) an + // application creates a mipmapped texture2D, but sets GL_TEXTURE_MIN_FILTER to GL_NEAREST + // (i.e disables mipmaps). To work around this, D3D11 FL9_3 has to create two copies of the + // texture. The textures' level zeros are identical, but only one texture has mips. + Feature zeroMaxLodWorkaround = {"zero_max_lod", FeatureCategory::D3DWorkarounds, + "Missing an option to disable mipmaps on a mipmapped texture", + &members}; + + // Some renderers do not support Geometry Shaders so the Geometry Shader-based PointSprite + // emulation will not work. To work around this, D3D11 FL9_3 has to use a different pointsprite + // emulation that is implemented using instanced quads. + Feature useInstancedPointSpriteEmulation = { + "use_instanced_point_sprite_emulation", FeatureCategory::D3DWorkarounds, + "Some D3D11 renderers do not support geometry shaders for pointsprite emulation", &members}; + + // A bug fixed in NVIDIA driver version 347.88 < x <= 368.81 triggers a TDR when using + // CopySubresourceRegion from a staging texture to a depth/stencil in D3D11. The workaround + // is to use UpdateSubresource to trigger an extra copy. We disable this workaround on newer + // NVIDIA driver versions because of a second driver bug present with the workaround enabled. + // (See: http://anglebug.com/1452) + Feature depthStencilBlitExtraCopy = { + "depth_stencil_blit_extra_copy", FeatureCategory::D3DWorkarounds, + "Bug in some drivers triggers a TDR when using CopySubresourceRegion from a staging " + "texture to a depth/stencil", + &members, "http://anglebug.com/1452"}; + + // The HLSL optimizer has a bug with optimizing "pow" in certain integer-valued expressions. + // We can work around this by expanding the pow into a series of multiplies if we're running + // under the affected compiler. + Feature expandIntegerPowExpressions = { + "expand_integer_pow_expressions", FeatureCategory::D3DWorkarounds, + "The HLSL optimizer has a bug with optimizing 'pow' in certain integer-valued expressions", + &members}; + + // NVIDIA drivers sometimes write out-of-order results to StreamOut buffers when transform + // feedback is used to repeatedly write to the same buffer positions. + Feature flushAfterEndingTransformFeedback = { + "flush_after_ending_transform_feedback", FeatureCategory::D3DWorkarounds, + "Some drivers sometimes write out-of-order results to StreamOut buffers when transform " + "feedback is used to repeatedly write to the same buffer positions", + &members}; + + // Some drivers (NVIDIA) do not take into account the base level of the texture in the results + // of the HLSL GetDimensions builtin. + Feature getDimensionsIgnoresBaseLevel = { + "get_dimensions_ignores_base_level", FeatureCategory::D3DWorkarounds, + "Some drivers do not take into account the base level of the " + "texture in the results of the HLSL GetDimensions builtin", + &members}; + + // On some Intel drivers, HLSL's function texture.Load returns 0 when the parameter Location + // is negative, even if the sum of Offset and Location is in range. This may cause errors when + // translating GLSL's function texelFetchOffset into texture.Load, as it is valid for + // texelFetchOffset to use negative texture coordinates as its parameter P when the sum of P + // and Offset is in range. To work around this, we translate texelFetchOffset into texelFetch + // by adding Offset directly to Location before reading the texture. 
+ Feature preAddTexelFetchOffsets = { + "pre_add_texel_fetch_offsets", FeatureCategory::D3DWorkarounds, + "HLSL's function texture.Load returns 0 when the parameter Location is negative, even if " + "the sum of Offset and Location is in range", + &members}; + + // On some AMD drivers, 1x1 and 2x2 mips of depth/stencil textures aren't sampled correctly. + // We can work around this bug by doing an internal blit to a temporary single-channel texture + // before we sample. + Feature emulateTinyStencilTextures = { + "emulate_tiny_stencil_textures", FeatureCategory::D3DWorkarounds, + "1x1 and 2x2 mips of depth/stencil textures aren't sampled correctly", &members}; + + // In Intel driver, the data with format DXGI_FORMAT_B5G6R5_UNORM will be parsed incorrectly. + // This workaroud will disable B5G6R5 support when it's Intel driver. By default, it will use + // R8G8B8A8 format. This bug is fixed in version 4539 on Intel drivers. + // On older AMD drivers, the data in DXGI_FORMAT_B5G6R5_UNORM becomes corrupted for unknown + // reasons. + Feature disableB5G6R5Support = {"disable_b5g6r5_support", FeatureCategory::D3DWorkarounds, + "Textures with the format " + "DXGI_FORMAT_B5G6R5_UNORM have incorrect data", + &members}; + + // On some Intel drivers, evaluating unary minus operator on integer may get wrong answer in + // vertex shaders. To work around this bug, we translate -(int) into ~(int)+1. + // This driver bug is fixed in 20.19.15.4624. + Feature rewriteUnaryMinusOperator = { + "rewrite_unary_minus_operator", FeatureCategory::D3DWorkarounds, + "Evaluating unary minus operator on integer may get wrong answer in vertex shaders", + &members}; + + // On some Intel drivers, using isnan() on highp float will get wrong answer. To work around + // this bug, we use an expression to emulate function isnan(). + // Tracking bug: https://crbug.com/650547 + // This driver bug is fixed in 21.20.16.4542. + Feature emulateIsnanFloat = {"emulate_isnan_float", FeatureCategory::D3DWorkarounds, + "Using isnan() on highp float will get wrong answer", &members, + "https://crbug.com/650547"}; + + // On some Intel drivers, using clear() may not take effect. To work around this bug, we call + // clear() twice on these platforms. + // Tracking bug: https://crbug.com/655534 + Feature callClearTwice = {"call_clear_twice", FeatureCategory::D3DWorkarounds, + "Using clear() may not take effect", &members, + "https://crbug.com/655534"}; + + // On some Intel drivers, copying from staging storage to constant buffer storage does not + // seem to work. Work around this by keeping system memory storage as a canonical reference + // for buffer data. + // D3D11-only workaround. See http://crbug.com/593024. + Feature useSystemMemoryForConstantBuffers = {"use_system_memory_for_constant_buffers", + FeatureCategory::D3DWorkarounds, + "Copying from staging storage to constant buffer " + "storage does not work", + &members, "https://crbug.com/593024"}; + + // This workaround is for the ANGLE_multiview extension. If enabled the viewport or render + // target slice will be selected in the geometry shader stage. The workaround flag is added to + // make it possible to select the code path in end2end and performance tests. 
+ Feature selectViewInGeometryShader = { + "select_view_in_geometry_shader", FeatureCategory::D3DWorkarounds, + "The viewport or render target slice will be selected in the geometry shader stage for " + "the ANGLE_multiview extension", + &members}; + + // When rendering with no render target on D3D, two bugs lead to incorrect behavior on Intel + // drivers < 4815. The rendering samples always pass neglecting discard statements in pixel + // shader. + // 1. If rendertarget is not set, the pixel shader will be recompiled to drop 'SV_TARGET'. + // When using a pixel shader with no 'SV_TARGET' in a draw, the pixels are always generated even + // if they should be discard by 'discard' statements. + // 2. If ID3D11BlendState.RenderTarget[].RenderTargetWriteMask is 0 and rendertarget is not set, + // then rendering samples also pass neglecting discard statements in pixel shader. + // So we add a mock texture as render target in such case. See http://anglebug.com/2152 + Feature addMockTextureNoRenderTarget = { + "add_mock_texture_no_render_target", FeatureCategory::D3DWorkarounds, + "On some drivers when rendering with no render target, two bugs lead to incorrect behavior", + &members, "http://anglebug.com/2152"}; + + // Don't use D3D constant register zero when allocating space for uniforms in the vertex shader. + // This is targeted to work around a bug in NVIDIA D3D driver version 388.59 where in very + // specific cases the driver would not handle constant register zero correctly. + Feature skipVSConstantRegisterZero = { + "skip_vs_constant_register_zero", FeatureCategory::D3DWorkarounds, + "In specific cases the driver doesn't handle constant register zero correctly", &members}; + + // Forces the value returned from an atomic operations to be always be resolved. This is + // targeted to workaround a bug in NVIDIA D3D driver where the return value from + // RWByteAddressBuffer.InterlockedAdd does not get resolved when used in the .yzw components of + // a RWByteAddressBuffer.Store operation. Only has an effect on HLSL translation. + // http://anglebug.com/3246 + Feature forceAtomicValueResolution = { + "force_atomic_value_resolution", FeatureCategory::D3DWorkarounds, + "On some drivers the return value from RWByteAddressBuffer.InterlockedAdd does not resolve " + "when used in the .yzw components of a RWByteAddressBuffer.Store operation", + &members, "http://anglebug.com/3246"}; + + // Match chromium's robust resource init behaviour by always prefering to upload texture data + // instead of clearing. Clear calls have been observed to cause texture corruption for some + // formats. + Feature allowClearForRobustResourceInit = { + "allow_clear_for_robust_resource_init", FeatureCategory::D3DWorkarounds, + "Some drivers corrupt texture data when clearing for robust resource initialization.", + &members, "http://crbug.com/941620"}; + + // Allow translating uniform block to StructuredBuffer. This is targeted to work around a slow + // fxc compile performance issue with dynamic uniform indexing. 
http://anglebug.com/3682 + Feature allowTranslateUniformBlockToStructuredBuffer = { + "allow_translate_uniform_block_to_structured_buffer", FeatureCategory::D3DWorkarounds, + "There is a slow fxc compile performance issue with dynamic uniform indexing if " + "translating a uniform block with a large array member to cbuffer.", + &members, "http://anglebug.com/3682"}; +}; + +inline FeaturesD3D::FeaturesD3D() = default; +inline FeaturesD3D::~FeaturesD3D() = default; + +} // namespace angle + +#endif // ANGLE_PLATFORM_FEATURESD3D_H_ diff --git a/vendor/angle/include/platform/FeaturesGL.h b/vendor/angle/include/platform/FeaturesGL.h new file mode 100644 index 00000000..91c9cd1d --- /dev/null +++ b/vendor/angle/include/platform/FeaturesGL.h @@ -0,0 +1,539 @@ +// +// Copyright 2015 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// + +// FeaturesGL.h: Features and workarounds for GL driver bugs and other issues. + +#ifndef ANGLE_PLATFORM_FEATURESGL_H_ +#define ANGLE_PLATFORM_FEATURESGL_H_ + +#include "platform/Feature.h" + +namespace angle +{ + +struct FeaturesGL : FeatureSetBase +{ + FeaturesGL(); + ~FeaturesGL(); + + // When writing a float to a normalized integer framebuffer, desktop OpenGL is allowed to write + // one of the two closest normalized integer representations (although round to nearest is + // preferred) (see section 2.3.5.2 of the GL 4.5 core specification). OpenGL ES requires that + // round-to-nearest is used (see "Conversion from Floating-Point to Framebuffer Fixed-Point" in + // section 2.1.2 of the OpenGL ES 2.0.25 spec). This issue only shows up on AMD drivers on + // framebuffer formats that have 1-bit alpha, work around this by using higher precision formats + // instead. + Feature avoid1BitAlphaTextureFormats = {"avoid_1_bit_alpha_texture_formats", + FeatureCategory::OpenGLWorkarounds, + "Issue with 1-bit alpha framebuffer formats", &members}; + + // On some older Intel drivers, GL_RGBA4 is not color renderable, glCheckFramebufferStatus + // returns GL_FRAMEBUFFER_UNSUPPORTED. Work around this by using a known color-renderable + // format. + Feature rgba4IsNotSupportedForColorRendering = {"rgba4_is_not_supported_for_color_rendering", + FeatureCategory::OpenGLWorkarounds, + "GL_RGBA4 is not color renderable", &members}; + + // Newer Intel GPUs natively support ETC2/EAC compressed texture formats. + Feature allowEtcFormats = {"allow_etc_formats", FeatureCategory::OpenGLWorkarounds, + "Enable ETC2/EAC on desktop OpenGL", &members}; + + // When clearing a framebuffer on Intel or AMD drivers, when GL_FRAMEBUFFER_SRGB is enabled, the + // driver clears to the linearized clear color despite the framebuffer not supporting SRGB + // blending. It only seems to do this when the framebuffer has only linear attachments, mixed + // attachments appear to get the correct clear color. 
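  // [Editor's note, illustrative only; not part of the upstream ANGLE header] "Linearized" here
  // refers to the standard per-channel sRGB decode, roughly
  //     linear = (srgb <= 0.04045) ? srgb / 12.92 : pow((srgb + 0.055) / 1.055, 2.4);
  // so a requested clear color of 0.5 lands in a linear attachment as roughly 0.214.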
+ Feature doesSRGBClearsOnLinearFramebufferAttachments = { + "does_srgb_clears_on_linear_framebuffer_attachments", FeatureCategory::OpenGLWorkarounds, + "Issue clearing framebuffers with linear attachments when GL_FRAMEBUFFER_SRGB is enabled", + &members}; + + // On Mac some GLSL constructs involving do-while loops cause GPU hangs, such as the following: + // int i = 1; + // do { + // i --; + // continue; + // } while (i > 0) + // Work around this by rewriting the do-while to use another GLSL construct (block + while) + Feature doWhileGLSLCausesGPUHang = { + "do_while_glsl_causes_gpu_hang", FeatureCategory::OpenGLWorkarounds, + "Some GLSL constructs involving do-while loops cause GPU hangs", &members, + "http://crbug.com/644669"}; + + // On Mac AMD GPU gl_VertexID in GLSL vertex shader doesn't include base vertex value, + // Work aronud this by replace gl_VertexID with (gl_VertexID - angle_BaseVertex) when + // angle_BaseVertex is present. + Feature addBaseVertexToVertexID = { + "vertex_id_does_not_include_base_vertex", FeatureCategory::OpenGLWorkarounds, + "gl_VertexID in GLSL vertex shader doesn't include base vertex value", &members}; + + // Calling glFinish doesn't cause all queries to report that the result is available on some + // (NVIDIA) drivers. It was found that enabling GL_DEBUG_OUTPUT_SYNCHRONOUS before the finish + // causes it to fully finish. + Feature finishDoesNotCauseQueriesToBeAvailable = { + "finish_does_not_cause_queries_to_be_available", FeatureCategory::OpenGLWorkarounds, + "glFinish doesn't cause all queries to report available result", &members}; + + // Always call useProgram after a successful link to avoid a driver bug. + // This workaround is meant to reproduce the use_current_program_after_successful_link + // workaround in Chromium (http://crbug.com/110263). It has been shown that this workaround is + // not necessary for MacOSX 10.9 and higher (http://crrev.com/39eb535b). + Feature alwaysCallUseProgramAfterLink = { + "always_call_use_program_after_link", FeatureCategory::OpenGLWorkarounds, + "Always call useProgram after a successful link to avoid a driver bug", &members, + "http://crbug.com/110263"}; + + // On NVIDIA, in the case of unpacking from a pixel unpack buffer, unpack overlapping rows row + // by row. + Feature unpackOverlappingRowsSeparatelyUnpackBuffer = { + "unpack_overlapping_rows_separately_unpack_buffer", FeatureCategory::OpenGLWorkarounds, + "In the case of unpacking from a pixel unpack buffer, unpack overlapping rows row by row", + &members}; + + // On NVIDIA, in the case of packing to a pixel pack buffer, pack overlapping rows row by row. + Feature packOverlappingRowsSeparatelyPackBuffer = { + "pack_overlapping_rows_separately_pack_buffer", FeatureCategory::OpenGLWorkarounds, + "In the case of packing to a pixel pack buffer, pack overlapping rows row by row", + &members}; + + // On NVIDIA, during initialization, assign the current vertex attributes to the spec-mandated + // defaults. + Feature initializeCurrentVertexAttributes = { + "initialize_current_vertex_attributes", FeatureCategory::OpenGLWorkarounds, + "During initialization, assign the current vertex attributes to the spec-mandated defaults", + &members}; + + // abs(i) where i is an integer returns unexpected result on Intel Mac. + // Emulate abs(i) with i * sign(i). 
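  // [Editor's note, illustrative only; not part of the upstream ANGLE header] The emulation
  // mentioned above replaces the GLSL
  //     int a = abs(i);
  // with the equivalent
  //     int a = i * sign(i);
  // which avoids the integer abs() builtin on the affected drivers.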
+ Feature emulateAbsIntFunction = {"emulate_abs_int_function", FeatureCategory::OpenGLWorkarounds, + "abs(i) where i is an integer returns unexpected result", + &members, "http://crbug.com/642227"}; + + // On Intel Mac, calculation of loop conditions in for and while loop has bug. + // Add "&& true" to the end of the condition expression to work around the bug. + Feature addAndTrueToLoopCondition = { + "add_and_true_to_loop_condition", FeatureCategory::OpenGLWorkarounds, + "Calculation of loop conditions in for and while loop has bug", &members}; + + // When uploading textures from an unpack buffer, some drivers count an extra row padding when + // checking if the pixel unpack buffer is big enough. Tracking bug: http://anglebug.com/1512 + // For example considering the pixel buffer below where in memory, each row data (D) of the + // texture is followed by some unused data (the dots): + // +-------+--+ + // |DDDDDDD|..| + // |DDDDDDD|..| + // |DDDDDDD|..| + // |DDDDDDD|..| + // +-------A--B + // The last pixel read will be A, but the driver will think it is B, causing it to generate an + // error when the pixel buffer is just big enough. + Feature unpackLastRowSeparatelyForPaddingInclusion = { + "unpack_last_row_separately_for_padding_inclusion", FeatureCategory::OpenGLWorkarounds, + "When uploading textures from an unpack buffer, some drivers count an extra row padding", + &members, "http://anglebug.com/1512"}; + + // Equivalent workaround when uploading data from a pixel pack buffer. + Feature packLastRowSeparatelyForPaddingInclusion = { + "pack_last_row_separately_for_padding_inclusion", FeatureCategory::OpenGLWorkarounds, + "When uploading textures from an pack buffer, some drivers count an extra row padding", + &members, "http://anglebug.com/1512"}; + + // On some Intel drivers, using isnan() on highp float will get wrong answer. To work around + // this bug, we use an expression to emulate function isnan(). + // Tracking bug: http://crbug.com/650547 + Feature emulateIsnanFloat = {"emulate_isnan_float", FeatureCategory::OpenGLWorkarounds, + "Using isnan() on highp float will get wrong answer", &members, + "http://crbug.com/650547"}; + + // On Mac with OpenGL version 4.1, unused std140 or shared uniform blocks will be + // treated as inactive which is not consistent with WebGL2.0 spec. Reference all members in a + // unused std140 or shared uniform block at the beginning of main to work around it. + // Also used on Linux AMD. + Feature useUnusedBlocksWithStandardOrSharedLayout = { + "use_unused_blocks_with_standard_or_shared_layout", FeatureCategory::OpenGLWorkarounds, + "Unused std140 or shared uniform blocks will be treated as inactive", &members}; + + // This flag is used to fix spec difference between GLSL 4.1 or lower and ESSL3. + Feature removeInvariantAndCentroidForESSL3 = { + "remove_invarient_and_centroid_for_essl3", FeatureCategory::OpenGLWorkarounds, + "Fix spec difference between GLSL 4.1 or lower and ESSL3", &members}; + + // On Intel Mac OSX 10.11 driver, using "-float" will get wrong answer. Use "0.0 - float" to + // replace "-float". + // Tracking bug: http://crbug.com/308366 + Feature rewriteFloatUnaryMinusOperator = { + "rewrite_float_unary_minus_operator", FeatureCategory::OpenGLWorkarounds, + "Using '-' will get wrong answer", &members, "http://crbug.com/308366"}; + + // On NVIDIA drivers, atan(y, x) may return a wrong answer. 
+ // Tracking bug: http://crbug.com/672380 + Feature emulateAtan2Float = {"emulate_atan_2_float", FeatureCategory::OpenGLWorkarounds, + "atan(y, x) may return a wrong answer", &members, + "http://crbug.com/672380"}; + + // Some drivers seem to forget about UBO bindings when using program binaries. Work around + // this by re-applying the bindings after the program binary is loaded or saved. + // This only seems to affect AMD OpenGL drivers, and some Android devices. + // http://anglebug.com/1637 + Feature reapplyUBOBindingsAfterUsingBinaryProgram = { + "reapply_ubo_bindings_after_using_binary_program", FeatureCategory::OpenGLWorkarounds, + "Some drivers forget about UBO bindings when using program binaries", &members, + "http://anglebug.com/1637"}; + + // Some Linux OpenGL drivers return 0 when we query MAX_VERTEX_ATTRIB_STRIDE in an OpenGL 4.4 or + // higher context. + // This only seems to affect AMD OpenGL drivers. + // Tracking bug: http://anglebug.com/1936 + Feature emulateMaxVertexAttribStride = { + "emulate_max_vertex_attrib_stride", FeatureCategory::OpenGLWorkarounds, + "Some drivers return 0 when MAX_VERTEX_ATTRIB_STRIED queried", &members, + "http://anglebug.com/1936"}; + + // Initializing uninitialized locals caused odd behavior on Android Qualcomm in a few WebGL 2 + // tests. Tracking bug: http://anglebug.com/2046 + Feature dontInitializeUninitializedLocals = { + "dont_initialize_uninitialized_locals", FeatureCategory::OpenGLWorkarounds, + "Initializing uninitialized locals caused odd behavior in a few WebGL 2 tests", &members, + "http://anglebug.com/2046"}; + + // On some NVIDIA drivers the point size range reported from the API is inconsistent with the + // actual behavior. Clamp the point size to the value from the API to fix this. + Feature clampPointSize = { + "clamp_point_size", FeatureCategory::OpenGLWorkarounds, + "The point size range reported from the API is inconsistent with the actual behavior", + &members}; + + // On some NVIDIA drivers certain types of GLSL arithmetic ops mixing vectors and scalars may be + // executed incorrectly. Change them in the shader translator. Tracking bug: + // http://crbug.com/772651 + Feature rewriteVectorScalarArithmetic = {"rewrite_vector_scalar_arithmetic", + FeatureCategory::OpenGLWorkarounds, + "Certain types of GLSL arithmetic ops mixing vectors " + "and scalars may be executed incorrectly", + &members, "http://crbug.com/772651"}; + + // On some Android devices for loops used to initialize variables hit native GLSL compiler bugs. + Feature dontUseLoopsToInitializeVariables = { + "dont_use_loops_to_initialize_variables", FeatureCategory::OpenGLWorkarounds, + "For loops used to initialize variables hit native GLSL compiler bugs", &members, + "http://crbug.com/809422"}; + + // On some NVIDIA drivers gl_FragDepth is not clamped correctly when rendering to a floating + // point depth buffer. Clamp it in the translated shader to fix this. + Feature clampFragDepth = { + "clamp_frag_depth", FeatureCategory::OpenGLWorkarounds, + "gl_FragDepth is not clamped correctly when rendering to a floating point depth buffer", + &members}; + + // On some NVIDIA drivers before version 397.31 repeated assignment to swizzled values inside a + // GLSL user-defined function have incorrect results. Rewrite this type of statements to fix + // this. 
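  // [Editor's note, illustrative only; not part of the upstream ANGLE header] An affected
  // pattern looks like
  //     void f(inout vec4 v) { v.xy = vec2(0.0); v.xy = v.zw; }
  // where the second assignment to the same swizzle may produce wrong results, so statements of
  // this form are rewritten by the translator; f and v are placeholder names.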
+ Feature rewriteRepeatedAssignToSwizzled = {"rewrite_repeated_assign_to_swizzled", + FeatureCategory::OpenGLWorkarounds, + "Repeated assignment to swizzled values inside a " + "GLSL user-defined function have incorrect results", + &members}; + + // On some AMD and Intel GL drivers ARB_blend_func_extended does not pass the tests. + // It might be possible to work around the Intel bug by rewriting *FragData to *FragColor + // instead of disabling the functionality entirely. The AMD bug looked like incorrect blending, + // not sure if a workaround is feasible. http://anglebug.com/1085 + Feature disableBlendFuncExtended = { + "disable_blend_func_extended", FeatureCategory::OpenGLWorkarounds, + "ARB_blend_func_extended does not pass the tests", &members, "http://anglebug.com/1085"}; + + // Qualcomm drivers returns raw sRGB values instead of linearized values when calling + // glReadPixels on unsized sRGB texture formats. http://crbug.com/550292 and + // http://crbug.com/565179 + Feature unsizedsRGBReadPixelsDoesntTransform = { + "unsized_srgb_read_pixels_doesnt_transform", FeatureCategory::OpenGLWorkarounds, + "Drivers returning raw sRGB values instead of linearized values when calling glReadPixels " + "on unsized sRGB texture formats", + &members, "http://crbug.com/565179"}; + + // Older Qualcomm drivers generate errors when querying the number of bits in timer queries, ex: + // GetQueryivEXT(GL_TIME_ELAPSED, GL_QUERY_COUNTER_BITS). http://anglebug.com/3027 + Feature queryCounterBitsGeneratesErrors = { + "query_counter_bits_generates_errors", FeatureCategory::OpenGLWorkarounds, + "Drivers generate errors when querying the number of bits in timer queries", &members, + "http://anglebug.com/3027"}; + + // Re-linking a program in parallel is buggy on some Intel Windows OpenGL drivers and Android + // platforms. + // http://anglebug.com/3045 + Feature dontRelinkProgramsInParallel = { + "dont_relink_programs_in_parallel", FeatureCategory::OpenGLWorkarounds, + "Relinking a program in parallel is buggy", &members, "http://anglebug.com/3045"}; + + // Some tests have been seen to fail using worker contexts, this switch allows worker contexts + // to be disabled for some platforms. http://crbug.com/849576 + Feature disableWorkerContexts = {"disable_worker_contexts", FeatureCategory::OpenGLWorkarounds, + "Some tests have been seen to fail using worker contexts", + &members, "http://crbug.com/849576"}; + + // Most Android devices fail to allocate a texture that is larger than 4096. Limit the caps + // instead of generating GL_OUT_OF_MEMORY errors. Also causes system to hang on some older + // intel mesa drivers on Linux. + Feature limitMaxTextureSizeTo4096 = {"max_texture_size_limit_4096", + FeatureCategory::OpenGLWorkarounds, + "Limit max texture size to 4096 to avoid frequent " + "out-of-memory errors", + &members, "http://crbug.com/927470"}; + + // Prevent excessive MSAA allocations on Android devices, various rendering bugs have been + // observed and they tend to be high DPI anyways. http://crbug.com/797243 + Feature limitMaxMSAASamplesTo4 = { + "max_msaa_sample_count_4", FeatureCategory::OpenGLWorkarounds, + "Various rendering bugs have been observed when using higher MSAA counts", &members, + "http://crbug.com/797243"}; + + // Prefer to do the robust resource init clear using a glClear. Calls to TexSubImage2D on large + // textures can take hundreds of milliseconds because of slow uploads on macOS. Do this only on + // macOS because clears are buggy on other drivers. 
+ // https://crbug.com/848952 (slow uploads on macOS) + // https://crbug.com/883276 (buggy clears on Android) + Feature allowClearForRobustResourceInit = { + "allow_clear_for_robust_resource_init", FeatureCategory::OpenGLWorkarounds, + "Using glClear for robust resource initialization is buggy on some drivers and leads to " + "texture corruption. Default to data uploads except on MacOS where it is very slow.", + &members, "http://crbug.com/883276"}; + + // Some drivers automatically handle out-of-bounds uniform array access but others need manual + // clamping to satisfy the WebGL requirements. + Feature clampArrayAccess = {"clamp_array_access", FeatureCategory::OpenGLWorkarounds, + "Clamp uniform array access to avoid reading invalid memory.", + &members, "http://anglebug.com/2978"}; + + // Reset glTexImage2D base level to workaround pixel comparison failure above Mac OS 10.12.4 on + // Intel Mac. + Feature resetTexImage2DBaseLevel = {"reset_teximage2d_base_level", + FeatureCategory::OpenGLWorkarounds, + "Reset texture base level before calling glTexImage2D to " + "work around pixel comparison failure.", + &members, "https://crbug.com/705865"}; + + // glClearColor does not always work on Intel 6xxx Mac drivers when the clear color made up of + // all zeros and ones. + Feature clearToZeroOrOneBroken = { + "clear_to_zero_or_one_broken", FeatureCategory::OpenGLWorkarounds, + "Clears when the clear color is all zeros or ones do not work.", &members, + "https://crbug.com/710443"}; + + // Some older Linux Intel mesa drivers will hang the system when allocating large textures. Fix + // this by capping the max texture size. + Feature limitMax3dArrayTextureSizeTo1024 = { + "max_3d_array_texture_size_1024", FeatureCategory::OpenGLWorkarounds, + "Limit max 3d texture size and max array texture layers to 1024 to avoid system hang", + &members, "http://crbug.com/927470"}; + + // BlitFramebuffer has issues on some platforms with large source/dest texture sizes. This + // workaround adjusts the destination rectangle source and dest rectangle to fit within maximum + // twice the size of the framebuffer. + Feature adjustSrcDstRegionBlitFramebuffer = { + "adjust_src_dst_region_for_blitframebuffer", FeatureCategory::OpenGLWorkarounds, + "Many platforms have issues with blitFramebuffer when the parameters are large.", &members, + "http://crbug.com/830046"}; + + // BlitFramebuffer has issues on Mac when the source bounds aren't enclosed by the framebuffer. + // This workaround clips the source region and adjust the dest region proportionally. + Feature clipSrcRegionBlitFramebuffer = { + "clip_src_region_for_blitframebuffer", FeatureCategory::OpenGLWorkarounds, + "Issues with blitFramebuffer when the parameters don't match the framebuffer size.", + &members, "http://crbug.com/830046"}; + + // Calling glTexImage2D with zero size generates GL errors + Feature resettingTexturesGeneratesErrors = { + "reset_texture_generates_errors", FeatureCategory::OpenGLWorkarounds, + "Calling glTexImage2D with zero size generates errors.", &members, + "http://anglebug.com/3859"}; + + // Mac Intel samples transparent black from GL_COMPRESSED_RGB_S3TC_DXT1_EXT + Feature rgbDXT1TexturesSampleZeroAlpha = { + "rgb_dxt1_textures_sample_zero_alpha", FeatureCategory::OpenGLWorkarounds, + "Sampling BLACK texels from RGB DXT1 textures returns transparent black on Mac.", &members, + "http://anglebug.com/3729"}; + + // Mac incorrectly executes both sides of && and || expressions when they should short-circuit. 
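  // [Editor's note, illustrative only; not part of the upstream ANGLE header] One way to unfold
  // a short-circuit such as
  //     if (a && b()) { doWork(); }
  // is to introduce an explicit temporary and branch:
  //     bool s = a; if (s) { s = b(); } if (s) { doWork(); }
  // so b() is still evaluated only when a is true, independent of the driver bug; a, b and
  // doWork are placeholder names.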
+ Feature unfoldShortCircuits = { + "unfold_short_circuits", FeatureCategory::OpenGLWorkarounds, + "Mac incorrectly executes both sides of && and || expressions when they should " + "short-circuit.", + &members, "http://anglebug.com/482"}; + + Feature emulatePrimitiveRestartFixedIndex = { + "emulate_primitive_restart_fixed_index", FeatureCategory::OpenGLWorkarounds, + "When GL_PRIMITIVE_RESTART_FIXED_INDEX is not available, emulate it with " + "GL_PRIMITIVE_RESTART and glPrimitiveRestartIndex.", + &members, "http://anglebug.com/3997"}; + + Feature setPrimitiveRestartFixedIndexForDrawArrays = { + "set_primitive_restart_fixed_index_for_draw_arrays", FeatureCategory::OpenGLWorkarounds, + "Some drivers discard vertex data in DrawArrays calls when the fixed primitive restart " + "index is within the number of primitives being drawn.", + &members, "http://anglebug.com/3997"}; + + // Dynamic indexing of swizzled l-values doesn't work correctly on various platforms. + Feature removeDynamicIndexingOfSwizzledVector = { + "remove_dynamic_indexing_of_swizzled_vector", FeatureCategory::OpenGLWorkarounds, + "Dynamic indexing of swizzled l-values doesn't work correctly on various platforms.", + &members, "http://crbug.com/709351"}; + + // Intel Mac drivers does not treat texelFetchOffset() correctly. + Feature preAddTexelFetchOffsets = { + "pre_add_texel_fetch_offsets", FeatureCategory::OpenGLWorkarounds, + "Intel Mac drivers mistakenly consider the parameter position of nagative vaule as invalid " + "even if the sum of position and offset is in range, so we need to add workarounds by " + "rewriting texelFetchOffset(sampler, position, lod, offset) into texelFetch(sampler, " + "position + offset, lod).", + &members, "http://crbug.com/642605"}; + + // All Mac drivers do not handle struct scopes correctly. This workaround overwrites a struct + // name with a unique prefix + Feature regenerateStructNames = { + "regenerate_struct_names", FeatureCategory::OpenGLWorkarounds, + "All Mac drivers do not handle struct scopes correctly. This workaround overwrites a struct" + "name with a unique prefix.", + &members, "http://crbug.com/403957"}; + + // Quite some OpenGL ES drivers don't implement readPixels for RGBA/UNSIGNED_SHORT from + // EXT_texture_norm16 correctly + Feature readPixelsUsingImplementationColorReadFormatForNorm16 = { + "read_pixels_using_implementation_color_read_format", FeatureCategory::OpenGLWorkarounds, + "Quite some OpenGL ES drivers don't implement readPixels for RGBA/UNSIGNED_SHORT from " + "EXT_texture_norm16 correctly", + &members, "http://anglebug.com/4214"}; + + // Bugs exist in some Intel drivers where dependencies are incorrectly + // tracked for textures which are copy destinations (via CopyTexImage2D, for + // example). Flush before DeleteTexture if these entry points have been + // called recently. + Feature flushBeforeDeleteTextureIfCopiedTo = { + "flush_before_delete_texture_if_copied_to", FeatureCategory::OpenGLWorkarounds, + "Some drivers track CopyTex{Sub}Image texture dependencies incorrectly. Flush" + " before glDeleteTextures in this case", + &members, "http://anglebug.com/4267"}; + + // Rewrite row-major matrices as column-major as a driver bug workaround if + // necessary. 
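  // [Editor's note, illustrative only; not part of the upstream ANGLE header] The rewrite
  // targets declarations such as
  //     layout(row_major) uniform Defaults { mat4 transform; };
  // re-expressing them in column-major storage on drivers that mishandle row_major layouts;
  // Defaults and transform are placeholder names.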
+ Feature rewriteRowMajorMatrices = { + "rewrite_row_major_matrices", FeatureCategory::OpenGLWorkarounds, + "Rewrite row major matrices in shaders as column major as a driver bug workaround", + &members, "http://anglebug.com/2273"}; + + // Bugs exist in various OpenGL Intel drivers on Windows that produce incorrect + // values for GL_COMPRESSED_SRGB_S3TC_DXT1_EXT format. Replace it with + // GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT as it's the closest option allowed by + // the WebGL extension spec. + Feature avoidDXT1sRGBTextureFormat = { + "avoid_dxt1_srgb_texture_format", FeatureCategory::OpenGLWorkarounds, + "Replaces DXT1 sRGB with DXT1 sRGB Alpha as a driver bug workaround.", &members}; + + // Bugs exist in OpenGL AMD drivers on Windows that produce incorrect pipeline state for + // colorMaski calls. + Feature disableDrawBuffersIndexed = {"disable_draw_buffers_indexed", + FeatureCategory::OpenGLWorkarounds, + "Disable OES_draw_buffers_indexed extension.", &members}; + + // GL_EXT_semaphore_fd doesn't work properly with Mesa 19.3.4 and earlier versions. + Feature disableSemaphoreFd = {"disable_semaphore_fd", FeatureCategory::OpenGLWorkarounds, + "Disable GL_EXT_semaphore_fd extension", &members, + "https://crbug.com/1046462"}; + + // GL_EXT_disjoint_timer_query doesn't work properly with Linux VMWare drivers. + Feature disableTimestampQueries = { + "disable_timestamp_queries", FeatureCategory::OpenGLWorkarounds, + "Disable GL_EXT_disjoint_timer_query extension", &members, "https://crbug.com/811661"}; + + // Some drivers use linear blending when generating mipmaps for sRGB textures. Work around this + // by generating mipmaps in a linear texture and copying back to sRGB. + Feature encodeAndDecodeSRGBForGenerateMipmap = { + "decode_encode_srgb_for_generatemipmap", FeatureCategory::OpenGLWorkarounds, + "Decode and encode before generateMipmap for srgb format textures.", &members, + "http://anglebug.com/4646"}; + + Feature emulateCopyTexImage2DFromRenderbuffers = { + "emulate_copyteximage2d_from_renderbuffers", FeatureCategory::OpenGLWorkarounds, + "CopyTexImage2D spuriously returns errors on iOS when copying from renderbuffers.", + &members, "https://anglebug.com/4674"}; + + Feature disableGPUSwitchingSupport = { + "disable_gpu_switching_support", FeatureCategory::OpenGLWorkarounds, + "Disable GPU switching support (use only the low-power GPU) on older MacBook Pros.", + &members, "https://crbug.com/1091824"}; + + // KHR_parallel_shader_compile fails TSAN on Linux, so we avoid using it with this workaround. + Feature disableNativeParallelCompile = { + "disable_native_parallel_compile", FeatureCategory::OpenGLWorkarounds, + "Do not use native KHR_parallel_shader_compile even when available.", &members, + "http://crbug.com/1094869"}; + + Feature emulatePackSkipRowsAndPackSkipPixels = { + "emulate_pack_skip_rows_and_pack_skip_pixels", FeatureCategory::OpenGLWorkarounds, + "GL_PACK_SKIP_ROWS and GL_PACK_SKIP_PIXELS are ignored in Apple's OpenGL driver.", &members, + "https://anglebug.com/4849"}; + + // Some drivers return bogus/1hz values for GetMscRate, which we may want to clamp + Feature clampMscRate = { + "clamp_msc_rate", FeatureCategory::OpenGLWorkarounds, + "Some drivers return bogus values for GetMscRate, so we clamp it to 30Hz", &members, + "https://crbug.com/1042393"}; + + // Mac drivers generate GL_INVALID_VALUE when binding a transform feedback buffer with + // glBindBufferRange before first binding it to some generic binding point. 
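  // [Editor's note, illustrative only; not part of the upstream ANGLE header] The workaround
  // amounts to something like
  //     glBindBuffer(GL_TRANSFORM_FEEDBACK_BUFFER, buffer);  // generic binding point first
  //     glBindBufferRange(GL_TRANSFORM_FEEDBACK_BUFFER, 0, buffer, offset, size);
  // rather than calling glBindBufferRange on a buffer that has never been bound generically;
  // buffer, offset and size are placeholders.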
+ Feature bindTransformFeedbackBufferBeforeBindBufferRange = { + "bind_transform_feedback_buffer_before_bind_buffer_range", + FeatureCategory::OpenGLWorkarounds, + "Bind transform feedback buffers to the generic binding point before calling " + "glBindBufferBase or glBindBufferRange.", + &members, "https://anglebug.com/5140"}; + + // Speculative fix for issues on Linux/Wayland where exposing GLX_OML_sync_control renders + // Chrome unusable + Feature disableSyncControlSupport = { + "disable_sync_control_support", FeatureCategory::OpenGLWorkarounds, + "Speculative fix for issues on Linux/Wayland where exposing GLX_OML_sync_control renders " + "Chrome unusable", + &members, "https://crbug.com/1137851"}; + + // Buffers need to maintain a shadow copy of data when buffer data readback is not possible + // through the GL API + Feature keepBufferShadowCopy = { + "keep_buffer_shadow_copy", FeatureCategory::OpenGLWorkarounds, + "Maintain a shadow copy of buffer data when the GL API does not permit reading data back.", + &members}; + + // glGenerateMipmap fails if the zero texture level is not set on some Mac drivers + Feature setZeroLevelBeforeGenerateMipmap = { + "set_zero_level_before_generating_mipmap", FeatureCategory::OpenGLWorkarounds, + "glGenerateMipmap fails if the zero texture level is not set on some Mac drivers.", + &members}; + + // On macOS with AMD GPUs, packed color formats like RGB565 and RGBA4444 are buggy. Promote them + // to 8 bit per channel formats. + Feature promotePackedFormatsTo8BitPerChannel = { + "promote_packed_formats_to_8_bit_per_channel", FeatureCategory::OpenGLWorkarounds, + "Packed color formats are buggy on Macs with AMD GPUs", &members, + "http://anglebug.com/5469"}; + + // If gl_FragColor is not written by fragment shader, it may cause context lost with Adreno 42x + // and 3xx. + Feature initFragmentOutputVariables = { + "init_fragment_output_variables", FeatureCategory::OpenGLWorkarounds, + "No init gl_FragColor causes context lost", &members, "http://crbug.com/1171371"}; + + // On macOS with Intel GPUs, instanced array with divisor > 0 is buggy when first > 0 in + // drawArraysInstanced. Shift the attributes with extra offset to workaround. + Feature shiftInstancedArrayDataWithExtraOffset = { + "shift_instanced_array_data_with_offset", FeatureCategory::OpenGLWorkarounds, + "glDrawArraysInstanced is buggy on certain new Mac Intel GPUs", &members, + "http://crbug.com/1144207"}; +}; + +inline FeaturesGL::FeaturesGL() = default; +inline FeaturesGL::~FeaturesGL() = default; + +} // namespace angle + +#endif // ANGLE_PLATFORM_FEATURESGL_H_ diff --git a/vendor/angle/include/platform/FeaturesMtl.h b/vendor/angle/include/platform/FeaturesMtl.h new file mode 100644 index 00000000..fe5e8abd --- /dev/null +++ b/vendor/angle/include/platform/FeaturesMtl.h @@ -0,0 +1,90 @@ +// +// Copyright 2019 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// FeaturesMtl.h: Optional features for the Metal renderer. 
+// + +#ifndef ANGLE_PLATFORM_FEATURESMTL_H_ +#define ANGLE_PLATFORM_FEATURESMTL_H_ + +#include "platform/Feature.h" + +namespace angle +{ + +struct FeaturesMtl : FeatureSetBase +{ + // BaseVertex/Instanced draw support: + Feature hasBaseVertexInstancedDraw = { + "has_base_vertex_instanced_draw", FeatureCategory::MetalFeatures, + "The renderer supports base vertex instanced draw", &members}; + + // Support depth texture filtering + Feature hasDepthTextureFiltering = { + "has_depth_texture_filtering", FeatureCategory::MetalFeatures, + "The renderer supports depth texture's filtering other than nearest", &members}; + + // Support explicit memory barrier + Feature hasExplicitMemBarrier = {"has_explicit_mem_barrier_mtl", FeatureCategory::MetalFeatures, + "The renderer supports explicit memory barrier", &members}; + + // Some renderers, i.e. desktop class GPUs, can break a render pass cheaply. + Feature hasCheapRenderPass = {"has_cheap_render_pass_mtl", FeatureCategory::MetalFeatures, + "The renderer can cheaply break a render pass.", &members}; + + // Non-uniform compute shader dispatch support, i.e. the group size does not have to be fixed: + Feature hasNonUniformDispatch = { + "has_non_uniform_dispatch", FeatureCategory::MetalFeatures, + "The renderer supports non uniform compute shader dispatch's group size", &members}; + + // fragment stencil output support + Feature hasStencilOutput = {"has_shader_stencil_output", FeatureCategory::MetalFeatures, + "The renderer supports stencil output from fragment shader", + &members}; + + // Texture swizzle support: + Feature hasTextureSwizzle = {"has_texture_swizzle", FeatureCategory::MetalFeatures, + "The renderer supports texture swizzle", &members}; + + Feature hasDepthAutoResolve = { + "has_msaa_depth_auto_resolve", FeatureCategory::MetalFeatures, + "The renderer supports MSAA depth auto resolve at the end of render pass", &members}; + + Feature hasStencilAutoResolve = { + "has_msaa_stencil_auto_resolve", FeatureCategory::MetalFeatures, + "The renderer supports MSAA stencil auto resolve at the end of render pass", &members}; + + Feature hasEvents = {"has_mtl_events", FeatureCategory::MetalFeatures, + "The renderer supports MTL(Shared)Event", &members}; + + // On macOS, separate depth & stencil buffers are not supported.
However, on iOS devices, + // they are supported: + Feature allowSeparatedDepthStencilBuffers = { + "allow_separate_depth_stencil_buffers", FeatureCategory::MetalFeatures, + "Some Apple platforms such as iOS allows separate depth & stencil buffers, " + "whereas others such as macOS don't", + &members}; + + Feature allowMultisampleStoreAndResolve = { + "allow_msaa_store_and_resolve", FeatureCategory::MetalFeatures, + "The renderer supports MSAA store and resolve in the same pass", &members}; + + Feature allowGenMultipleMipsPerPass = { + "gen_multiple_mips_per_pass", FeatureCategory::MetalFeatures, + "The renderer supports generating multiple mipmaps per pass", &members}; + + Feature forceBufferGPUStorage = { + "force_buffer_gpu_storage_mtl", FeatureCategory::MetalFeatures, + "On systems that support both buffer's memory allocation on GPU and shared memory (such as " + "macOS), force using GPU memory allocation for buffers.", + &members}; + + Feature forceD24S8AsUnsupported = {"force_d24s8_as_unsupported", FeatureCategory::MetalFeatures, + "Force Depth24Stencil8 format as unsupported.", &members}; +}; + +} // namespace angle + +#endif // ANGLE_PLATFORM_FEATURESMTL_H_ diff --git a/vendor/angle/include/platform/FeaturesVk.h b/vendor/angle/include/platform/FeaturesVk.h new file mode 100644 index 00000000..e58a24f5 --- /dev/null +++ b/vendor/angle/include/platform/FeaturesVk.h @@ -0,0 +1,481 @@ +// +// Copyright 2018 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// FeaturesVk.h: Optional features for the Vulkan renderer. +// + +#ifndef ANGLE_PLATFORM_FEATURESVK_H_ +#define ANGLE_PLATFORM_FEATURESVK_H_ + +#include "platform/Feature.h" + +#include + +namespace angle +{ + +struct FeaturesVk : FeatureSetBase +{ + FeaturesVk(); + ~FeaturesVk(); + + // Line segment rasterization must follow OpenGL rules. This means using an algorithm similar + // to Bresenham's. Vulkan uses a different algorithm. This feature enables the use of pixel + // shader patching to implement OpenGL basic line rasterization rules. This feature will + // normally always be enabled. Exposing it as an option enables performance testing. + Feature basicGLLineRasterization = { + "basicGLLineRasterization", FeatureCategory::VulkanFeatures, + "Enable the use of pixel shader patching to implement OpenGL basic line " + "rasterization rules", + &members}; + + // If the VK_EXT_line_rasterization extension is available we'll use it to get + // Bresenham line rasterization. + Feature bresenhamLineRasterization = { + "bresenhamLineRasterization", FeatureCategory::VulkanFeatures, + "Enable Bresenham line rasterization via VK_EXT_line_rasterization extension", &members}; + + // If the VK_EXT_provoking_vertex extension is available, we'll use it to set + // the provoking vertex mode + Feature provokingVertex = {"provokingVertex", FeatureCategory::VulkanFeatures, + "Enable provoking vertex mode via VK_EXT_provoking_vertex extension", + &members}; + + // Add an extra copy region when using vkCmdCopyBuffer as the Windows Intel driver seems + // to have a bug where the last region is ignored.
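  // [Editor's note, illustrative only; not part of the upstream ANGLE header] One plausible
  // shape of the workaround, assuming a std::vector<VkBufferCopy> named regions:
  //     regions.push_back(regions.back());  // repeat the final region so the dropped one is harmless
  //     vkCmdCopyBuffer(cmd, srcBuffer, dstBuffer,
  //                     static_cast<uint32_t>(regions.size()), regions.data());
  // cmd, srcBuffer and dstBuffer are placeholder handles.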
+ Feature extraCopyBufferRegion = { + "extraCopyBufferRegion", FeatureCategory::VulkanWorkarounds, + "Some drivers seem to have a bug where the last copy region in vkCmdCopyBuffer is ignored", + &members}; + + // This flag is added for the sole purpose of end2end tests, to test the correctness + // of various algorithms when a fallback format is used, such as using a packed format to + // emulate a depth- or stencil-only format. + Feature forceFallbackFormat = {"forceFallbackFormat", FeatureCategory::VulkanWorkarounds, + "Force a fallback format for angle_end2end_tests", &members}; + + // On some NVIDIA drivers the point size range reported from the API is inconsistent with the + // actual behavior. Clamp the point size to the value from the API to fix this. + // Tracked in http://anglebug.com/2970. + Feature clampPointSize = { + "clampPointSize", FeatureCategory::VulkanWorkarounds, + "The point size range reported from the API is inconsistent with the actual behavior", + &members, "http://anglebug.com/2970"}; + + // On some NVIDIA drivers the depth value is not clamped to [0,1] for floating point depth + // buffers. This is NVIDIA bug 3171019, see http://anglebug.com/3970 for details. + Feature depthClamping = { + "depth_clamping", FeatureCategory::VulkanWorkarounds, + "The depth value is not clamped to [0,1] for floating point depth buffers.", &members, + "http://anglebug.com/3970"}; + + // On some android devices, the memory barrier between the compute shader that converts vertex + // attributes and the vertex shader that reads from it is ineffective. Only known workaround is + // to perform a flush after the conversion. http://anglebug.com/3016 + Feature flushAfterVertexConversion = { + "flushAfterVertexConversion", FeatureCategory::VulkanWorkarounds, + "The memory barrier between the compute shader that converts vertex attributes and the " + "vertex shader that reads from it is ineffective", + &members, "http://anglebug.com/3016"}; + + Feature supportsRenderpass2 = {"supportsRenderpass2", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_KHR_create_renderpass2 extension", + &members}; + + // Whether the VkDevice supports the VK_KHR_incremental_present extension, on which the + // EGL_KHR_swap_buffers_with_damage extension can be layered. + Feature supportsIncrementalPresent = { + "supportsIncrementalPresent", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_KHR_incremental_present extension", &members}; + + // Whether texture copies on cube map targets should be done on GPU. This is a workaround for + // Intel drivers on windows that have an issue with creating single-layer views on cube map + // textures. + Feature forceCPUPathForCubeMapCopy = { + "forceCPUPathForCubeMapCopy", FeatureCategory::VulkanWorkarounds, + "Some drivers have an issue with creating single-layer views on cube map textures", + &members}; + + // Whether the VkDevice supports the VK_ANDROID_external_memory_android_hardware_buffer + // extension, on which the EGL_ANDROID_image_native_buffer extension can be layered. + Feature supportsAndroidHardwareBuffer = { + "supportsAndroidHardwareBuffer", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_ANDROID_external_memory_android_hardware_buffer extension", + &members}; + + // Whether the VkDevice supports the VK_GGP_frame_token extension, on which + // the EGL_ANGLE_swap_with_frame_token extension can be layered. 
+ Feature supportsGGPFrameToken = {"supportsGGPFrameToken", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_GGP_frame_token extension", + &members}; + + // Whether the VkDevice supports the VK_KHR_external_memory_fd extension, on which the + // GL_EXT_memory_object_fd extension can be layered. + Feature supportsExternalMemoryFd = {"supportsExternalMemoryFd", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_KHR_external_memory_fd extension", + &members}; + + // Whether the VkDevice supports the VK_FUCHSIA_external_memory + // extension, on which the GL_ANGLE_memory_object_fuchsia extension can be layered. + Feature supportsExternalMemoryFuchsia = { + "supportsExternalMemoryFuchsia", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_FUCHSIA_external_memory extension", &members}; + + Feature supportsFilteringPrecision = { + "supportsFilteringPrecision", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_GOOGLE_sampler_filtering_precision extension", &members}; + + // Whether the VkDevice supports the VK_KHR_external_fence_capabilities extension. + Feature supportsExternalFenceCapabilities = { + "supportsExternalFenceCapabilities", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_KHR_external_fence_capabilities extension", &members}; + + // Whether the VkDevice supports the VK_KHR_external_semaphore_capabilities extension. + Feature supportsExternalSemaphoreCapabilities = { + "supportsExternalSemaphoreCapabilities", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_KHR_external_semaphore_capabilities extension", &members}; + + // Whether the VkDevice supports the VK_KHR_external_semaphore_fd extension, on which the + // GL_EXT_semaphore_fd extension can be layered. + Feature supportsExternalSemaphoreFd = { + "supportsExternalSemaphoreFd", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_KHR_external_semaphore_fd extension", &members}; + + // Whether the VkDevice supports the VK_FUCHSIA_external_semaphore + // extension, on which the GL_ANGLE_semaphore_fuchsia extension can be layered. + angle::Feature supportsExternalSemaphoreFuchsia = { + "supportsExternalSemaphoreFuchsia", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_FUCHSIA_external_semaphore extension", &members}; + + // Whether the VkDevice supports the VK_KHR_external_fence_fd extension, on which the + // EGL_ANDROID_native_fence extension can be layered. + Feature supportsExternalFenceFd = {"supportsExternalFenceFd", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_KHR_external_fence_fd extension", + &members, "http://anglebug.com/2517"}; + + // Whether the VkDevice can support EGL_ANDROID_native_fence_sync extension. + Feature supportsAndroidNativeFenceSync = { + "supportsAndroidNativeFenceSync", FeatureCategory::VulkanFeatures, + "VkDevice supports the EGL_ANDROID_native_fence_sync extension", &members, + "http://anglebug.com/2517"}; + + // Whether the VkDevice can support imageCubeArray feature properly. + Feature supportsImageCubeArray = {"supportsImageCubeArray", FeatureCategory::VulkanFeatures, + "VkDevice supports the imageCubeArray feature properly", + &members, "http://anglebug.com/3584"}; + + // Whether the VkDevice supports the VK_EXT_shader_stencil_export extension, which is used to + // perform multisampled resolve of stencil buffer. A multi-step workaround is used instead if + // this extension is not available. 
+ Feature supportsShaderStencilExport = { + "supportsShaderStencilExport", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_EXT_shader_stencil_export extension", &members}; + + // Whether the VkDevice supports the VK_KHR_sampler_ycbcr_conversion extension, which is needed + // to support Ycbcr conversion with external images. + Feature supportsYUVSamplerConversion = { + "supportsYUVSamplerConversion", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_KHR_sampler_ycbcr_conversion extension", &members}; + + // Where VK_EXT_transform_feedback is not supported, an emulation path is used. + // http://anglebug.com/3205 + Feature emulateTransformFeedback = { + "emulateTransformFeedback", FeatureCategory::VulkanFeatures, + "Emulate transform feedback as the VK_EXT_transform_feedback is not present.", &members, + "http://anglebug.com/3205"}; + + // Where VK_EXT_transform_feedback is supported, it's preferred over an emulation path. + // http://anglebug.com/3206 + Feature supportsTransformFeedbackExtension = { + "supportsTransformFeedbackExtension", FeatureCategory::VulkanFeatures, + "Transform feedback uses the VK_EXT_transform_feedback extension.", &members, + "http://anglebug.com/3206"}; + + // Whether the VkDevice supports the VK_EXT_index_type_uint8 extension + // http://anglebug.com/4405 + Feature supportsIndexTypeUint8 = {"supportsIndexTypeUint8", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_EXT_index_type_uint8 extension", + &members, "http://anglebug.com/4405"}; + + // Whether the VkDevice supports the VK_KHR_depth_stencil_resolve extension with the + // independentResolveNone feature. + // http://anglebug.com/4836 + Feature supportsDepthStencilResolve = {"supportsDepthStencilResolve", + FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_KHR_depth_stencil_resolve " + "extension with the independentResolveNone feature", + &members, "http://anglebug.com/4836"}; + + // VK_PRESENT_MODE_FIFO_KHR causes random timeouts on Linux Intel. http://anglebug.com/3153 + Feature disableFifoPresentMode = {"disableFifoPresentMode", FeatureCategory::VulkanWorkarounds, + "VK_PRESENT_MODE_FIFO_KHR causes random timeouts", &members, + "http://anglebug.com/3153"}; + + // On Qualcomm, gaps in bound descriptor set indices cause the post-gap sets to misbehave. + // For example, binding only descriptor set 3 results in zero being read from a uniform buffer + // object within that set. This flag results in empty descriptor sets being bound for any + // unused descriptor set to work around this issue. http://anglebug.com/2727 + Feature bindEmptyForUnusedDescriptorSets = { + "bindEmptyForUnusedDescriptorSets", FeatureCategory::VulkanWorkarounds, + "Gaps in bound descriptor set indices causes the post-gap sets to misbehave", &members, + "http://anglebug.com/2727"}; + + // OES_depth_texture is a commonly expected feature on Android. However it + // requires that D16_UNORM support texture filtering + // (e.g. VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) and some devices + // do not. Work around this by saying D16_UNORM supports filtering + // anyway. + Feature forceD16TexFilter = { + "forceD16TexFilter", FeatureCategory::VulkanWorkarounds, + "VK_FORMAT_D16_UNORM does not support VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, " + "which prevents OES_depth_texture from being supported.", + &members, "http://anglebug.com/3452"}; + + // On some android devices, vkCmdBlitImage with flipped coordinates blits incorrectly.
This + // workaround makes sure this path is avoided. http://anglebug.com/3498 + Feature disableFlippingBlitWithCommand = { + "disableFlippingBlitWithCommand", FeatureCategory::VulkanWorkarounds, + "vkCmdBlitImage with flipped coordinates blits incorrectly.", &members, + "http://anglebug.com/3498"}; + + // On platform with Intel or AMD GPU, a window resizing would not trigger the vulkan driver to + // return VK_ERROR_OUT_OF_DATE on swapchain present. Work-around by query current window extent + // every frame to detect a window resizing. + // http://anglebug.com/3623, http://anglebug.com/3624, http://anglebug.com/3625 + Feature perFrameWindowSizeQuery = { + "perFrameWindowSizeQuery", FeatureCategory::VulkanWorkarounds, + "Vulkan swapchain is not returning VK_ERROR_OUT_OF_DATE when window resizing", &members, + "http://anglebug.com/3623, http://anglebug.com/3624, http://anglebug.com/3625"}; + + // Seamful cube map emulation misbehaves on the AMD windows driver, so it's disallowed. + Feature disallowSeamfulCubeMapEmulation = { + "disallowSeamfulCubeMapEmulation", FeatureCategory::VulkanWorkarounds, + "Seamful cube map emulation misbehaves on some drivers, so it's disallowed", &members, + "http://anglebug.com/3243"}; + + // Vulkan considers vertex attribute accesses to count up to the last multiple of the stride. + // This additional access supports AMD's robust buffer access implementation. + // AMDVLK in particular will return incorrect values when the vertex access extends into the + // range that would be the stride padding and the buffer is too small. + // This workaround limits GL_MAX_VERTEX_ATTRIB_STRIDE to a reasonable value and pads out + // every buffer allocation size to be large enough to support a maximum vertex stride. + // http://anglebug.com/4428 + Feature padBuffersToMaxVertexAttribStride = { + "padBuffersToMaxVertexAttribStride", FeatureCategory::VulkanWorkarounds, + "Vulkan considers vertex attribute accesses to count up to the last multiple of the " + "stride. This additional access supports AMD's robust buffer access implementation. " + "AMDVLK in particular will return incorrect values when the vertex access extends into " + "the range that would be the stride padding and the buffer is too small. " + "This workaround limits GL_MAX_VERTEX_ATTRIB_STRIDE to a maximum value and " + "pads up every buffer allocation size to be a multiple of the maximum stride.", + &members, "http://anglebug.com/4428"}; + + // Whether the VkDevice supports the VK_EXT_swapchain_colorspace extension + // http://anglebug.com/2514 + Feature supportsSwapchainColorspace = { + "supportsSwapchainColorspace", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_EXT_swapchain_colorspace extension", &members, + "http://anglebug.com/2514"}; + + // Whether the VkDevice supports the VK_EXT_external_memory_host extension, on which the + // ANGLE_iosurface_client_buffer extension can be layered. + Feature supportsExternalMemoryHost = { + "supportsExternalMemoryHost", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_EXT_external_memory_host extension", &members}; + + // Whether to fill new buffers and textures with nonzero data to sanitize robust resource + // initialization and flush out assumptions about zero init. 
+ Feature allocateNonZeroMemory = { + "allocateNonZeroMemory", FeatureCategory::VulkanFeatures, + "Fill new allocations with non-zero values to flush out errors.", &members, + "http://anglebug.com/4384"}; + + // Whether to log each callback from the VK_EXT_device_memory_report extension. This feature is + // used for trying to debug GPU memory leaks. + Feature logMemoryReportCallbacks = {"logMemoryReportCallbacks", FeatureCategory::VulkanFeatures, + "Log each callback from VK_EXT_device_memory_report", + &members}; + + // Whether to log statistics from the VK_EXT_device_memory_report extension on each eglSwapBuffers call. + Feature logMemoryReportStats = {"logMemoryReportStats", FeatureCategory::VulkanFeatures, + "Log stats from VK_EXT_device_memory_report each swap", + &members}; + + // Allocate a "shadow" buffer for GL buffer objects. For GPU-read only buffers + // glMap* latency can be reduced by maintaining a copy of the buffer which is + // writeable only by the CPU. We then return this shadow buffer on glMap* calls. + Feature shadowBuffers = { + "shadowBuffers", FeatureCategory::VulkanFeatures, + "Allocate a shadow buffer for GL buffer objects to reduce glMap* latency.", &members, + "http://anglebug.com/4339"}; + + // Persistently map buffer memory until destruction, which saves on map/unmap IOCTL overhead + // for buffers that are updated frequently. + Feature persistentlyMappedBuffers = { + "persistentlyMappedBuffers", FeatureCategory::VulkanFeatures, + "Persistently map buffer memory to reduce map/unmap IOCTL overhead.", &members, + "http://anglebug.com/2162"}; + + // Android needs to pre-rotate surfaces that are not oriented per the native device's + // orientation (e.g. a landscape application on a Pixel phone). This feature works for + // full-screen applications. http://anglebug.com/3502 + Feature enablePreRotateSurfaces = {"enablePreRotateSurfaces", FeatureCategory::VulkanFeatures, + "Enable Android pre-rotation for landscape applications", + &members, "http://anglebug.com/3502"}; + + // Enable precision qualifiers for shaders generated by Vulkan backend http://anglebug.com/3078 + Feature enablePrecisionQualifiers = { + "enablePrecisionQualifiers", FeatureCategory::VulkanFeatures, + "Enable precision qualifiers in shaders", &members, "http://anglebug.com/3078"}; + + // Desktop (at least NVIDIA) drivers prefer combining barriers into one vkCmdPipelineBarrier + // call over issuing multiple barrier calls with fine grained dependency information to have + // better performance. http://anglebug.com/4633 + Feature preferAggregateBarrierCalls = { + "preferAggregateBarrierCalls", FeatureCategory::VulkanWorkarounds, + "Single barrier call is preferred over multiple calls with " + "fine grained pipeline stage dependency information", + &members, "http://anglebug.com/4633"}; + + // Tell the Vulkan back-end to use the async command queue to dispatch work to the GPU. Command + // buffer work will happen in a worker thread. Otherwise use Renderer::CommandQueue directly. + Feature asyncCommandQueue = {"asyncCommandQueue", FeatureCategory::VulkanFeatures, + "Use CommandQueue worker thread to dispatch work to GPU.", + &members, "http://anglebug.com/4324"}; + + // Whether the VkDevice supports the VK_KHR_shader_float16_int8 extension and has the + // shaderFloat16 feature.
+ Feature supportsShaderFloat16 = {"supportsShaderFloat16", FeatureCategory::VulkanFeatures, + "VkDevice supports the VK_KHR_shader_float16_int8 extension " + "and has the shaderFloat16 feature", + &members, "http://anglebug.com/4551"}; + + // Some devices don't meet the limits required to perform mipmap generation using the built-in + // compute shader. On some other devices, VK_IMAGE_USAGE_STORAGE_BIT is detrimental to + // performance, making this solution impractical. + Feature allowGenerateMipmapWithCompute = { + "allowGenerateMipmapWithCompute", FeatureCategory::VulkanFeatures, + "Use the compute path to generate mipmaps on devices that meet the minimum requirements, " + "and the performance is better.", + &members, "http://anglebug.com/4551"}; + + // Whether the VkDevice supports the VK_QCOM_render_pass_store_ops extension + // http://anglebug.com/5505 + Feature supportsRenderPassStoreOpNoneQCOM = { + "supportsRenderPassStoreOpNoneQCOM", FeatureCategory::VulkanFeatures, + "VkDevice supports VK_QCOM_render_pass_store_ops extension.", &members, + "http://anglebug.com/5055"}; + + // Force maxUniformBufferSize to 16K on Qualcomm's Adreno 540. Pixel2's Adreno540 reports + // maxUniformBufferSize 64k but various tests failed with that size. For that specific + // device, we set to 16k for now which is known to pass all tests. + // https://issuetracker.google.com/161903006 + Feature forceMaxUniformBufferSize16KB = { + "forceMaxUniformBufferSize16KB", FeatureCategory::VulkanWorkarounds, + "Force max uniform buffer size to 16K on some device due to bug", &members, + "https://issuetracker.google.com/161903006"}; + + // Enable mutable bit by default for ICD's that support VK_KHR_image_format_list. + // http://anglebug.com/5281 + Feature supportsImageFormatList = { + "supportsImageFormatList", FeatureCategory::VulkanFeatures, + "Enable VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT by default for ICDs " + "that support VK_KHR_image_format_list", + &members, "http://anglebug.com/5281"}; + + // Swiftshader on mac fails to initialize WebGL context when EXT_multisampled_render_to_texture + // is used by Chromium. + // http://anglebug.com/4937 + Feature enableMultisampledRenderToTexture = { + "enableMultisampledRenderToTexture", FeatureCategory::VulkanWorkarounds, + "Expose EXT_multisampled_render_to_texture", &members, "http://anglebug.com/4937"}; + + // Qualcomm fails some tests when reducing the preferred block size to 4M. + // http://anglebug.com/4995 + Feature preferredLargeHeapBlockSize4MB = { + "preferredLargeHeapBlockSize4MB", FeatureCategory::VulkanWorkarounds, + "Use 4 MB preferred large heap block size with AMD allocator", &members, + "http://anglebug.com/4995"}; + + // Manhattan is calling glFlush in the middle of renderpass which breaks renderpass and hurts + // performance on tile based GPU. When this is enabled, we will defer the glFlush call made in + // the middle of renderpass to the end of renderpass. + // https://issuetracker.google.com/issues/166475273 + Feature deferFlushUntilEndRenderPass = { + "deferFlushUntilEndRenderPass", FeatureCategory::VulkanWorkarounds, + "Allow glFlush to be deferred until renderpass ends", &members, + "https://issuetracker.google.com/issues/166475273"}; + + // Android mistakenly destroys oldSwapchain passed to vkCreateSwapchainKHR, causing crashes on + // certain drivers. 
http://anglebug.com/5061 + Feature waitIdleBeforeSwapchainRecreation = { + "waitIdleBeforeSwapchainRecreation", FeatureCategory::VulkanWorkarounds, + "Before passing an oldSwapchain to VkSwapchainCreateInfoKHR, wait for queue to be idle. " + "Works around a bug on platforms which destroy oldSwapchain in vkCreateSwapchainKHR.", + &members, "http://anglebug.com/5061"}; + + // Translate non-nearest mip filtering modes to nearest mip for all samplers for performance + // comparisons. ANGLE is non-conformant if this feature is enabled. + Feature forceNearestMipFiltering = {"forceNearestMipFiltering", + FeatureCategory::VulkanWorkarounds, + "Force nearest mip filtering when sampling.", &members}; + + // Qualcomm missynchronizes vkCmdClearAttachments in the middle of render pass. + // https://issuetracker.google.com/166809097 + Feature preferDrawClearOverVkCmdClearAttachments = { + "preferDrawClearOverVkCmdClearAttachments", FeatureCategory::VulkanWorkarounds, + "On some hardware, clear using a draw call instead of vkCmdClearAttachments in the middle " + "of render pass due to bugs", + &members, "https://issuetracker.google.com/166809097"}; + + // Whether prerotation is being emulated for testing. 90 degree rotation. + Feature emulatedPrerotation90 = {"emulatedPrerotation90", FeatureCategory::VulkanFeatures, + "Emulate 90-degree prerotation.", &members, + "http://anglebug.com/4901"}; + + // Whether prerotation is being emulated for testing. 180 degree rotation. + Feature emulatedPrerotation180 = {"emulatedPrerotation180", FeatureCategory::VulkanFeatures, + "Emulate 180-degree prerotation.", &members, + "http://anglebug.com/4901"}; + + // Whether prerotation is being emulated for testing. 270 degree rotation. + Feature emulatedPrerotation270 = {"emulatedPrerotation270", FeatureCategory::VulkanFeatures, + "Emulate 270-degree prerotation.", &members, + "http://anglebug.com/4901"}; + + // Whether we should use driver uniforms over specialization constants for some shader + // modifications like yflip and rotation. + Feature forceDriverUniformOverSpecConst = { + "forceDriverUniformOverSpecConst", FeatureCategory::VulkanWorkarounds, + "Forces using driver uniforms instead of specialization constants.", &members, + "http://issuetracker.google.com/173636783"}; + + // Whether non-conformant configurations and extensions should be exposed. When an extension is + // in development, or a GLES version is not supported on a device, we may still want to expose + // them for partial testing. This feature is enabled by our test harness. + Feature exposeNonConformantExtensionsAndVersions = { + "exposeNonConformantExtensionsAndVersions", FeatureCategory::VulkanWorkarounds, + "Expose GLES versions and extensions that are not conformant.", &members, + "http://anglebug.com/5375"}; + + // imageAtomicExchange is expected to work for r32f formats, but support for atomic operations + // for VK_FORMAT_R32_SFLOAT is rare. This support is emulated by using an r32ui format for such + // images instead. 
+ Feature emulateR32fImageAtomicExchange = { + "emulateR32fImageAtomicExchange", FeatureCategory::VulkanWorkarounds, + "Emulate r32f images with r32ui to support imageAtomicExchange.", &members, + "http://anglebug.com/5535"}; + + Feature supportsNegativeViewport = { + "supportsNegativeViewport", FeatureCategory::VulkanFeatures, + "The driver supports inverting the viewport with a negative height.", &members}; +}; + +inline FeaturesVk::FeaturesVk() = default; +inline FeaturesVk::~FeaturesVk() = default; + +} // namespace angle + +#endif // ANGLE_PLATFORM_FEATURESVK_H_ diff --git a/vendor/angle/include/platform/FrontendFeatures.h b/vendor/angle/include/platform/FrontendFeatures.h new file mode 100644 index 00000000..edaa6df5 --- /dev/null +++ b/vendor/angle/include/platform/FrontendFeatures.h @@ -0,0 +1,75 @@ +// +// Copyright 2016 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// + +// FrontendFeatures.h: Features/workarounds for driver bugs and other behaviors seen +// on all platforms. + +#ifndef ANGLE_PLATFORM_FRONTENDFEATURES_H_ +#define ANGLE_PLATFORM_FRONTENDFEATURES_H_ + +#include "platform/Feature.h" + +namespace angle +{ + +struct FrontendFeatures : angle::FeatureSetBase +{ + FrontendFeatures(); + ~FrontendFeatures(); + + // Force the context to be lost (via KHR_robustness) if a GL_OUT_OF_MEMORY error occurs. The + // driver may be in an inconsistent state if this happens, and some users of ANGLE rely on this + // notification to prevent further execution. + angle::Feature loseContextOnOutOfMemory = { + "lose_context_on_out_of_memory", angle::FeatureCategory::FrontendWorkarounds, + "Some users rely on a lost context notification if a GL_OUT_OF_MEMORY " + "error occurs", + &members}; + + // Program binaries don't contain transform feedback varyings on Qualcomm GPUs. + // Work around this by disabling the program cache for programs with transform feedback. + angle::Feature disableProgramCachingForTransformFeedback = { + "disable_program_caching_for_transform_feedback", + angle::FeatureCategory::FrontendWorkarounds, + "On some GPUs, program binaries don't contain transform feedback varyings", &members}; + + // On Windows Intel OpenGL drivers TexImage sometimes seems to interact with the Framebuffer. + // Flaky crashes can occur unless we sync the Framebuffer bindings. The workaround is to add + // Framebuffer binding dirty bits to TexImage updates. See http://anglebug.com/2906 + angle::Feature syncFramebufferBindingsOnTexImage = { + "sync_framebuffer_bindings_on_tex_image", angle::FeatureCategory::FrontendWorkarounds, + "On some drivers TexImage sometimes seems to interact " + "with the Framebuffer", + &members}; + + angle::Feature scalarizeVecAndMatConstructorArgs = { + "scalarize_vec_and_mat_constructor_args", angle::FeatureCategory::FrontendWorkarounds, + "Always rewrite vec/mat constructors to be consistent", &members, + "http://crbug.com/1165751"}; + + // Disable support for GL_OES_get_program_binary + angle::Feature disableProgramBinary = { + "disable_program_binary", angle::FeatureCategory::FrontendFeatures, + "Disable support for GL_OES_get_program_binary", &members, "http://anglebug.com/5007"}; + + // Allow disabling of GL_EXT_texture_filter_anisotropic through a runtime feature for + // performance comparisons. 
+ angle::Feature disableAnisotropicFiltering = { + "disable_anisotropic_filtering", angle::FeatureCategory::FrontendWorkarounds, + "Disable support for anisotropic filtering", &members}; + + // We can use this feature to override compressed format support for portability. + angle::Feature allowCompressedFormats = {"allow_compressed_formats", + angle::FeatureCategory::FrontendWorkarounds, + "Allow compressed formats", &members}; +}; + +inline FrontendFeatures::FrontendFeatures() = default; +inline FrontendFeatures::~FrontendFeatures() = default; + +} // namespace angle + +#endif // ANGLE_PLATFORM_FRONTENDFEATURES_H_ diff --git a/vendor/angle/include/platform/Platform.h b/vendor/angle/include/platform/Platform.h new file mode 100644 index 00000000..2c9537ed --- /dev/null +++ b/vendor/angle/include/platform/Platform.h @@ -0,0 +1,10 @@ +// +// Copyright 2020 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// Platform.h: Simple forwarding header to PlatformMethods.h. +// Ideally we can remove this file at some point since "Platform.h" is overloaded. +// + +#include "PlatformMethods.h" diff --git a/vendor/angle/include/platform/PlatformMethods.h b/vendor/angle/include/platform/PlatformMethods.h new file mode 100644 index 00000000..4697a87e --- /dev/null +++ b/vendor/angle/include/platform/PlatformMethods.h @@ -0,0 +1,339 @@ +// +// Copyright 2015 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// PlatformMethods.h: The public interface ANGLE exposes to the API layer, for +// doing platform-specific tasks like gathering data, or for tracing. + +#ifndef ANGLE_PLATFORMMETHODS_H +#define ANGLE_PLATFORMMETHODS_H + +#include +#include +#include + +#define EGL_PLATFORM_ANGLE_PLATFORM_METHODS_ANGLEX 0x3482 + +#if !defined(ANGLE_PLATFORM_EXPORT) +# if defined(_WIN32) +# if !defined(LIBANGLE_IMPLEMENTATION) +# define ANGLE_PLATFORM_EXPORT __declspec(dllimport) +# else +# define ANGLE_PLATFORM_EXPORT __declspec(dllexport) +# endif +# elif defined(__GNUC__) || defined(__clang__) +# define ANGLE_PLATFORM_EXPORT __attribute__((visibility("default"))) +# endif +#endif +#if !defined(ANGLE_PLATFORM_EXPORT) +# define ANGLE_PLATFORM_EXPORT +#endif + +#if defined(_WIN32) +# define ANGLE_APIENTRY __stdcall +#else +# define ANGLE_APIENTRY +#endif + +namespace angle +{ +struct FeaturesD3D; +struct FeaturesVk; +struct FeaturesMtl; +using TraceEventHandle = uint64_t; +using EGLDisplayType = void *; +struct PlatformMethods; + +// Use a C-like API to not trigger undefined calling behaviour. +// Avoid using decltype here to work around sanitizer limitations. +// TODO(jmadill): Use decltype here if/when UBSAN is fixed. + +// System -------------------------------------------------------------- + +// Wall clock time in seconds since the epoch. +// TODO(jmadill): investigate using an ANGLE internal time library +using CurrentTimeFunc = double (*)(PlatformMethods *platform); +inline double DefaultCurrentTime(PlatformMethods *platform) +{ + return 0.0; +} + +// Monotonically increasing time in seconds from an arbitrary fixed point in the past. +// This function is expected to return at least millisecond-precision values. For this reason, +// it is recommended that the fixed point be no further in the past than the epoch. 
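Both time hooks (DefaultCurrentTime above and the monotonic variant declared next) share the same shape: they receive the PlatformMethods pointer and return seconds as a double. A minimal sketch of what an embedder might plug in, assuming std::chrono as the clock source and the include layout this diff adds under vendor/angle/include:

#include <chrono>

#include "platform/PlatformMethods.h"  // assumes vendor/angle/include is on the include path

// Wall-clock seconds since the epoch, matching the CurrentTimeFunc shape above.
double MyCurrentTime(angle::PlatformMethods * /*platform*/)
{
    return std::chrono::duration<double>(
               std::chrono::system_clock::now().time_since_epoch())
        .count();
}

// Monotonic seconds from an arbitrary fixed point; std::chrono::steady_clock
// comfortably meets the millisecond-precision expectation described above.
double MyMonotonicallyIncreasingTime(angle::PlatformMethods * /*platform*/)
{
    return std::chrono::duration<double>(
               std::chrono::steady_clock::now().time_since_epoch())
        .count();
}

Once a PlatformMethods pointer has been obtained (see ANGLEGetDisplayPlatform near the end of this header), these would be assigned to its currentTime and monotonicallyIncreasingTime members.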
+using MonotonicallyIncreasingTimeFunc = double (*)(PlatformMethods *platform); +inline double DefaultMonotonicallyIncreasingTime(PlatformMethods *platform) +{ + return 0.0; +} + +// Logging ------------------------------------------------------------ + +// Log an error message within the platform implementation. +using LogErrorFunc = void (*)(PlatformMethods *platform, const char *errorMessage); +inline void DefaultLogError(PlatformMethods *platform, const char *errorMessage) {} + +// Log a warning message within the platform implementation. +using LogWarningFunc = void (*)(PlatformMethods *platform, const char *warningMessage); +inline void DefaultLogWarning(PlatformMethods *platform, const char *warningMessage) {} + +// Log an info message within the platform implementation. +using LogInfoFunc = void (*)(PlatformMethods *platform, const char *infoMessage); +inline void DefaultLogInfo(PlatformMethods *platform, const char *infoMessage) {} + +// Tracing -------- + +// Get a pointer to the enabled state of the given trace category. The +// embedder can dynamically change the enabled state as trace event +// recording is started and stopped by the application. Only long-lived +// literal strings should be given as the category name. The implementation +// expects the returned pointer to be held permanently in a local static. If +// the unsigned char is non-zero, tracing is enabled. If tracing is enabled, +// addTraceEvent is expected to be called by the trace event macros. +using GetTraceCategoryEnabledFlagFunc = const unsigned char *(*)(PlatformMethods *platform, + const char *categoryName); +inline const unsigned char *DefaultGetTraceCategoryEnabledFlag(PlatformMethods *platform, + const char *categoryName) +{ + return nullptr; +} + +// +// Add a trace event to the platform tracing system. Depending on the actual +// enabled state, this event may be recorded or dropped. +// - phase specifies the type of event: +// - BEGIN ('B'): Marks the beginning of a scoped event. +// - END ('E'): Marks the end of a scoped event. +// - COMPLETE ('X'): Marks the beginning of a scoped event, but doesn't +// need a matching END event. Instead, at the end of the scope, +// updateTraceEventDuration() must be called with the TraceEventHandle +// returned from addTraceEvent(). +// - INSTANT ('I'): Standalone, instantaneous event. +// - START ('S'): Marks the beginning of an asynchronous event (the end +// event can occur in a different scope or thread). The id parameter is +// used to match START/FINISH pairs. +// - FINISH ('F'): Marks the end of an asynchronous event. +// - COUNTER ('C'): Used to trace integer quantities that change over +// time. The argument values are expected to be of type int. +// - METADATA ('M'): Reserved for internal use. +// - categoryEnabled is the pointer returned by getTraceCategoryEnabledFlag. +// - name is the name of the event. Also used to match BEGIN/END and +// START/FINISH pairs. +// - id optionally allows events of the same name to be distinguished from +// each other. For example, to trace the construction and destruction of +// objects, specify the pointer as the id parameter. +// - timestamp should be a time value returned from monotonicallyIncreasingTime. +// - numArgs specifies the number of elements in argNames, argTypes, and +// argValues. +// - argNames is the array of argument names. Use long-lived literal strings +// or specify the COPY flag. 
+// - argTypes is the array of argument types: +// - BOOL (1): bool +// - UINT (2): unsigned long long +// - INT (3): long long +// - DOUBLE (4): double +// - POINTER (5): void* +// - STRING (6): char* (long-lived null-terminated char* string) +// - COPY_STRING (7): char* (temporary null-terminated char* string) +// - CONVERTABLE (8): WebConvertableToTraceFormat +// - argValues is the array of argument values. Each value is the unsigned +// long long member of a union of all supported types. +// - flags can be 0 or one or more of the following, ORed together: +// - COPY (0x1): treat all strings (name, argNames and argValues of type +// string) as temporary so that they will be copied by addTraceEvent. +// - HAS_ID (0x2): use the id argument to uniquely identify the event for +// matching with other events of the same name. +// - MANGLE_ID (0x4): specify this flag if the id parameter is the value +// of a pointer. +using AddTraceEventFunc = angle::TraceEventHandle (*)(PlatformMethods *platform, + char phase, + const unsigned char *categoryEnabledFlag, + const char *name, + unsigned long long id, + double timestamp, + int numArgs, + const char **argNames, + const unsigned char *argTypes, + const unsigned long long *argValues, + unsigned char flags); +inline angle::TraceEventHandle DefaultAddTraceEvent(PlatformMethods *platform, + char phase, + const unsigned char *categoryEnabledFlag, + const char *name, + unsigned long long id, + double timestamp, + int numArgs, + const char **argNames, + const unsigned char *argTypes, + const unsigned long long *argValues, + unsigned char flags) +{ + return 0; +} + +// Set the duration field of a COMPLETE trace event. +using UpdateTraceEventDurationFunc = void (*)(PlatformMethods *platform, + const unsigned char *categoryEnabledFlag, + const char *name, + angle::TraceEventHandle eventHandle); +inline void DefaultUpdateTraceEventDuration(PlatformMethods *platform, + const unsigned char *categoryEnabledFlag, + const char *name, + angle::TraceEventHandle eventHandle) +{} + +// Callbacks for reporting histogram data. +// CustomCounts histogram has exponential bucket sizes, so that min=1, max=1000000, bucketCount=50 +// would do. +using HistogramCustomCountsFunc = void (*)(PlatformMethods *platform, + const char *name, + int sample, + int min, + int max, + int bucketCount); +inline void DefaultHistogramCustomCounts(PlatformMethods *platform, + const char *name, + int sample, + int min, + int max, + int bucketCount) +{} +// Enumeration histogram buckets are linear, boundaryValue should be larger than any possible sample +// value. +using HistogramEnumerationFunc = void (*)(PlatformMethods *platform, + const char *name, + int sample, + int boundaryValue); +inline void DefaultHistogramEnumeration(PlatformMethods *platform, + const char *name, + int sample, + int boundaryValue) +{} +// Unlike enumeration histograms, sparse histograms only allocate memory for non-empty buckets. +using HistogramSparseFunc = void (*)(PlatformMethods *platform, const char *name, int sample); +inline void DefaultHistogramSparse(PlatformMethods *platform, const char *name, int sample) {} +// Boolean histograms track two-state variables. +using HistogramBooleanFunc = void (*)(PlatformMethods *platform, const char *name, bool sample); +inline void DefaultHistogramBoolean(PlatformMethods *platform, const char *name, bool sample) {} + +// Allows us to programatically override ANGLE's default workarounds for testing purposes. 
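The override hooks declared next (OverrideWorkaroundsD3D, OverrideFeaturesVk, OverrideFeaturesMtl) are how a test harness or embedder forces individual features on or off. A hedged sketch of one possible wiring follows; it assumes angle::Feature exposes a bool enabled member (platform/Feature.h is not shown in this diff), that the exported ANGLEGetDisplayPlatform entry point declared at the end of this header is reachable through eglGetProcAddress (loading the symbol from the library directly is another option), and that this runs before the display is initialized.

#include <EGL/egl.h>

#include "platform/FeaturesVk.h"       // added by this diff under vendor/angle/include
#include "platform/PlatformMethods.h"

void MyOverrideFeaturesVk(angle::PlatformMethods * /*platform*/, angle::FeaturesVk *features)
{
    // Hypothetical test policy: force the synchronous command-queue path.
    features->asyncCommandQueue.enabled = false;  // assumes angle::Feature has `enabled`
}

void InstallPlatformOverrides(EGLDisplay display)
{
    auto getDisplayPlatform = reinterpret_cast<angle::GetDisplayPlatformFunc>(
        eglGetProcAddress("ANGLEGetDisplayPlatform"));
    if (getDisplayPlatform == nullptr)
    {
        return;
    }

    angle::PlatformMethods *methods = nullptr;
    if (!getDisplayPlatform(display, angle::g_PlatformMethodNames,
                            angle::g_NumPlatformMethods, nullptr, &methods))
    {
        return;  // method-name table did not match this ANGLE build
    }

    methods->overrideFeaturesVk = MyOverrideFeaturesVk;
}

g_PlatformMethodNames and g_NumPlatformMethods come from this same header, so the name/count pair passed in always matches the compiled PlatformMethods layout, which is exactly the mismatch case the ANGLEGetDisplayPlatform comment below warns about.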
+using OverrideWorkaroundsD3DFunc = void (*)(PlatformMethods *platform, + angle::FeaturesD3D *featuresD3D); +inline void DefaultOverrideWorkaroundsD3D(PlatformMethods *platform, + angle::FeaturesD3D *featuresD3D) +{} + +using OverrideFeaturesVkFunc = void (*)(PlatformMethods *platform, + angle::FeaturesVk *featuresVulkan); +inline void DefaultOverrideFeaturesVk(PlatformMethods *platform, angle::FeaturesVk *featuresVulkan) +{} + +using OverrideFeaturesMtlFunc = void (*)(PlatformMethods *platform, + angle::FeaturesMtl *featuresMetal); +inline void DefaultOverrideFeaturesMtl(PlatformMethods *platform, angle::FeaturesMtl *featuresMetal) +{} + +// Callback on a successful program link with the program binary. Can be used to store +// shaders to disk. Keys are a 160-bit SHA-1 hash. +using ProgramKeyType = std::array; +using CacheProgramFunc = void (*)(PlatformMethods *platform, + const ProgramKeyType &key, + size_t programSize, + const uint8_t *programBytes); +inline void DefaultCacheProgram(PlatformMethods *platform, + const ProgramKeyType &key, + size_t programSize, + const uint8_t *programBytes) +{} + +using PostWorkerTaskCallback = void (*)(void *userData); +using PostWorkerTaskFunc = void (*)(PlatformMethods *platform, + PostWorkerTaskCallback callback, + void *userData); +constexpr PostWorkerTaskFunc DefaultPostWorkerTask = nullptr; + +// Platform methods are enumerated here once. +#define ANGLE_PLATFORM_OP(OP) \ + OP(currentTime, CurrentTime) \ + OP(monotonicallyIncreasingTime, MonotonicallyIncreasingTime) \ + OP(logError, LogError) \ + OP(logWarning, LogWarning) \ + OP(logInfo, LogInfo) \ + OP(getTraceCategoryEnabledFlag, GetTraceCategoryEnabledFlag) \ + OP(addTraceEvent, AddTraceEvent) \ + OP(updateTraceEventDuration, UpdateTraceEventDuration) \ + OP(histogramCustomCounts, HistogramCustomCounts) \ + OP(histogramEnumeration, HistogramEnumeration) \ + OP(histogramSparse, HistogramSparse) \ + OP(histogramBoolean, HistogramBoolean) \ + OP(overrideWorkaroundsD3D, OverrideWorkaroundsD3D) \ + OP(overrideFeaturesVk, OverrideFeaturesVk) \ + OP(cacheProgram, CacheProgram) \ + OP(overrideFeaturesMtl, OverrideFeaturesMtl) \ + OP(postWorkerTask, PostWorkerTask) + +#define ANGLE_PLATFORM_METHOD_DEF(Name, CapsName) CapsName##Func Name = Default##CapsName; + +struct ANGLE_PLATFORM_EXPORT PlatformMethods +{ + inline PlatformMethods(); + + // User data pointer for any implementation specific members. Put it at the start of the + // platform structure so it doesn't become overwritten if one version of the platform + // adds or removes new members. + void *context = 0; + + ANGLE_PLATFORM_OP(ANGLE_PLATFORM_METHOD_DEF) +}; + +inline PlatformMethods::PlatformMethods() = default; + +#undef ANGLE_PLATFORM_METHOD_DEF + +// Subtract one to account for the context pointer. +constexpr unsigned int g_NumPlatformMethods = (sizeof(PlatformMethods) / sizeof(uintptr_t)) - 1; + +#define ANGLE_PLATFORM_METHOD_STRING(Name) #Name +#define ANGLE_PLATFORM_METHOD_STRING2(Name, CapsName) ANGLE_PLATFORM_METHOD_STRING(Name), + +constexpr const char *const g_PlatformMethodNames[g_NumPlatformMethods] = { + ANGLE_PLATFORM_OP(ANGLE_PLATFORM_METHOD_STRING2)}; + +#undef ANGLE_PLATFORM_METHOD_STRING2 +#undef ANGLE_PLATFORM_METHOD_STRING + +} // namespace angle + +extern "C" { + +// Gets the platform methods on the passed-in EGL display. If the method name signature does not +// match the compiled signature for this ANGLE, false is returned. On success true is returned. 
+// The application should set any platform methods it cares about on the returned pointer. +// If display is not valid, behaviour is undefined. + +ANGLE_PLATFORM_EXPORT bool ANGLE_APIENTRY ANGLEGetDisplayPlatform(angle::EGLDisplayType display, + const char *const methodNames[], + unsigned int methodNameCount, + void *context, + void *platformMethodsOut); + +// Sets the platform methods back to their defaults. +// If display is not valid, behaviour is undefined. +ANGLE_PLATFORM_EXPORT void ANGLE_APIENTRY ANGLEResetDisplayPlatform(angle::EGLDisplayType display); +} // extern "C" + +namespace angle +{ +typedef bool(ANGLE_APIENTRY *GetDisplayPlatformFunc)(angle::EGLDisplayType, + const char *const *, + unsigned int, + void *, + void *); +typedef void(ANGLE_APIENTRY *ResetDisplayPlatformFunc)(angle::EGLDisplayType); +} // namespace angle + +// This function is not exported +angle::PlatformMethods *ANGLEPlatformCurrent(); + +#endif // ANGLE_PLATFORMMETHODS_H diff --git a/vendor/angle/include/vulkan/vulkan_fuchsia_ext.h b/vendor/angle/include/vulkan/vulkan_fuchsia_ext.h new file mode 100644 index 00000000..75f120e7 --- /dev/null +++ b/vendor/angle/include/vulkan/vulkan_fuchsia_ext.h @@ -0,0 +1,145 @@ +// +// Copyright 2019 The ANGLE Project Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// vulkan_fuchsia_ext: +// Defines Fuchsia-specific Vulkan extensions that haven't been +// upstreamed to the official Vulkan headers. +// + +#ifndef COMMON_VULKAN_FUCHSIA_EXT_H_ +#define COMMON_VULKAN_FUCHSIA_EXT_H_ + +#if !defined(VK_NO_PROTOTYPES) +# define VK_NO_PROTOTYPES +#endif + +#include + +#if defined(ANGLE_PLATFORM_FUCHSIA) +# include +#else +typedef uint32_t zx_handle_t; +# define ZX_HANDLE_INVALID ((zx_handle_t)0) +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +const VkStructureType VK_STRUCTURE_TYPE_TEMP_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = + static_cast(1001005000); +const VkStructureType VK_STRUCTURE_TYPE_TEMP_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = + static_cast(1001005001); +const VkStructureType VK_STRUCTURE_TYPE_TEMP_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = + static_cast(1001005002); +const VkStructureType VK_STRUCTURE_TYPE_TEMP_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA = + static_cast(1001006000); +const VkStructureType VK_STRUCTURE_TYPE_TEMP_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA = + static_cast(1001006001); + +const VkExternalMemoryHandleTypeFlagBits + VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA = + static_cast(0x00100000); + +const VkExternalSemaphoreHandleTypeFlagBits + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TEMP_ZIRCON_EVENT_BIT_FUCHSIA = + static_cast(0x00100000); + +#define VK_FUCHSIA_external_memory 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory" + +typedef struct VkImportMemoryZirconHandleInfoFUCHSIA +{ + VkStructureType sType; + const void *pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + zx_handle_t handle; +} VkImportMemoryZirconHandleInfoFUCHSIA; + +typedef struct VkMemoryZirconHandlePropertiesFUCHSIA +{ + VkStructureType sType; + void *pNext; + zx_handle_t memoryTypeBits; +} VkMemoryZirconHandlePropertiesFUCHSIA; + +typedef struct VkMemoryGetZirconHandleInfoFUCHSIA +{ + VkStructureType sType; + const void *pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetZirconHandleInfoFUCHSIA; + +typedef VkResult(VKAPI_PTR 
*PFN_vkGetMemoryZirconHandleFUCHSIA)( + VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA *pGetZirconHandleInfo, + zx_handle_t *pZirconHandle); +typedef VkResult(VKAPI_PTR *PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t ZirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA *pMemoryZirconHandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL +vkGetMemoryZirconHandleFUCHSIA(VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA *pGetZirconHandleInfo, + zx_handle_t *pZirconHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t ZirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA *pMemoryZirconHandleProperties); +#endif + +#define VK_FUCHSIA_external_semaphore 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore" + +typedef struct VkImportSemaphoreZirconHandleInfoFUCHSIA +{ + VkStructureType sType; + const void *pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + zx_handle_t handle; +} VkImportSemaphoreZirconHandleInfoFUCHSIA; + +typedef struct VkSemaphoreGetZirconHandleInfoFUCHSIA +{ + VkStructureType sType; + const void *pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetZirconHandleInfoFUCHSIA; + +typedef VkResult(VKAPI_PTR *PFN_vkImportSemaphoreZirconHandleFUCHSIA)( + VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA *pImportSemaphoreZirconHandleInfo); +typedef VkResult(VKAPI_PTR *PFN_vkGetSemaphoreZirconHandleFUCHSIA)( + VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA *pGetZirconHandleInfo, + zx_handle_t *pZirconHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA *pImportSemaphoreZirconHandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL +vkGetSemaphoreZirconHandleFUCHSIA(VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA *pGetZirconHandleInfo, + zx_handle_t *pZirconHandle); +#endif + +#ifdef __cplusplus +} +#endif + +#endif // COMMON_VULKAN_FUCHSIA_EXT_H_ diff --git a/vendor/angle/win/x64/libEGL.dll b/vendor/angle/win/x64/libEGL.dll new file mode 100644 index 00000000..5bb9b781 --- /dev/null +++ b/vendor/angle/win/x64/libEGL.dll @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4fc01dc969b403b012cb26299ac645a6406e762382f230f831dd95d40e513515 +size 141824 diff --git a/vendor/angle/win/x64/libEGL.dll.lib b/vendor/angle/win/x64/libEGL.dll.lib new file mode 100644 index 00000000..cbc0f608 --- /dev/null +++ b/vendor/angle/win/x64/libEGL.dll.lib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:895de3dd2ede8164ea37d7836284afd143022a2fea155d495115090dec9a11c4 +size 24306 diff --git a/vendor/angle/win/x64/libGLESv2.dll b/vendor/angle/win/x64/libGLESv2.dll new file mode 100644 index 00000000..935b600e --- /dev/null +++ b/vendor/angle/win/x64/libGLESv2.dll @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f716736fbfc78396b070f3c3d1c343bf1a070b9b7a910e362af809b183236481 +size 10339840 diff --git a/vendor/angle/win/x64/libGLESv2.dll.lib b/vendor/angle/win/x64/libGLESv2.dll.lib new file mode 100644 index 00000000..e1eda18b --- /dev/null +++ 
b/vendor/angle/win/x64/libGLESv2.dll.lib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:096abf3ff6dac1d7710410eeb805ffa07e85286e21d169f7cf532ed68de8f495 +size 824902 diff --git a/vendor/angle/win/x86/libEGL.dll b/vendor/angle/win/x86/libEGL.dll new file mode 100644 index 00000000..8fc860d1 --- /dev/null +++ b/vendor/angle/win/x86/libEGL.dll @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:597d0e38ecd75fcc6011499137d0eec0dc6d5abe63a1775cf7640eaac9d1ee01 +size 81920 diff --git a/vendor/angle/win/x86/libEGL.dll.lib b/vendor/angle/win/x86/libEGL.dll.lib new file mode 100644 index 00000000..f0d181b5 --- /dev/null +++ b/vendor/angle/win/x86/libEGL.dll.lib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:68084f7831e3d2530af36252a2a94136887f93d8c9ebac4f42db58107d9ee3ae +size 26078 diff --git a/vendor/angle/win/x86/libGLESv2.dll b/vendor/angle/win/x86/libGLESv2.dll new file mode 100644 index 00000000..c70b9367 --- /dev/null +++ b/vendor/angle/win/x86/libGLESv2.dll @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:03a7541a3cfac82f37af794293a86300876d027593da39686137be757dc9a38c +size 19692544 diff --git a/vendor/angle/win/x86/libGLESv2.dll.lib b/vendor/angle/win/x86/libGLESv2.dll.lib new file mode 100644 index 00000000..0dbbab67 --- /dev/null +++ b/vendor/angle/win/x86/libGLESv2.dll.lib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:24052cdf80bcef6bf317f41c731d63fb2b79aca2c23dd9ba58fdde69a1021481 +size 885110 diff --git a/vendor/angle/win/x86/zlib.dll b/vendor/angle/win/x86/zlib.dll new file mode 100644 index 00000000..a2aefac6 --- /dev/null +++ b/vendor/angle/win/x86/zlib.dll @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:22d790c94b29530c6f77190765febae0f407af97cc38384d7d518c0052d0ad27 +size 160256 diff --git a/vendor/swiftshader/include/Android/android/api-level.h b/vendor/swiftshader/include/Android/android/api-level.h new file mode 100644 index 00000000..2d2f0968 --- /dev/null +++ b/vendor/swiftshader/include/Android/android/api-level.h @@ -0,0 +1,38 @@ +/* + * Copyright (C) 2008 The Android Open Source Project + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED + * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT + * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. 
+ */ + +#ifndef ANDROID_API_LEVEL_H +#define ANDROID_API_LEVEL_H + +/* + * Magic version number for a current development build, which has + * not yet turned into an official release. + */ +#define __ANDROID_API__ 10000 + +#endif /* ANDROID_API_LEVEL_H */ diff --git a/vendor/swiftshader/include/Android/android/sync.h b/vendor/swiftshader/include/Android/android/sync.h new file mode 100644 index 00000000..1ae728a5 --- /dev/null +++ b/vendor/swiftshader/include/Android/android/sync.h @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +extern "C" { +int sync_wait(int fd, int timeout); +}; diff --git a/vendor/swiftshader/include/Android/cutils/native_handle.h b/vendor/swiftshader/include/Android/cutils/native_handle.h new file mode 100644 index 00000000..b92a6638 --- /dev/null +++ b/vendor/swiftshader/include/Android/cutils/native_handle.h @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +struct native_handle_t; + +typedef const struct native_handle_t* buffer_handle_t; diff --git a/vendor/swiftshader/include/Android/hardware/gralloc.h b/vendor/swiftshader/include/Android/hardware/gralloc.h new file mode 100644 index 00000000..013e86a3 --- /dev/null +++ b/vendor/swiftshader/include/Android/hardware/gralloc.h @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include + +#include + +struct android_ycbcr; + +enum { + GRALLOC_USAGE_SW_READ_OFTEN = 0x00000003U, + GRALLOC_USAGE_SW_WRITE_OFTEN = 0x00000030U, + GRALLOC_USAGE_HW_TEXTURE = 0x00000100U, + GRALLOC_USAGE_HW_RENDER = 0x00000200U, +}; + +struct gralloc_module_t { + hw_module_t common; + int (*registerBuffer)(gralloc_module_t const*, buffer_handle_t); + int (*unregisterBuffer)(gralloc_module_t const*, buffer_handle_t); + int (*lock)(gralloc_module_t const*, buffer_handle_t, int, int, int, int, int, void**); + int (*unlock)(gralloc_module_t const*, buffer_handle_t); + int (*perform)(gralloc_module_t const*, int, ...); + int (*lock_ycbcr)(gralloc_module_t const*, buffer_handle_t, int, int, int, int, int, + android_ycbcr*); + int (*lockAsync)(gralloc_module_t const*, buffer_handle_t, int, int, int, int, int, void**, int); + int (*unlockAsync)(gralloc_module_t const*, buffer_handle_t, int*); + int (*lockAsync_ycbcr)(gralloc_module_t const*, buffer_handle_t, int, int, int, int, int, + android_ycbcr*, int); + int32_t (*getTransportSize)(gralloc_module_t const*, buffer_handle_t, uint32_t, uint32_t); + int32_t (*validateBufferSize)(gralloc_module_t const*, buffer_handle_t, uint32_t, uint32_t, int32_t, int, uint32_t); + + void* reserved_proc[1]; +}; diff --git a/vendor/swiftshader/include/Android/hardware/gralloc1.h b/vendor/swiftshader/include/Android/hardware/gralloc1.h new file mode 100644 index 00000000..b02decfa --- /dev/null +++ b/vendor/swiftshader/include/Android/hardware/gralloc1.h @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include + +#include + +#define GRALLOC_MODULE_API_VERSION_1_0 HARDWARE_MAKE_API_VERSION(1, 0) + +#define GRALLOC_HARDWARE_MODULE_ID "gralloc" + +enum { + GRALLOC1_ERROR_NONE = 0, + GRALLOC1_ERROR_BAD_HANDLE = 2, + GRALLOC1_ERROR_BAD_VALUE = 3, + GRALLOC1_ERROR_UNDEFINED = 6, +}; + +enum { + GRALLOC1_FUNCTION_LOCK = 18, + GRALLOC1_FUNCTION_UNLOCK = 20, +}; + +enum { + GRALLOC1_CONSUMER_USAGE_CPU_READ = 1ULL << 1, + GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN = 1ULL << 2 | GRALLOC1_CONSUMER_USAGE_CPU_READ, + GRALLOC1_CONSUMER_USAGE_CPU_WRITE = 1ULL << 5, + GRALLOC1_CONSUMER_USAGE_CPU_WRITE_OFTEN = 1ULL << 6 | GRALLOC1_CONSUMER_USAGE_CPU_WRITE, + GRALLOC1_CONSUMER_USAGE_GPU_TEXTURE = 1ULL << 8, +}; + +enum { + GRALLOC1_PRODUCER_USAGE_CPU_READ = 1ULL << 1, + GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN = 1ULL << 2 | GRALLOC1_PRODUCER_USAGE_CPU_READ, + GRALLOC1_PRODUCER_USAGE_CPU_WRITE = 1ULL << 5, + GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN = 1ULL << 6 | GRALLOC1_PRODUCER_USAGE_CPU_WRITE, + GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET = 1ULL << 9, +}; + +typedef void (*gralloc1_function_pointer_t)(); + +struct gralloc1_rect_t { + int32_t left; + int32_t top; + int32_t width; + int32_t height; +}; + +struct gralloc1_device_t { + hw_device_t common; + void (*getCapabilities)(gralloc1_device_t*, uint32_t*, int32_t*); + gralloc1_function_pointer_t (*getFunction)(gralloc1_device_t*, int32_t); +}; + +typedef int32_t (*GRALLOC1_PFN_LOCK)(gralloc1_device_t*, buffer_handle_t, uint64_t, uint64_t, + const gralloc1_rect_t*, void**, int32_t); +typedef int32_t (*GRALLOC1_PFN_UNLOCK)(gralloc1_device_t*, buffer_handle_t, int32_t*); + +static inline int gralloc1_open(const hw_module_t* module, gralloc1_device_t** device) { + return module->methods->open(module, GRALLOC_HARDWARE_MODULE_ID, + reinterpret_cast(device)); +} diff --git a/vendor/swiftshader/include/Android/hardware/hardware.h b/vendor/swiftshader/include/Android/hardware/hardware.h new file mode 100644 index 00000000..21d6dc4a --- /dev/null +++ b/vendor/swiftshader/include/Android/hardware/hardware.h @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include + +#define MAKE_TAG_CONSTANT(A, B, C, D) (((A) << 24) | ((B) << 16) | ((C) << 8) | (D)) + +#define HARDWARE_MODULE_TAG MAKE_TAG_CONSTANT('H', 'W', 'M', 'T') +#define HARDWARE_DEVICE_TAG MAKE_TAG_CONSTANT('H', 'W', 'D', 'T') + +#define HARDWARE_MAKE_API_VERSION(maj, min) ((((maj)&0xff) << 8) | ((min)&0xff)) + +#define HARDWARE_HAL_API_VERSION HARDWARE_MAKE_API_VERSION(1, 0) + +struct hw_module_methods_t; + +struct hw_module_t { + uint32_t tag; + uint16_t module_api_version; + uint16_t hal_api_version; + const char* id; + const char* name; + const char* author; + hw_module_methods_t* methods; + void* dso; +#ifdef __LP64__ + uint64_t reserved[32 - 7]; +#else + uint32_t reserved[32 - 7]; +#endif +}; + +struct hw_device_t { + uint32_t tag; + uint32_t version; + struct hw_module_t* module; +#ifdef __LP64__ + uint64_t reserved[12]; +#else + uint32_t reserved[12]; +#endif + int (*close)(hw_device_t* device); +}; + +struct hw_module_methods_t { + int (*open)(const hw_module_t*, const char*, hw_device_t**); +}; + +extern "C" { +int hw_get_module(const char* id, const hw_module_t** module); +}; diff --git a/vendor/swiftshader/include/Android/nativebase/nativebase.h b/vendor/swiftshader/include/Android/nativebase/nativebase.h new file mode 100644 index 00000000..c2e84d79 --- /dev/null +++ b/vendor/swiftshader/include/Android/nativebase/nativebase.h @@ -0,0 +1,64 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include + +#include +#include + +// clang-format off +#define ANDROID_NATIVE_MAKE_CONSTANT(a, b, c, d) \ + ((static_cast(a) << 24) | \ + (static_cast(b) << 16) | \ + (static_cast(c) << 8) | \ + (static_cast(d) << 0)) +// clang-format on + +struct android_native_base_t { + int magic; + int version; + void* reserved[4]; + void (*incRef)(android_native_base_t*); + void (*decRef)(android_native_base_t*); +}; + +#define ANDROID_NATIVE_BUFFER_MAGIC ANDROID_NATIVE_MAKE_CONSTANT('_', 'b', 'f', 'r') + +struct ANativeWindowBuffer { + ANativeWindowBuffer() { + common.magic = ANDROID_NATIVE_BUFFER_MAGIC; + common.version = sizeof(ANativeWindowBuffer); + memset(common.reserved, 0, sizeof(common.reserved)); + } + + android_native_base_t common; + + int width; + int height; + int stride; + int format; + int usage_deprecated; + uintptr_t layerCount; + + void* reserved[1]; + + const native_handle_t* handle; + uint64_t usage; + + void* reserved_proc[8 - (sizeof(uint64_t) / sizeof(void*))]; +}; diff --git a/vendor/swiftshader/include/Android/sync/sync.h b/vendor/swiftshader/include/Android/sync/sync.h new file mode 100644 index 00000000..1ae728a5 --- /dev/null +++ b/vendor/swiftshader/include/Android/sync/sync.h @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +extern "C" { +int sync_wait(int fd, int timeout); +}; diff --git a/vendor/swiftshader/include/Android/system/graphics.h b/vendor/swiftshader/include/Android/system/graphics.h new file mode 100644 index 00000000..563287a5 --- /dev/null +++ b/vendor/swiftshader/include/Android/system/graphics.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +enum { + HAL_PIXEL_FORMAT_RGBA_8888 = 1, + HAL_PIXEL_FORMAT_RGBX_8888 = 2, + HAL_PIXEL_FORMAT_RGB_888 = 3, + HAL_PIXEL_FORMAT_RGB_565 = 4, + HAL_PIXEL_FORMAT_BGRA_8888 = 5, + HAL_PIXEL_FORMAT_RGBA_FP16 = 22, + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 34, + HAL_PIXEL_FORMAT_YV12 = 842094169, +}; diff --git a/vendor/swiftshader/include/Android/vndk/window.h b/vendor/swiftshader/include/Android/vndk/window.h new file mode 100644 index 00000000..8b5965f8 --- /dev/null +++ b/vendor/swiftshader/include/Android/vndk/window.h @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include + +struct ANativeWindow; +typedef struct ANativeWindow ANativeWindow; + +void ANativeWindow_acquire(ANativeWindow* window); +void ANativeWindow_release(ANativeWindow* window); +int32_t ANativeWindow_getWidth(ANativeWindow* window); +int32_t ANativeWindow_getHeight(ANativeWindow* window); +int ANativeWindow_dequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer, int* fenceFd); +int ANativeWindow_queueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer, int fenceFd); +int ANativeWindow_cancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer, int fenceFd); +int ANativeWindow_setUsage(ANativeWindow* window, uint64_t usage); diff --git a/vendor/swiftshader/include/EGL/egl.h b/vendor/swiftshader/include/EGL/egl.h new file mode 100644 index 00000000..c9e8b7c5 --- /dev/null +++ b/vendor/swiftshader/include/EGL/egl.h @@ -0,0 +1,303 @@ +#ifndef __egl_h_ +#define __egl_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2017 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. 
The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** http://www.khronos.org/registry/egl +** +** Khronos $Git commit SHA1: bae3518c48 $ on $Git commit date: 2018-05-17 10:56:57 -0700 $ +*/ + +#include + +/* Generated on date 20180517 */ + +/* Generated C header for: + * API: egl + * Versions considered: .* + * Versions emitted: .* + * Default extensions included: None + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef EGL_VERSION_1_0 +#define EGL_VERSION_1_0 1 +typedef unsigned int EGLBoolean; +typedef void *EGLDisplay; +#include +#include +typedef void *EGLConfig; +typedef void *EGLSurface; +typedef void *EGLContext; +typedef void (*__eglMustCastToProperFunctionPointerType)(void); +#define EGL_ALPHA_SIZE 0x3021 +#define EGL_BAD_ACCESS 0x3002 +#define EGL_BAD_ALLOC 0x3003 +#define EGL_BAD_ATTRIBUTE 0x3004 +#define EGL_BAD_CONFIG 0x3005 +#define EGL_BAD_CONTEXT 0x3006 +#define EGL_BAD_CURRENT_SURFACE 0x3007 +#define EGL_BAD_DISPLAY 0x3008 +#define EGL_BAD_MATCH 0x3009 +#define EGL_BAD_NATIVE_PIXMAP 0x300A +#define EGL_BAD_NATIVE_WINDOW 0x300B +#define EGL_BAD_PARAMETER 0x300C +#define EGL_BAD_SURFACE 0x300D +#define EGL_BLUE_SIZE 0x3022 +#define EGL_BUFFER_SIZE 0x3020 +#define EGL_CONFIG_CAVEAT 0x3027 +#define EGL_CONFIG_ID 0x3028 +#define EGL_CORE_NATIVE_ENGINE 0x305B +#define EGL_DEPTH_SIZE 0x3025 +#define EGL_DONT_CARE EGL_CAST(EGLint,-1) +#define EGL_DRAW 0x3059 +#define EGL_EXTENSIONS 0x3055 +#define EGL_FALSE 0 +#define EGL_GREEN_SIZE 0x3023 +#define EGL_HEIGHT 0x3056 +#define EGL_LARGEST_PBUFFER 0x3058 +#define EGL_LEVEL 0x3029 +#define EGL_MAX_PBUFFER_HEIGHT 0x302A +#define EGL_MAX_PBUFFER_PIXELS 0x302B +#define EGL_MAX_PBUFFER_WIDTH 0x302C +#define EGL_NATIVE_RENDERABLE 0x302D +#define EGL_NATIVE_VISUAL_ID 0x302E +#define EGL_NATIVE_VISUAL_TYPE 0x302F +#define EGL_NONE 0x3038 +#define EGL_NON_CONFORMANT_CONFIG 0x3051 +#define EGL_NOT_INITIALIZED 0x3001 +#define EGL_NO_CONTEXT EGL_CAST(EGLContext,0) +#define EGL_NO_DISPLAY EGL_CAST(EGLDisplay,0) +#define EGL_NO_SURFACE EGL_CAST(EGLSurface,0) +#define EGL_PBUFFER_BIT 0x0001 +#define EGL_PIXMAP_BIT 0x0002 +#define EGL_READ 0x305A +#define EGL_RED_SIZE 0x3024 +#define EGL_SAMPLES 0x3031 +#define EGL_SAMPLE_BUFFERS 0x3032 +#define EGL_SLOW_CONFIG 0x3050 +#define EGL_STENCIL_SIZE 0x3026 +#define EGL_SUCCESS 0x3000 +#define EGL_SURFACE_TYPE 0x3033 +#define EGL_TRANSPARENT_BLUE_VALUE 0x3035 +#define EGL_TRANSPARENT_GREEN_VALUE 0x3036 +#define EGL_TRANSPARENT_RED_VALUE 0x3037 +#define EGL_TRANSPARENT_RGB 0x3052 +#define EGL_TRANSPARENT_TYPE 0x3034 +#define EGL_TRUE 1 +#define EGL_VENDOR 0x3053 +#define EGL_VERSION 0x3054 +#define EGL_WIDTH 0x3057 +#define EGL_WINDOW_BIT 0x0004 +EGLAPI EGLBoolean EGLAPIENTRY eglChooseConfig (EGLDisplay dpy, const EGLint *attrib_list, EGLConfig *configs, EGLint config_size, EGLint *num_config); +EGLAPI EGLBoolean EGLAPIENTRY eglCopyBuffers (EGLDisplay dpy, EGLSurface surface, EGLNativePixmapType target); +EGLAPI EGLContext EGLAPIENTRY eglCreateContext (EGLDisplay dpy, EGLConfig config, EGLContext share_context, const EGLint *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePbufferSurface (EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePixmapSurface (EGLDisplay dpy, EGLConfig config, EGLNativePixmapType pixmap, const EGLint *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreateWindowSurface (EGLDisplay dpy, EGLConfig config, 
EGLNativeWindowType win, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroyContext (EGLDisplay dpy, EGLContext ctx); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroySurface (EGLDisplay dpy, EGLSurface surface); +EGLAPI EGLBoolean EGLAPIENTRY eglGetConfigAttrib (EGLDisplay dpy, EGLConfig config, EGLint attribute, EGLint *value); +EGLAPI EGLBoolean EGLAPIENTRY eglGetConfigs (EGLDisplay dpy, EGLConfig *configs, EGLint config_size, EGLint *num_config); +EGLAPI EGLDisplay EGLAPIENTRY eglGetCurrentDisplay (void); +EGLAPI EGLSurface EGLAPIENTRY eglGetCurrentSurface (EGLint readdraw); +EGLAPI EGLDisplay EGLAPIENTRY eglGetDisplay (EGLNativeDisplayType display_id); +EGLAPI EGLint EGLAPIENTRY eglGetError (void); +EGLAPI __eglMustCastToProperFunctionPointerType EGLAPIENTRY eglGetProcAddress (const char *procname); +EGLAPI EGLBoolean EGLAPIENTRY eglInitialize (EGLDisplay dpy, EGLint *major, EGLint *minor); +EGLAPI EGLBoolean EGLAPIENTRY eglMakeCurrent (EGLDisplay dpy, EGLSurface draw, EGLSurface read, EGLContext ctx); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryContext (EGLDisplay dpy, EGLContext ctx, EGLint attribute, EGLint *value); +EGLAPI const char *EGLAPIENTRY eglQueryString (EGLDisplay dpy, EGLint name); +EGLAPI EGLBoolean EGLAPIENTRY eglQuerySurface (EGLDisplay dpy, EGLSurface surface, EGLint attribute, EGLint *value); +EGLAPI EGLBoolean EGLAPIENTRY eglSwapBuffers (EGLDisplay dpy, EGLSurface surface); +EGLAPI EGLBoolean EGLAPIENTRY eglTerminate (EGLDisplay dpy); +EGLAPI EGLBoolean EGLAPIENTRY eglWaitGL (void); +EGLAPI EGLBoolean EGLAPIENTRY eglWaitNative (EGLint engine); +#endif /* EGL_VERSION_1_0 */ + +#ifndef EGL_VERSION_1_1 +#define EGL_VERSION_1_1 1 +#define EGL_BACK_BUFFER 0x3084 +#define EGL_BIND_TO_TEXTURE_RGB 0x3039 +#define EGL_BIND_TO_TEXTURE_RGBA 0x303A +#define EGL_CONTEXT_LOST 0x300E +#define EGL_MIN_SWAP_INTERVAL 0x303B +#define EGL_MAX_SWAP_INTERVAL 0x303C +#define EGL_MIPMAP_TEXTURE 0x3082 +#define EGL_MIPMAP_LEVEL 0x3083 +#define EGL_NO_TEXTURE 0x305C +#define EGL_TEXTURE_2D 0x305F +#define EGL_TEXTURE_FORMAT 0x3080 +#define EGL_TEXTURE_RGB 0x305D +#define EGL_TEXTURE_RGBA 0x305E +#define EGL_TEXTURE_TARGET 0x3081 +EGLAPI EGLBoolean EGLAPIENTRY eglBindTexImage (EGLDisplay dpy, EGLSurface surface, EGLint buffer); +EGLAPI EGLBoolean EGLAPIENTRY eglReleaseTexImage (EGLDisplay dpy, EGLSurface surface, EGLint buffer); +EGLAPI EGLBoolean EGLAPIENTRY eglSurfaceAttrib (EGLDisplay dpy, EGLSurface surface, EGLint attribute, EGLint value); +EGLAPI EGLBoolean EGLAPIENTRY eglSwapInterval (EGLDisplay dpy, EGLint interval); +#endif /* EGL_VERSION_1_1 */ + +#ifndef EGL_VERSION_1_2 +#define EGL_VERSION_1_2 1 +typedef unsigned int EGLenum; +typedef void *EGLClientBuffer; +#define EGL_ALPHA_FORMAT 0x3088 +#define EGL_ALPHA_FORMAT_NONPRE 0x308B +#define EGL_ALPHA_FORMAT_PRE 0x308C +#define EGL_ALPHA_MASK_SIZE 0x303E +#define EGL_BUFFER_PRESERVED 0x3094 +#define EGL_BUFFER_DESTROYED 0x3095 +#define EGL_CLIENT_APIS 0x308D +#define EGL_COLORSPACE 0x3087 +#define EGL_COLORSPACE_sRGB 0x3089 +#define EGL_COLORSPACE_LINEAR 0x308A +#define EGL_COLOR_BUFFER_TYPE 0x303F +#define EGL_CONTEXT_CLIENT_TYPE 0x3097 +#define EGL_DISPLAY_SCALING 10000 +#define EGL_HORIZONTAL_RESOLUTION 0x3090 +#define EGL_LUMINANCE_BUFFER 0x308F +#define EGL_LUMINANCE_SIZE 0x303D +#define EGL_OPENGL_ES_BIT 0x0001 +#define EGL_OPENVG_BIT 0x0002 +#define EGL_OPENGL_ES_API 0x30A0 +#define EGL_OPENVG_API 0x30A1 +#define EGL_OPENVG_IMAGE 0x3096 +#define EGL_PIXEL_ASPECT_RATIO 0x3092 +#define EGL_RENDERABLE_TYPE 0x3040 +#define 
EGL_RENDER_BUFFER 0x3086 +#define EGL_RGB_BUFFER 0x308E +#define EGL_SINGLE_BUFFER 0x3085 +#define EGL_SWAP_BEHAVIOR 0x3093 +#define EGL_UNKNOWN EGL_CAST(EGLint,-1) +#define EGL_VERTICAL_RESOLUTION 0x3091 +EGLAPI EGLBoolean EGLAPIENTRY eglBindAPI (EGLenum api); +EGLAPI EGLenum EGLAPIENTRY eglQueryAPI (void); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePbufferFromClientBuffer (EGLDisplay dpy, EGLenum buftype, EGLClientBuffer buffer, EGLConfig config, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglReleaseThread (void); +EGLAPI EGLBoolean EGLAPIENTRY eglWaitClient (void); +#endif /* EGL_VERSION_1_2 */ + +#ifndef EGL_VERSION_1_3 +#define EGL_VERSION_1_3 1 +#define EGL_CONFORMANT 0x3042 +#define EGL_CONTEXT_CLIENT_VERSION 0x3098 +#define EGL_MATCH_NATIVE_PIXMAP 0x3041 +#define EGL_OPENGL_ES2_BIT 0x0004 +#define EGL_VG_ALPHA_FORMAT 0x3088 +#define EGL_VG_ALPHA_FORMAT_NONPRE 0x308B +#define EGL_VG_ALPHA_FORMAT_PRE 0x308C +#define EGL_VG_ALPHA_FORMAT_PRE_BIT 0x0040 +#define EGL_VG_COLORSPACE 0x3087 +#define EGL_VG_COLORSPACE_sRGB 0x3089 +#define EGL_VG_COLORSPACE_LINEAR 0x308A +#define EGL_VG_COLORSPACE_LINEAR_BIT 0x0020 +#endif /* EGL_VERSION_1_3 */ + +#ifndef EGL_VERSION_1_4 +#define EGL_VERSION_1_4 1 +#define EGL_DEFAULT_DISPLAY EGL_CAST(EGLNativeDisplayType,0) +#define EGL_MULTISAMPLE_RESOLVE_BOX_BIT 0x0200 +#define EGL_MULTISAMPLE_RESOLVE 0x3099 +#define EGL_MULTISAMPLE_RESOLVE_DEFAULT 0x309A +#define EGL_MULTISAMPLE_RESOLVE_BOX 0x309B +#define EGL_OPENGL_API 0x30A2 +#define EGL_OPENGL_BIT 0x0008 +#define EGL_SWAP_BEHAVIOR_PRESERVED_BIT 0x0400 +EGLAPI EGLContext EGLAPIENTRY eglGetCurrentContext (void); +#endif /* EGL_VERSION_1_4 */ + +#ifndef EGL_VERSION_1_5 +#define EGL_VERSION_1_5 1 +typedef void *EGLSync; +typedef intptr_t EGLAttrib; +typedef khronos_utime_nanoseconds_t EGLTime; +typedef void *EGLImage; +#define EGL_CONTEXT_MAJOR_VERSION 0x3098 +#define EGL_CONTEXT_MINOR_VERSION 0x30FB +#define EGL_CONTEXT_OPENGL_PROFILE_MASK 0x30FD +#define EGL_CONTEXT_OPENGL_RESET_NOTIFICATION_STRATEGY 0x31BD +#define EGL_NO_RESET_NOTIFICATION 0x31BE +#define EGL_LOSE_CONTEXT_ON_RESET 0x31BF +#define EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT 0x00000001 +#define EGL_CONTEXT_OPENGL_COMPATIBILITY_PROFILE_BIT 0x00000002 +#define EGL_CONTEXT_OPENGL_DEBUG 0x31B0 +#define EGL_CONTEXT_OPENGL_FORWARD_COMPATIBLE 0x31B1 +#define EGL_CONTEXT_OPENGL_ROBUST_ACCESS 0x31B2 +#define EGL_OPENGL_ES3_BIT 0x00000040 +#define EGL_CL_EVENT_HANDLE 0x309C +#define EGL_SYNC_CL_EVENT 0x30FE +#define EGL_SYNC_CL_EVENT_COMPLETE 0x30FF +#define EGL_SYNC_PRIOR_COMMANDS_COMPLETE 0x30F0 +#define EGL_SYNC_TYPE 0x30F7 +#define EGL_SYNC_STATUS 0x30F1 +#define EGL_SYNC_CONDITION 0x30F8 +#define EGL_SIGNALED 0x30F2 +#define EGL_UNSIGNALED 0x30F3 +#define EGL_SYNC_FLUSH_COMMANDS_BIT 0x0001 +#define EGL_FOREVER 0xFFFFFFFFFFFFFFFFull +#define EGL_TIMEOUT_EXPIRED 0x30F5 +#define EGL_CONDITION_SATISFIED 0x30F6 +#define EGL_NO_SYNC EGL_CAST(EGLSync,0) +#define EGL_SYNC_FENCE 0x30F9 +#define EGL_GL_COLORSPACE 0x309D +#define EGL_GL_COLORSPACE_SRGB 0x3089 +#define EGL_GL_COLORSPACE_LINEAR 0x308A +#define EGL_GL_RENDERBUFFER 0x30B9 +#define EGL_GL_TEXTURE_2D 0x30B1 +#define EGL_GL_TEXTURE_LEVEL 0x30BC +#define EGL_GL_TEXTURE_3D 0x30B2 +#define EGL_GL_TEXTURE_ZOFFSET 0x30BD +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x30B3 +#define EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x30B4 +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x30B5 +#define EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x30B6 +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x30B7 +#define 
EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x30B8 +#define EGL_IMAGE_PRESERVED 0x30D2 +#define EGL_NO_IMAGE EGL_CAST(EGLImage,0) +EGLAPI EGLSync EGLAPIENTRY eglCreateSync (EGLDisplay dpy, EGLenum type, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroySync (EGLDisplay dpy, EGLSync sync); +EGLAPI EGLint EGLAPIENTRY eglClientWaitSync (EGLDisplay dpy, EGLSync sync, EGLint flags, EGLTime timeout); +EGLAPI EGLBoolean EGLAPIENTRY eglGetSyncAttrib (EGLDisplay dpy, EGLSync sync, EGLint attribute, EGLAttrib *value); +EGLAPI EGLImage EGLAPIENTRY eglCreateImage (EGLDisplay dpy, EGLContext ctx, EGLenum target, EGLClientBuffer buffer, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroyImage (EGLDisplay dpy, EGLImage image); +EGLAPI EGLDisplay EGLAPIENTRY eglGetPlatformDisplay (EGLenum platform, void *native_display, const EGLAttrib *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePlatformWindowSurface (EGLDisplay dpy, EGLConfig config, void *native_window, const EGLAttrib *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePlatformPixmapSurface (EGLDisplay dpy, EGLConfig config, void *native_pixmap, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglWaitSync (EGLDisplay dpy, EGLSync sync, EGLint flags); +#endif /* EGL_VERSION_1_5 */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/EGL/eglext.h b/vendor/swiftshader/include/EGL/eglext.h new file mode 100644 index 00000000..ad4ffd7d --- /dev/null +++ b/vendor/swiftshader/include/EGL/eglext.h @@ -0,0 +1,1318 @@ +#ifndef __eglext_h_ +#define __eglext_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2017 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. 
The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** http://www.khronos.org/registry/egl +** +** Khronos $Git commit SHA1: bae3518c48 $ on $Git commit date: 2018-05-17 10:56:57 -0700 $ +*/ + +#include <EGL/eglplatform.h> + +#define EGL_EGLEXT_VERSION 20180517 + +/* Generated C header for: + * API: egl + * Versions considered: .* + * Versions emitted: _nomatch_^ + * Default extensions included: egl + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef EGL_KHR_cl_event +#define EGL_KHR_cl_event 1 +#define EGL_CL_EVENT_HANDLE_KHR 0x309C +#define EGL_SYNC_CL_EVENT_KHR 0x30FE +#define EGL_SYNC_CL_EVENT_COMPLETE_KHR 0x30FF +#endif /* EGL_KHR_cl_event */ + +#ifndef EGL_KHR_cl_event2 +#define EGL_KHR_cl_event2 1 +typedef void *EGLSyncKHR; +typedef intptr_t EGLAttribKHR; +typedef EGLSyncKHR (EGLAPIENTRYP PFNEGLCREATESYNC64KHRPROC) (EGLDisplay dpy, EGLenum type, const EGLAttribKHR *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSyncKHR EGLAPIENTRY eglCreateSync64KHR (EGLDisplay dpy, EGLenum type, const EGLAttribKHR *attrib_list); +#endif +#endif /* EGL_KHR_cl_event2 */ + +#ifndef EGL_KHR_client_get_all_proc_addresses +#define EGL_KHR_client_get_all_proc_addresses 1 +#endif /* EGL_KHR_client_get_all_proc_addresses */ + +#ifndef EGL_KHR_config_attribs +#define EGL_KHR_config_attribs 1 +#define EGL_CONFORMANT_KHR 0x3042 +#define EGL_VG_COLORSPACE_LINEAR_BIT_KHR 0x0020 +#define EGL_VG_ALPHA_FORMAT_PRE_BIT_KHR 0x0040 +#endif /* EGL_KHR_config_attribs */ + +#ifndef EGL_KHR_context_flush_control +#define EGL_KHR_context_flush_control 1 +#define EGL_CONTEXT_RELEASE_BEHAVIOR_NONE_KHR 0 +#define EGL_CONTEXT_RELEASE_BEHAVIOR_KHR 0x2097 +#define EGL_CONTEXT_RELEASE_BEHAVIOR_FLUSH_KHR 0x2098 +#endif /* EGL_KHR_context_flush_control */ + +#ifndef EGL_KHR_create_context +#define EGL_KHR_create_context 1 +#define EGL_CONTEXT_MAJOR_VERSION_KHR 0x3098 +#define EGL_CONTEXT_MINOR_VERSION_KHR 0x30FB +#define EGL_CONTEXT_FLAGS_KHR 0x30FC +#define EGL_CONTEXT_OPENGL_PROFILE_MASK_KHR 0x30FD +#define EGL_CONTEXT_OPENGL_RESET_NOTIFICATION_STRATEGY_KHR 0x31BD +#define EGL_NO_RESET_NOTIFICATION_KHR 0x31BE +#define EGL_LOSE_CONTEXT_ON_RESET_KHR 0x31BF +#define EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR 0x00000001 +#define EGL_CONTEXT_OPENGL_FORWARD_COMPATIBLE_BIT_KHR 0x00000002 +#define EGL_CONTEXT_OPENGL_ROBUST_ACCESS_BIT_KHR 0x00000004 +#define EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR 0x00000001 +#define EGL_CONTEXT_OPENGL_COMPATIBILITY_PROFILE_BIT_KHR 0x00000002 +#define EGL_OPENGL_ES3_BIT_KHR 0x00000040 +#endif /* EGL_KHR_create_context */ + +#ifndef EGL_KHR_create_context_no_error +#define EGL_KHR_create_context_no_error 1 +#define EGL_CONTEXT_OPENGL_NO_ERROR_KHR 0x31B3 +#endif /* EGL_KHR_create_context_no_error */ + +#ifndef EGL_KHR_debug +#define EGL_KHR_debug 1 +typedef void *EGLLabelKHR; +typedef void *EGLObjectKHR; +typedef void (EGLAPIENTRY *EGLDEBUGPROCKHR)(EGLenum error,const char *command,EGLint messageType,EGLLabelKHR threadLabel,EGLLabelKHR objectLabel,const char* message); +#define EGL_OBJECT_THREAD_KHR 0x33B0 +#define EGL_OBJECT_DISPLAY_KHR 0x33B1 +#define EGL_OBJECT_CONTEXT_KHR 0x33B2 +#define EGL_OBJECT_SURFACE_KHR 0x33B3 +#define EGL_OBJECT_IMAGE_KHR 0x33B4 +#define EGL_OBJECT_SYNC_KHR 0x33B5 +#define EGL_OBJECT_STREAM_KHR 0x33B6 +#define EGL_DEBUG_MSG_CRITICAL_KHR 0x33B9 +#define EGL_DEBUG_MSG_ERROR_KHR 0x33BA +#define EGL_DEBUG_MSG_WARN_KHR 0x33BB +#define EGL_DEBUG_MSG_INFO_KHR 0x33BC +#define EGL_DEBUG_CALLBACK_KHR 
0x33B8 +typedef EGLint (EGLAPIENTRYP PFNEGLDEBUGMESSAGECONTROLKHRPROC) (EGLDEBUGPROCKHR callback, const EGLAttrib *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDEBUGKHRPROC) (EGLint attribute, EGLAttrib *value); +typedef EGLint (EGLAPIENTRYP PFNEGLLABELOBJECTKHRPROC) (EGLDisplay display, EGLenum objectType, EGLObjectKHR object, EGLLabelKHR label); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLint EGLAPIENTRY eglDebugMessageControlKHR (EGLDEBUGPROCKHR callback, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDebugKHR (EGLint attribute, EGLAttrib *value); +EGLAPI EGLint EGLAPIENTRY eglLabelObjectKHR (EGLDisplay display, EGLenum objectType, EGLObjectKHR object, EGLLabelKHR label); +#endif +#endif /* EGL_KHR_debug */ + +#ifndef EGL_KHR_display_reference +#define EGL_KHR_display_reference 1 +#define EGL_TRACK_REFERENCES_KHR 0x3352 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDISPLAYATTRIBKHRPROC) (EGLDisplay dpy, EGLint name, EGLAttrib *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDisplayAttribKHR (EGLDisplay dpy, EGLint name, EGLAttrib *value); +#endif +#endif /* EGL_KHR_display_reference */ + +#ifndef EGL_KHR_fence_sync +#define EGL_KHR_fence_sync 1 +typedef khronos_utime_nanoseconds_t EGLTimeKHR; +#ifdef KHRONOS_SUPPORT_INT64 +#define EGL_SYNC_PRIOR_COMMANDS_COMPLETE_KHR 0x30F0 +#define EGL_SYNC_CONDITION_KHR 0x30F8 +#define EGL_SYNC_FENCE_KHR 0x30F9 +typedef EGLSyncKHR (EGLAPIENTRYP PFNEGLCREATESYNCKHRPROC) (EGLDisplay dpy, EGLenum type, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYSYNCKHRPROC) (EGLDisplay dpy, EGLSyncKHR sync); +typedef EGLint (EGLAPIENTRYP PFNEGLCLIENTWAITSYNCKHRPROC) (EGLDisplay dpy, EGLSyncKHR sync, EGLint flags, EGLTimeKHR timeout); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETSYNCATTRIBKHRPROC) (EGLDisplay dpy, EGLSyncKHR sync, EGLint attribute, EGLint *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSyncKHR EGLAPIENTRY eglCreateSyncKHR (EGLDisplay dpy, EGLenum type, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroySyncKHR (EGLDisplay dpy, EGLSyncKHR sync); +EGLAPI EGLint EGLAPIENTRY eglClientWaitSyncKHR (EGLDisplay dpy, EGLSyncKHR sync, EGLint flags, EGLTimeKHR timeout); +EGLAPI EGLBoolean EGLAPIENTRY eglGetSyncAttribKHR (EGLDisplay dpy, EGLSyncKHR sync, EGLint attribute, EGLint *value); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_KHR_fence_sync */ + +#ifndef EGL_KHR_get_all_proc_addresses +#define EGL_KHR_get_all_proc_addresses 1 +#endif /* EGL_KHR_get_all_proc_addresses */ + +#ifndef EGL_KHR_gl_colorspace +#define EGL_KHR_gl_colorspace 1 +#define EGL_GL_COLORSPACE_KHR 0x309D +#define EGL_GL_COLORSPACE_SRGB_KHR 0x3089 +#define EGL_GL_COLORSPACE_LINEAR_KHR 0x308A +#endif /* EGL_KHR_gl_colorspace */ + +#ifndef EGL_KHR_gl_renderbuffer_image +#define EGL_KHR_gl_renderbuffer_image 1 +#define EGL_GL_RENDERBUFFER_KHR 0x30B9 +#endif /* EGL_KHR_gl_renderbuffer_image */ + +#ifndef EGL_KHR_gl_texture_2D_image +#define EGL_KHR_gl_texture_2D_image 1 +#define EGL_GL_TEXTURE_2D_KHR 0x30B1 +#define EGL_GL_TEXTURE_LEVEL_KHR 0x30BC +#endif /* EGL_KHR_gl_texture_2D_image */ + +#ifndef EGL_KHR_gl_texture_3D_image +#define EGL_KHR_gl_texture_3D_image 1 +#define EGL_GL_TEXTURE_3D_KHR 0x30B2 +#define EGL_GL_TEXTURE_ZOFFSET_KHR 0x30BD +#endif /* EGL_KHR_gl_texture_3D_image */ + +#ifndef EGL_KHR_gl_texture_cubemap_image +#define EGL_KHR_gl_texture_cubemap_image 1 +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR 0x30B3 +#define EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X_KHR 
0x30B4 +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y_KHR 0x30B5 +#define EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_KHR 0x30B6 +#define EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z_KHR 0x30B7 +#define EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_KHR 0x30B8 +#endif /* EGL_KHR_gl_texture_cubemap_image */ + +#ifndef EGL_KHR_image +#define EGL_KHR_image 1 +typedef void *EGLImageKHR; +#define EGL_NATIVE_PIXMAP_KHR 0x30B0 +#define EGL_NO_IMAGE_KHR EGL_CAST(EGLImageKHR,0) +typedef EGLImageKHR (EGLAPIENTRYP PFNEGLCREATEIMAGEKHRPROC) (EGLDisplay dpy, EGLContext ctx, EGLenum target, EGLClientBuffer buffer, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYIMAGEKHRPROC) (EGLDisplay dpy, EGLImageKHR image); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLImageKHR EGLAPIENTRY eglCreateImageKHR (EGLDisplay dpy, EGLContext ctx, EGLenum target, EGLClientBuffer buffer, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroyImageKHR (EGLDisplay dpy, EGLImageKHR image); +#endif +#endif /* EGL_KHR_image */ + +#ifndef EGL_KHR_image_base +#define EGL_KHR_image_base 1 +#define EGL_IMAGE_PRESERVED_KHR 0x30D2 +#endif /* EGL_KHR_image_base */ + +#ifndef EGL_KHR_image_pixmap +#define EGL_KHR_image_pixmap 1 +#endif /* EGL_KHR_image_pixmap */ + +#ifndef EGL_KHR_lock_surface +#define EGL_KHR_lock_surface 1 +#define EGL_READ_SURFACE_BIT_KHR 0x0001 +#define EGL_WRITE_SURFACE_BIT_KHR 0x0002 +#define EGL_LOCK_SURFACE_BIT_KHR 0x0080 +#define EGL_OPTIMAL_FORMAT_BIT_KHR 0x0100 +#define EGL_MATCH_FORMAT_KHR 0x3043 +#define EGL_FORMAT_RGB_565_EXACT_KHR 0x30C0 +#define EGL_FORMAT_RGB_565_KHR 0x30C1 +#define EGL_FORMAT_RGBA_8888_EXACT_KHR 0x30C2 +#define EGL_FORMAT_RGBA_8888_KHR 0x30C3 +#define EGL_MAP_PRESERVE_PIXELS_KHR 0x30C4 +#define EGL_LOCK_USAGE_HINT_KHR 0x30C5 +#define EGL_BITMAP_POINTER_KHR 0x30C6 +#define EGL_BITMAP_PITCH_KHR 0x30C7 +#define EGL_BITMAP_ORIGIN_KHR 0x30C8 +#define EGL_BITMAP_PIXEL_RED_OFFSET_KHR 0x30C9 +#define EGL_BITMAP_PIXEL_GREEN_OFFSET_KHR 0x30CA +#define EGL_BITMAP_PIXEL_BLUE_OFFSET_KHR 0x30CB +#define EGL_BITMAP_PIXEL_ALPHA_OFFSET_KHR 0x30CC +#define EGL_BITMAP_PIXEL_LUMINANCE_OFFSET_KHR 0x30CD +#define EGL_LOWER_LEFT_KHR 0x30CE +#define EGL_UPPER_LEFT_KHR 0x30CF +typedef EGLBoolean (EGLAPIENTRYP PFNEGLLOCKSURFACEKHRPROC) (EGLDisplay dpy, EGLSurface surface, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLUNLOCKSURFACEKHRPROC) (EGLDisplay dpy, EGLSurface surface); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglLockSurfaceKHR (EGLDisplay dpy, EGLSurface surface, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglUnlockSurfaceKHR (EGLDisplay dpy, EGLSurface surface); +#endif +#endif /* EGL_KHR_lock_surface */ + +#ifndef EGL_KHR_lock_surface2 +#define EGL_KHR_lock_surface2 1 +#define EGL_BITMAP_PIXEL_SIZE_KHR 0x3110 +#endif /* EGL_KHR_lock_surface2 */ + +#ifndef EGL_KHR_lock_surface3 +#define EGL_KHR_lock_surface3 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSURFACE64KHRPROC) (EGLDisplay dpy, EGLSurface surface, EGLint attribute, EGLAttribKHR *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQuerySurface64KHR (EGLDisplay dpy, EGLSurface surface, EGLint attribute, EGLAttribKHR *value); +#endif +#endif /* EGL_KHR_lock_surface3 */ + +#ifndef EGL_KHR_mutable_render_buffer +#define EGL_KHR_mutable_render_buffer 1 +#define EGL_MUTABLE_RENDER_BUFFER_BIT_KHR 0x1000 +#endif /* EGL_KHR_mutable_render_buffer */ + +#ifndef EGL_KHR_no_config_context +#define EGL_KHR_no_config_context 1 +#define EGL_NO_CONFIG_KHR EGL_CAST(EGLConfig,0) 
+#endif /* EGL_KHR_no_config_context */ + +#ifndef EGL_KHR_partial_update +#define EGL_KHR_partial_update 1 +#define EGL_BUFFER_AGE_KHR 0x313D +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSETDAMAGEREGIONKHRPROC) (EGLDisplay dpy, EGLSurface surface, EGLint *rects, EGLint n_rects); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSetDamageRegionKHR (EGLDisplay dpy, EGLSurface surface, EGLint *rects, EGLint n_rects); +#endif +#endif /* EGL_KHR_partial_update */ + +#ifndef EGL_KHR_platform_android +#define EGL_KHR_platform_android 1 +#define EGL_PLATFORM_ANDROID_KHR 0x3141 +#endif /* EGL_KHR_platform_android */ + +#ifndef EGL_KHR_platform_gbm +#define EGL_KHR_platform_gbm 1 +#define EGL_PLATFORM_GBM_KHR 0x31D7 +#endif /* EGL_KHR_platform_gbm */ + +#ifndef EGL_KHR_platform_wayland +#define EGL_KHR_platform_wayland 1 +#define EGL_PLATFORM_WAYLAND_KHR 0x31D8 +#endif /* EGL_KHR_platform_wayland */ + +#ifndef EGL_KHR_platform_x11 +#define EGL_KHR_platform_x11 1 +#define EGL_PLATFORM_X11_KHR 0x31D5 +#define EGL_PLATFORM_X11_SCREEN_KHR 0x31D6 +#endif /* EGL_KHR_platform_x11 */ + +#ifndef EGL_KHR_reusable_sync +#define EGL_KHR_reusable_sync 1 +#ifdef KHRONOS_SUPPORT_INT64 +#define EGL_SYNC_STATUS_KHR 0x30F1 +#define EGL_SIGNALED_KHR 0x30F2 +#define EGL_UNSIGNALED_KHR 0x30F3 +#define EGL_TIMEOUT_EXPIRED_KHR 0x30F5 +#define EGL_CONDITION_SATISFIED_KHR 0x30F6 +#define EGL_SYNC_TYPE_KHR 0x30F7 +#define EGL_SYNC_REUSABLE_KHR 0x30FA +#define EGL_SYNC_FLUSH_COMMANDS_BIT_KHR 0x0001 +#define EGL_FOREVER_KHR 0xFFFFFFFFFFFFFFFFull +#define EGL_NO_SYNC_KHR EGL_CAST(EGLSyncKHR,0) +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSIGNALSYNCKHRPROC) (EGLDisplay dpy, EGLSyncKHR sync, EGLenum mode); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSignalSyncKHR (EGLDisplay dpy, EGLSyncKHR sync, EGLenum mode); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_KHR_reusable_sync */ + +#ifndef EGL_KHR_stream +#define EGL_KHR_stream 1 +typedef void *EGLStreamKHR; +typedef khronos_uint64_t EGLuint64KHR; +#ifdef KHRONOS_SUPPORT_INT64 +#define EGL_NO_STREAM_KHR EGL_CAST(EGLStreamKHR,0) +#define EGL_CONSUMER_LATENCY_USEC_KHR 0x3210 +#define EGL_PRODUCER_FRAME_KHR 0x3212 +#define EGL_CONSUMER_FRAME_KHR 0x3213 +#define EGL_STREAM_STATE_KHR 0x3214 +#define EGL_STREAM_STATE_CREATED_KHR 0x3215 +#define EGL_STREAM_STATE_CONNECTING_KHR 0x3216 +#define EGL_STREAM_STATE_EMPTY_KHR 0x3217 +#define EGL_STREAM_STATE_NEW_FRAME_AVAILABLE_KHR 0x3218 +#define EGL_STREAM_STATE_OLD_FRAME_AVAILABLE_KHR 0x3219 +#define EGL_STREAM_STATE_DISCONNECTED_KHR 0x321A +#define EGL_BAD_STREAM_KHR 0x321B +#define EGL_BAD_STATE_KHR 0x321C +typedef EGLStreamKHR (EGLAPIENTRYP PFNEGLCREATESTREAMKHRPROC) (EGLDisplay dpy, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYSTREAMKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMATTRIBKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLint value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSTREAMKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLint *value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSTREAMU64KHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLuint64KHR *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLStreamKHR EGLAPIENTRY eglCreateStreamKHR (EGLDisplay dpy, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroyStreamKHR (EGLDisplay dpy, EGLStreamKHR stream); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamAttribKHR (EGLDisplay dpy, EGLStreamKHR 
stream, EGLenum attribute, EGLint value); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryStreamKHR (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLint *value); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryStreamu64KHR (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLuint64KHR *value); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_KHR_stream */ + +#ifndef EGL_KHR_stream_attrib +#define EGL_KHR_stream_attrib 1 +#ifdef KHRONOS_SUPPORT_INT64 +typedef EGLStreamKHR (EGLAPIENTRYP PFNEGLCREATESTREAMATTRIBKHRPROC) (EGLDisplay dpy, const EGLAttrib *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSETSTREAMATTRIBKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLAttrib value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSTREAMATTRIBKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLAttrib *value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERACQUIREATTRIBKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERRELEASEATTRIBKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLStreamKHR EGLAPIENTRY eglCreateStreamAttribKHR (EGLDisplay dpy, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglSetStreamAttribKHR (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLAttrib value); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryStreamAttribKHR (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLAttrib *value); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerAcquireAttribKHR (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerReleaseAttribKHR (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_KHR_stream_attrib */ + +#ifndef EGL_KHR_stream_consumer_gltexture +#define EGL_KHR_stream_consumer_gltexture 1 +#ifdef EGL_KHR_stream +#define EGL_CONSUMER_ACQUIRE_TIMEOUT_USEC_KHR 0x321E +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERGLTEXTUREEXTERNALKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERACQUIREKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERRELEASEKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerGLTextureExternalKHR (EGLDisplay dpy, EGLStreamKHR stream); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerAcquireKHR (EGLDisplay dpy, EGLStreamKHR stream); +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerReleaseKHR (EGLDisplay dpy, EGLStreamKHR stream); +#endif +#endif /* EGL_KHR_stream */ +#endif /* EGL_KHR_stream_consumer_gltexture */ + +#ifndef EGL_KHR_stream_cross_process_fd +#define EGL_KHR_stream_cross_process_fd 1 +typedef int EGLNativeFileDescriptorKHR; +#ifdef EGL_KHR_stream +#define EGL_NO_FILE_DESCRIPTOR_KHR EGL_CAST(EGLNativeFileDescriptorKHR,-1) +typedef EGLNativeFileDescriptorKHR (EGLAPIENTRYP PFNEGLGETSTREAMFILEDESCRIPTORKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream); +typedef EGLStreamKHR (EGLAPIENTRYP PFNEGLCREATESTREAMFROMFILEDESCRIPTORKHRPROC) (EGLDisplay dpy, EGLNativeFileDescriptorKHR file_descriptor); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLNativeFileDescriptorKHR EGLAPIENTRY eglGetStreamFileDescriptorKHR (EGLDisplay dpy, EGLStreamKHR stream); +EGLAPI EGLStreamKHR EGLAPIENTRY eglCreateStreamFromFileDescriptorKHR (EGLDisplay dpy, 
EGLNativeFileDescriptorKHR file_descriptor); +#endif +#endif /* EGL_KHR_stream */ +#endif /* EGL_KHR_stream_cross_process_fd */ + +#ifndef EGL_KHR_stream_fifo +#define EGL_KHR_stream_fifo 1 +#ifdef EGL_KHR_stream +#define EGL_STREAM_FIFO_LENGTH_KHR 0x31FC +#define EGL_STREAM_TIME_NOW_KHR 0x31FD +#define EGL_STREAM_TIME_CONSUMER_KHR 0x31FE +#define EGL_STREAM_TIME_PRODUCER_KHR 0x31FF +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSTREAMTIMEKHRPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLTimeKHR *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryStreamTimeKHR (EGLDisplay dpy, EGLStreamKHR stream, EGLenum attribute, EGLTimeKHR *value); +#endif +#endif /* EGL_KHR_stream */ +#endif /* EGL_KHR_stream_fifo */ + +#ifndef EGL_KHR_stream_producer_aldatalocator +#define EGL_KHR_stream_producer_aldatalocator 1 +#ifdef EGL_KHR_stream +#endif /* EGL_KHR_stream */ +#endif /* EGL_KHR_stream_producer_aldatalocator */ + +#ifndef EGL_KHR_stream_producer_eglsurface +#define EGL_KHR_stream_producer_eglsurface 1 +#ifdef EGL_KHR_stream +#define EGL_STREAM_BIT_KHR 0x0800 +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATESTREAMPRODUCERSURFACEKHRPROC) (EGLDisplay dpy, EGLConfig config, EGLStreamKHR stream, const EGLint *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSurface EGLAPIENTRY eglCreateStreamProducerSurfaceKHR (EGLDisplay dpy, EGLConfig config, EGLStreamKHR stream, const EGLint *attrib_list); +#endif +#endif /* EGL_KHR_stream */ +#endif /* EGL_KHR_stream_producer_eglsurface */ + +#ifndef EGL_KHR_surfaceless_context +#define EGL_KHR_surfaceless_context 1 +#endif /* EGL_KHR_surfaceless_context */ + +#ifndef EGL_KHR_swap_buffers_with_damage +#define EGL_KHR_swap_buffers_with_damage 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSWAPBUFFERSWITHDAMAGEKHRPROC) (EGLDisplay dpy, EGLSurface surface, EGLint *rects, EGLint n_rects); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSwapBuffersWithDamageKHR (EGLDisplay dpy, EGLSurface surface, EGLint *rects, EGLint n_rects); +#endif +#endif /* EGL_KHR_swap_buffers_with_damage */ + +#ifndef EGL_KHR_vg_parent_image +#define EGL_KHR_vg_parent_image 1 +#define EGL_VG_PARENT_IMAGE_KHR 0x30BA +#endif /* EGL_KHR_vg_parent_image */ + +#ifndef EGL_KHR_wait_sync +#define EGL_KHR_wait_sync 1 +typedef EGLint (EGLAPIENTRYP PFNEGLWAITSYNCKHRPROC) (EGLDisplay dpy, EGLSyncKHR sync, EGLint flags); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLint EGLAPIENTRY eglWaitSyncKHR (EGLDisplay dpy, EGLSyncKHR sync, EGLint flags); +#endif +#endif /* EGL_KHR_wait_sync */ + +#ifndef EGL_ANDROID_blob_cache +#define EGL_ANDROID_blob_cache 1 +typedef khronos_ssize_t EGLsizeiANDROID; +typedef void (*EGLSetBlobFuncANDROID) (const void *key, EGLsizeiANDROID keySize, const void *value, EGLsizeiANDROID valueSize); +typedef EGLsizeiANDROID (*EGLGetBlobFuncANDROID) (const void *key, EGLsizeiANDROID keySize, void *value, EGLsizeiANDROID valueSize); +typedef void (EGLAPIENTRYP PFNEGLSETBLOBCACHEFUNCSANDROIDPROC) (EGLDisplay dpy, EGLSetBlobFuncANDROID set, EGLGetBlobFuncANDROID get); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI void EGLAPIENTRY eglSetBlobCacheFuncsANDROID (EGLDisplay dpy, EGLSetBlobFuncANDROID set, EGLGetBlobFuncANDROID get); +#endif +#endif /* EGL_ANDROID_blob_cache */ + +#ifndef EGL_ANDROID_create_native_client_buffer +#define EGL_ANDROID_create_native_client_buffer 1 +#define EGL_NATIVE_BUFFER_USAGE_ANDROID 0x3143 +#define EGL_NATIVE_BUFFER_USAGE_PROTECTED_BIT_ANDROID 0x00000001 +#define EGL_NATIVE_BUFFER_USAGE_RENDERBUFFER_BIT_ANDROID 
0x00000002 +#define EGL_NATIVE_BUFFER_USAGE_TEXTURE_BIT_ANDROID 0x00000004 +typedef EGLClientBuffer (EGLAPIENTRYP PFNEGLCREATENATIVECLIENTBUFFERANDROIDPROC) (const EGLint *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLClientBuffer EGLAPIENTRY eglCreateNativeClientBufferANDROID (const EGLint *attrib_list); +#endif +#endif /* EGL_ANDROID_create_native_client_buffer */ + +#ifndef EGL_ANDROID_framebuffer_target +#define EGL_ANDROID_framebuffer_target 1 +#define EGL_FRAMEBUFFER_TARGET_ANDROID 0x3147 +#endif /* EGL_ANDROID_framebuffer_target */ + +#ifndef EGL_ANDROID_front_buffer_auto_refresh +#define EGL_ANDROID_front_buffer_auto_refresh 1 +#define EGL_FRONT_BUFFER_AUTO_REFRESH_ANDROID 0x314C +#endif /* EGL_ANDROID_front_buffer_auto_refresh */ + +#ifndef EGL_ANDROID_get_frame_timestamps +#define EGL_ANDROID_get_frame_timestamps 1 +typedef khronos_stime_nanoseconds_t EGLnsecsANDROID; +#define EGL_TIMESTAMP_PENDING_ANDROID EGL_CAST(EGLnsecsANDROID,-2) +#define EGL_TIMESTAMP_INVALID_ANDROID EGL_CAST(EGLnsecsANDROID,-1) +#define EGL_TIMESTAMPS_ANDROID 0x3430 +#define EGL_COMPOSITE_DEADLINE_ANDROID 0x3431 +#define EGL_COMPOSITE_INTERVAL_ANDROID 0x3432 +#define EGL_COMPOSITE_TO_PRESENT_LATENCY_ANDROID 0x3433 +#define EGL_REQUESTED_PRESENT_TIME_ANDROID 0x3434 +#define EGL_RENDERING_COMPLETE_TIME_ANDROID 0x3435 +#define EGL_COMPOSITION_LATCH_TIME_ANDROID 0x3436 +#define EGL_FIRST_COMPOSITION_START_TIME_ANDROID 0x3437 +#define EGL_LAST_COMPOSITION_START_TIME_ANDROID 0x3438 +#define EGL_FIRST_COMPOSITION_GPU_FINISHED_TIME_ANDROID 0x3439 +#define EGL_DISPLAY_PRESENT_TIME_ANDROID 0x343A +#define EGL_DEQUEUE_READY_TIME_ANDROID 0x343B +#define EGL_READS_DONE_TIME_ANDROID 0x343C +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETCOMPOSITORTIMINGSUPPORTEDANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLint name); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETCOMPOSITORTIMINGANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLint numTimestamps, const EGLint *names, EGLnsecsANDROID *values); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETNEXTFRAMEIDANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLuint64KHR *frameId); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETFRAMETIMESTAMPSUPPORTEDANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLint timestamp); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETFRAMETIMESTAMPSANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLuint64KHR frameId, EGLint numTimestamps, const EGLint *timestamps, EGLnsecsANDROID *values); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglGetCompositorTimingSupportedANDROID (EGLDisplay dpy, EGLSurface surface, EGLint name); +EGLAPI EGLBoolean EGLAPIENTRY eglGetCompositorTimingANDROID (EGLDisplay dpy, EGLSurface surface, EGLint numTimestamps, const EGLint *names, EGLnsecsANDROID *values); +EGLAPI EGLBoolean EGLAPIENTRY eglGetNextFrameIdANDROID (EGLDisplay dpy, EGLSurface surface, EGLuint64KHR *frameId); +EGLAPI EGLBoolean EGLAPIENTRY eglGetFrameTimestampSupportedANDROID (EGLDisplay dpy, EGLSurface surface, EGLint timestamp); +EGLAPI EGLBoolean EGLAPIENTRY eglGetFrameTimestampsANDROID (EGLDisplay dpy, EGLSurface surface, EGLuint64KHR frameId, EGLint numTimestamps, const EGLint *timestamps, EGLnsecsANDROID *values); +#endif +#endif /* EGL_ANDROID_get_frame_timestamps */ + +#ifndef EGL_ANDROID_get_native_client_buffer +#define EGL_ANDROID_get_native_client_buffer 1 +struct AHardwareBuffer; +typedef EGLClientBuffer (EGLAPIENTRYP PFNEGLGETNATIVECLIENTBUFFERANDROIDPROC) (const struct AHardwareBuffer *buffer); +#ifdef EGL_EGLEXT_PROTOTYPES 
+EGLAPI EGLClientBuffer EGLAPIENTRY eglGetNativeClientBufferANDROID (const struct AHardwareBuffer *buffer); +#endif +#endif /* EGL_ANDROID_get_native_client_buffer */ + +#ifndef EGL_ANDROID_image_native_buffer +#define EGL_ANDROID_image_native_buffer 1 +#define EGL_NATIVE_BUFFER_ANDROID 0x3140 +#endif /* EGL_ANDROID_image_native_buffer */ + +#ifndef EGL_ANDROID_native_fence_sync +#define EGL_ANDROID_native_fence_sync 1 +#define EGL_SYNC_NATIVE_FENCE_ANDROID 0x3144 +#define EGL_SYNC_NATIVE_FENCE_FD_ANDROID 0x3145 +#define EGL_SYNC_NATIVE_FENCE_SIGNALED_ANDROID 0x3146 +#define EGL_NO_NATIVE_FENCE_FD_ANDROID -1 +typedef EGLint (EGLAPIENTRYP PFNEGLDUPNATIVEFENCEFDANDROIDPROC) (EGLDisplay dpy, EGLSyncKHR sync); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLint EGLAPIENTRY eglDupNativeFenceFDANDROID (EGLDisplay dpy, EGLSyncKHR sync); +#endif +#endif /* EGL_ANDROID_native_fence_sync */ + +#ifndef EGL_ANDROID_presentation_time +#define EGL_ANDROID_presentation_time 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLPRESENTATIONTIMEANDROIDPROC) (EGLDisplay dpy, EGLSurface surface, EGLnsecsANDROID time); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglPresentationTimeANDROID (EGLDisplay dpy, EGLSurface surface, EGLnsecsANDROID time); +#endif +#endif /* EGL_ANDROID_presentation_time */ + +#ifndef EGL_ANDROID_recordable +#define EGL_ANDROID_recordable 1 +#define EGL_RECORDABLE_ANDROID 0x3142 +#endif /* EGL_ANDROID_recordable */ + +#ifndef EGL_ANGLE_d3d_share_handle_client_buffer +#define EGL_ANGLE_d3d_share_handle_client_buffer 1 +#define EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE 0x3200 +#endif /* EGL_ANGLE_d3d_share_handle_client_buffer */ + +#ifndef EGL_ANGLE_device_d3d +#define EGL_ANGLE_device_d3d 1 +#define EGL_D3D9_DEVICE_ANGLE 0x33A0 +#define EGL_D3D11_DEVICE_ANGLE 0x33A1 +#endif /* EGL_ANGLE_device_d3d */ + +#ifndef EGL_ANGLE_query_surface_pointer +#define EGL_ANGLE_query_surface_pointer 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSURFACEPOINTERANGLEPROC) (EGLDisplay dpy, EGLSurface surface, EGLint attribute, void **value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQuerySurfacePointerANGLE (EGLDisplay dpy, EGLSurface surface, EGLint attribute, void **value); +#endif +#endif /* EGL_ANGLE_query_surface_pointer */ + +#ifndef EGL_ANGLE_surface_d3d_texture_2d_share_handle +#define EGL_ANGLE_surface_d3d_texture_2d_share_handle 1 +#endif /* EGL_ANGLE_surface_d3d_texture_2d_share_handle */ + +#ifndef EGL_ANGLE_window_fixed_size +#define EGL_ANGLE_window_fixed_size 1 +#define EGL_FIXED_SIZE_ANGLE 0x3201 +#endif /* EGL_ANGLE_window_fixed_size */ + +#ifndef EGL_ARM_implicit_external_sync +#define EGL_ARM_implicit_external_sync 1 +#define EGL_SYNC_PRIOR_COMMANDS_IMPLICIT_EXTERNAL_ARM 0x328A +#endif /* EGL_ARM_implicit_external_sync */ + +#ifndef EGL_ARM_pixmap_multisample_discard +#define EGL_ARM_pixmap_multisample_discard 1 +#define EGL_DISCARD_SAMPLES_ARM 0x3286 +#endif /* EGL_ARM_pixmap_multisample_discard */ + +#ifndef EGL_EXT_bind_to_front +#define EGL_EXT_bind_to_front 1 +#define EGL_FRONT_BUFFER_EXT 0x3464 +#endif /* EGL_EXT_bind_to_front */ + +#ifndef EGL_EXT_buffer_age +#define EGL_EXT_buffer_age 1 +#define EGL_BUFFER_AGE_EXT 0x313D +#endif /* EGL_EXT_buffer_age */ + +#ifndef EGL_EXT_client_extensions +#define EGL_EXT_client_extensions 1 +#endif /* EGL_EXT_client_extensions */ + +#ifndef EGL_EXT_client_sync +#define EGL_EXT_client_sync 1 +#define EGL_SYNC_CLIENT_EXT 0x3364 +#define EGL_SYNC_CLIENT_SIGNAL_EXT 0x3365 +typedef EGLBoolean (EGLAPIENTRYP 
PFNEGLCLIENTSIGNALSYNCEXTPROC) (EGLDisplay dpy, EGLSync sync, const EGLAttrib *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglClientSignalSyncEXT (EGLDisplay dpy, EGLSync sync, const EGLAttrib *attrib_list); +#endif +#endif /* EGL_EXT_client_sync */ + +#ifndef EGL_EXT_compositor +#define EGL_EXT_compositor 1 +#define EGL_PRIMARY_COMPOSITOR_CONTEXT_EXT 0x3460 +#define EGL_EXTERNAL_REF_ID_EXT 0x3461 +#define EGL_COMPOSITOR_DROP_NEWEST_FRAME_EXT 0x3462 +#define EGL_COMPOSITOR_KEEP_NEWEST_FRAME_EXT 0x3463 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSETCONTEXTLISTEXTPROC) (const EGLint *external_ref_ids, EGLint num_entries); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSETCONTEXTATTRIBUTESEXTPROC) (EGLint external_ref_id, const EGLint *context_attributes, EGLint num_entries); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSETWINDOWLISTEXTPROC) (EGLint external_ref_id, const EGLint *external_win_ids, EGLint num_entries); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSETWINDOWATTRIBUTESEXTPROC) (EGLint external_win_id, const EGLint *window_attributes, EGLint num_entries); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORBINDTEXWINDOWEXTPROC) (EGLint external_win_id); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSETSIZEEXTPROC) (EGLint external_win_id, EGLint width, EGLint height); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLCOMPOSITORSWAPPOLICYEXTPROC) (EGLint external_win_id, EGLint policy); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSetContextListEXT (const EGLint *external_ref_ids, EGLint num_entries); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSetContextAttributesEXT (EGLint external_ref_id, const EGLint *context_attributes, EGLint num_entries); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSetWindowListEXT (EGLint external_ref_id, const EGLint *external_win_ids, EGLint num_entries); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSetWindowAttributesEXT (EGLint external_win_id, const EGLint *window_attributes, EGLint num_entries); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorBindTexWindowEXT (EGLint external_win_id); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSetSizeEXT (EGLint external_win_id, EGLint width, EGLint height); +EGLAPI EGLBoolean EGLAPIENTRY eglCompositorSwapPolicyEXT (EGLint external_win_id, EGLint policy); +#endif +#endif /* EGL_EXT_compositor */ + +#ifndef EGL_EXT_create_context_robustness +#define EGL_EXT_create_context_robustness 1 +#define EGL_CONTEXT_OPENGL_ROBUST_ACCESS_EXT 0x30BF +#define EGL_CONTEXT_OPENGL_RESET_NOTIFICATION_STRATEGY_EXT 0x3138 +#define EGL_NO_RESET_NOTIFICATION_EXT 0x31BE +#define EGL_LOSE_CONTEXT_ON_RESET_EXT 0x31BF +#endif /* EGL_EXT_create_context_robustness */ + +#ifndef EGL_EXT_device_base +#define EGL_EXT_device_base 1 +typedef void *EGLDeviceEXT; +#define EGL_NO_DEVICE_EXT EGL_CAST(EGLDeviceEXT,0) +#define EGL_BAD_DEVICE_EXT 0x322B +#define EGL_DEVICE_EXT 0x322C +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDEVICEATTRIBEXTPROC) (EGLDeviceEXT device, EGLint attribute, EGLAttrib *value); +typedef const char *(EGLAPIENTRYP PFNEGLQUERYDEVICESTRINGEXTPROC) (EGLDeviceEXT device, EGLint name); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDEVICESEXTPROC) (EGLint max_devices, EGLDeviceEXT *devices, EGLint *num_devices); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDISPLAYATTRIBEXTPROC) (EGLDisplay dpy, EGLint attribute, EGLAttrib *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDeviceAttribEXT (EGLDeviceEXT device, EGLint attribute, EGLAttrib *value); 
+EGLAPI const char *EGLAPIENTRY eglQueryDeviceStringEXT (EGLDeviceEXT device, EGLint name); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDevicesEXT (EGLint max_devices, EGLDeviceEXT *devices, EGLint *num_devices); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDisplayAttribEXT (EGLDisplay dpy, EGLint attribute, EGLAttrib *value); +#endif +#endif /* EGL_EXT_device_base */ + +#ifndef EGL_EXT_device_drm +#define EGL_EXT_device_drm 1 +#define EGL_DRM_DEVICE_FILE_EXT 0x3233 +#endif /* EGL_EXT_device_drm */ + +#ifndef EGL_EXT_device_enumeration +#define EGL_EXT_device_enumeration 1 +#endif /* EGL_EXT_device_enumeration */ + +#ifndef EGL_EXT_device_openwf +#define EGL_EXT_device_openwf 1 +#define EGL_OPENWF_DEVICE_ID_EXT 0x3237 +#endif /* EGL_EXT_device_openwf */ + +#ifndef EGL_EXT_device_query +#define EGL_EXT_device_query 1 +#endif /* EGL_EXT_device_query */ + +#ifndef EGL_EXT_gl_colorspace_bt2020_linear +#define EGL_EXT_gl_colorspace_bt2020_linear 1 +#define EGL_GL_COLORSPACE_BT2020_LINEAR_EXT 0x333F +#endif /* EGL_EXT_gl_colorspace_bt2020_linear */ + +#ifndef EGL_EXT_gl_colorspace_bt2020_pq +#define EGL_EXT_gl_colorspace_bt2020_pq 1 +#define EGL_GL_COLORSPACE_BT2020_PQ_EXT 0x3340 +#endif /* EGL_EXT_gl_colorspace_bt2020_pq */ + +#ifndef EGL_EXT_gl_colorspace_display_p3 +#define EGL_EXT_gl_colorspace_display_p3 1 +#define EGL_GL_COLORSPACE_DISPLAY_P3_EXT 0x3363 +#endif /* EGL_EXT_gl_colorspace_display_p3 */ + +#ifndef EGL_EXT_gl_colorspace_display_p3_linear +#define EGL_EXT_gl_colorspace_display_p3_linear 1 +#define EGL_GL_COLORSPACE_DISPLAY_P3_LINEAR_EXT 0x3362 +#endif /* EGL_EXT_gl_colorspace_display_p3_linear */ + +#ifndef EGL_EXT_gl_colorspace_scrgb +#define EGL_EXT_gl_colorspace_scrgb 1 +#define EGL_GL_COLORSPACE_SCRGB_EXT 0x3351 +#endif /* EGL_EXT_gl_colorspace_scrgb */ + +#ifndef EGL_EXT_gl_colorspace_scrgb_linear +#define EGL_EXT_gl_colorspace_scrgb_linear 1 +#define EGL_GL_COLORSPACE_SCRGB_LINEAR_EXT 0x3350 +#endif /* EGL_EXT_gl_colorspace_scrgb_linear */ + +#ifndef EGL_EXT_image_dma_buf_import +#define EGL_EXT_image_dma_buf_import 1 +#define EGL_LINUX_DMA_BUF_EXT 0x3270 +#define EGL_LINUX_DRM_FOURCC_EXT 0x3271 +#define EGL_DMA_BUF_PLANE0_FD_EXT 0x3272 +#define EGL_DMA_BUF_PLANE0_OFFSET_EXT 0x3273 +#define EGL_DMA_BUF_PLANE0_PITCH_EXT 0x3274 +#define EGL_DMA_BUF_PLANE1_FD_EXT 0x3275 +#define EGL_DMA_BUF_PLANE1_OFFSET_EXT 0x3276 +#define EGL_DMA_BUF_PLANE1_PITCH_EXT 0x3277 +#define EGL_DMA_BUF_PLANE2_FD_EXT 0x3278 +#define EGL_DMA_BUF_PLANE2_OFFSET_EXT 0x3279 +#define EGL_DMA_BUF_PLANE2_PITCH_EXT 0x327A +#define EGL_YUV_COLOR_SPACE_HINT_EXT 0x327B +#define EGL_SAMPLE_RANGE_HINT_EXT 0x327C +#define EGL_YUV_CHROMA_HORIZONTAL_SITING_HINT_EXT 0x327D +#define EGL_YUV_CHROMA_VERTICAL_SITING_HINT_EXT 0x327E +#define EGL_ITU_REC601_EXT 0x327F +#define EGL_ITU_REC709_EXT 0x3280 +#define EGL_ITU_REC2020_EXT 0x3281 +#define EGL_YUV_FULL_RANGE_EXT 0x3282 +#define EGL_YUV_NARROW_RANGE_EXT 0x3283 +#define EGL_YUV_CHROMA_SITING_0_EXT 0x3284 +#define EGL_YUV_CHROMA_SITING_0_5_EXT 0x3285 +#endif /* EGL_EXT_image_dma_buf_import */ + +#ifndef EGL_EXT_image_dma_buf_import_modifiers +#define EGL_EXT_image_dma_buf_import_modifiers 1 +#define EGL_DMA_BUF_PLANE3_FD_EXT 0x3440 +#define EGL_DMA_BUF_PLANE3_OFFSET_EXT 0x3441 +#define EGL_DMA_BUF_PLANE3_PITCH_EXT 0x3442 +#define EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT 0x3443 +#define EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT 0x3444 +#define EGL_DMA_BUF_PLANE1_MODIFIER_LO_EXT 0x3445 +#define EGL_DMA_BUF_PLANE1_MODIFIER_HI_EXT 0x3446 +#define EGL_DMA_BUF_PLANE2_MODIFIER_LO_EXT 0x3447 +#define 
EGL_DMA_BUF_PLANE2_MODIFIER_HI_EXT 0x3448 +#define EGL_DMA_BUF_PLANE3_MODIFIER_LO_EXT 0x3449 +#define EGL_DMA_BUF_PLANE3_MODIFIER_HI_EXT 0x344A +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDMABUFFORMATSEXTPROC) (EGLDisplay dpy, EGLint max_formats, EGLint *formats, EGLint *num_formats); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDMABUFMODIFIERSEXTPROC) (EGLDisplay dpy, EGLint format, EGLint max_modifiers, EGLuint64KHR *modifiers, EGLBoolean *external_only, EGLint *num_modifiers); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDmaBufFormatsEXT (EGLDisplay dpy, EGLint max_formats, EGLint *formats, EGLint *num_formats); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDmaBufModifiersEXT (EGLDisplay dpy, EGLint format, EGLint max_modifiers, EGLuint64KHR *modifiers, EGLBoolean *external_only, EGLint *num_modifiers); +#endif +#endif /* EGL_EXT_image_dma_buf_import_modifiers */ + +#ifndef EGL_EXT_image_gl_colorspace +#define EGL_EXT_image_gl_colorspace 1 +#define EGL_GL_COLORSPACE_DEFAULT_EXT 0x314D +#endif /* EGL_EXT_image_gl_colorspace */ + +#ifndef EGL_EXT_image_implicit_sync_control +#define EGL_EXT_image_implicit_sync_control 1 +#define EGL_IMPORT_SYNC_TYPE_EXT 0x3470 +#define EGL_IMPORT_IMPLICIT_SYNC_EXT 0x3471 +#define EGL_IMPORT_EXPLICIT_SYNC_EXT 0x3472 +#endif /* EGL_EXT_image_implicit_sync_control */ + +#ifndef EGL_EXT_multiview_window +#define EGL_EXT_multiview_window 1 +#define EGL_MULTIVIEW_VIEW_COUNT_EXT 0x3134 +#endif /* EGL_EXT_multiview_window */ + +#ifndef EGL_EXT_output_base +#define EGL_EXT_output_base 1 +typedef void *EGLOutputLayerEXT; +typedef void *EGLOutputPortEXT; +#define EGL_NO_OUTPUT_LAYER_EXT EGL_CAST(EGLOutputLayerEXT,0) +#define EGL_NO_OUTPUT_PORT_EXT EGL_CAST(EGLOutputPortEXT,0) +#define EGL_BAD_OUTPUT_LAYER_EXT 0x322D +#define EGL_BAD_OUTPUT_PORT_EXT 0x322E +#define EGL_SWAP_INTERVAL_EXT 0x322F +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETOUTPUTLAYERSEXTPROC) (EGLDisplay dpy, const EGLAttrib *attrib_list, EGLOutputLayerEXT *layers, EGLint max_layers, EGLint *num_layers); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETOUTPUTPORTSEXTPROC) (EGLDisplay dpy, const EGLAttrib *attrib_list, EGLOutputPortEXT *ports, EGLint max_ports, EGLint *num_ports); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLOUTPUTLAYERATTRIBEXTPROC) (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint attribute, EGLAttrib value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYOUTPUTLAYERATTRIBEXTPROC) (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint attribute, EGLAttrib *value); +typedef const char *(EGLAPIENTRYP PFNEGLQUERYOUTPUTLAYERSTRINGEXTPROC) (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint name); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLOUTPUTPORTATTRIBEXTPROC) (EGLDisplay dpy, EGLOutputPortEXT port, EGLint attribute, EGLAttrib value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYOUTPUTPORTATTRIBEXTPROC) (EGLDisplay dpy, EGLOutputPortEXT port, EGLint attribute, EGLAttrib *value); +typedef const char *(EGLAPIENTRYP PFNEGLQUERYOUTPUTPORTSTRINGEXTPROC) (EGLDisplay dpy, EGLOutputPortEXT port, EGLint name); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglGetOutputLayersEXT (EGLDisplay dpy, const EGLAttrib *attrib_list, EGLOutputLayerEXT *layers, EGLint max_layers, EGLint *num_layers); +EGLAPI EGLBoolean EGLAPIENTRY eglGetOutputPortsEXT (EGLDisplay dpy, const EGLAttrib *attrib_list, EGLOutputPortEXT *ports, EGLint max_ports, EGLint *num_ports); +EGLAPI EGLBoolean EGLAPIENTRY eglOutputLayerAttribEXT (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint attribute, EGLAttrib value); +EGLAPI 
EGLBoolean EGLAPIENTRY eglQueryOutputLayerAttribEXT (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint attribute, EGLAttrib *value); +EGLAPI const char *EGLAPIENTRY eglQueryOutputLayerStringEXT (EGLDisplay dpy, EGLOutputLayerEXT layer, EGLint name); +EGLAPI EGLBoolean EGLAPIENTRY eglOutputPortAttribEXT (EGLDisplay dpy, EGLOutputPortEXT port, EGLint attribute, EGLAttrib value); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryOutputPortAttribEXT (EGLDisplay dpy, EGLOutputPortEXT port, EGLint attribute, EGLAttrib *value); +EGLAPI const char *EGLAPIENTRY eglQueryOutputPortStringEXT (EGLDisplay dpy, EGLOutputPortEXT port, EGLint name); +#endif +#endif /* EGL_EXT_output_base */ + +#ifndef EGL_EXT_output_drm +#define EGL_EXT_output_drm 1 +#define EGL_DRM_CRTC_EXT 0x3234 +#define EGL_DRM_PLANE_EXT 0x3235 +#define EGL_DRM_CONNECTOR_EXT 0x3236 +#endif /* EGL_EXT_output_drm */ + +#ifndef EGL_EXT_output_openwf +#define EGL_EXT_output_openwf 1 +#define EGL_OPENWF_PIPELINE_ID_EXT 0x3238 +#define EGL_OPENWF_PORT_ID_EXT 0x3239 +#endif /* EGL_EXT_output_openwf */ + +#ifndef EGL_EXT_pixel_format_float +#define EGL_EXT_pixel_format_float 1 +#define EGL_COLOR_COMPONENT_TYPE_EXT 0x3339 +#define EGL_COLOR_COMPONENT_TYPE_FIXED_EXT 0x333A +#define EGL_COLOR_COMPONENT_TYPE_FLOAT_EXT 0x333B +#endif /* EGL_EXT_pixel_format_float */ + +#ifndef EGL_EXT_platform_base +#define EGL_EXT_platform_base 1 +typedef EGLDisplay (EGLAPIENTRYP PFNEGLGETPLATFORMDISPLAYEXTPROC) (EGLenum platform, void *native_display, const EGLint *attrib_list); +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATEPLATFORMWINDOWSURFACEEXTPROC) (EGLDisplay dpy, EGLConfig config, void *native_window, const EGLint *attrib_list); +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATEPLATFORMPIXMAPSURFACEEXTPROC) (EGLDisplay dpy, EGLConfig config, void *native_pixmap, const EGLint *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLDisplay EGLAPIENTRY eglGetPlatformDisplayEXT (EGLenum platform, void *native_display, const EGLint *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePlatformWindowSurfaceEXT (EGLDisplay dpy, EGLConfig config, void *native_window, const EGLint *attrib_list); +EGLAPI EGLSurface EGLAPIENTRY eglCreatePlatformPixmapSurfaceEXT (EGLDisplay dpy, EGLConfig config, void *native_pixmap, const EGLint *attrib_list); +#endif +#endif /* EGL_EXT_platform_base */ + +#ifndef EGL_EXT_platform_device +#define EGL_EXT_platform_device 1 +#define EGL_PLATFORM_DEVICE_EXT 0x313F +#endif /* EGL_EXT_platform_device */ + +#ifndef EGL_EXT_platform_wayland +#define EGL_EXT_platform_wayland 1 +#define EGL_PLATFORM_WAYLAND_EXT 0x31D8 +#endif /* EGL_EXT_platform_wayland */ + +#ifndef EGL_EXT_platform_x11 +#define EGL_EXT_platform_x11 1 +#define EGL_PLATFORM_X11_EXT 0x31D5 +#define EGL_PLATFORM_X11_SCREEN_EXT 0x31D6 +#endif /* EGL_EXT_platform_x11 */ + +#ifndef EGL_EXT_protected_content +#define EGL_EXT_protected_content 1 +#define EGL_PROTECTED_CONTENT_EXT 0x32C0 +#endif /* EGL_EXT_protected_content */ + +#ifndef EGL_EXT_protected_surface +#define EGL_EXT_protected_surface 1 +#endif /* EGL_EXT_protected_surface */ + +#ifndef EGL_EXT_stream_consumer_egloutput +#define EGL_EXT_stream_consumer_egloutput 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMEROUTPUTEXTPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLOutputLayerEXT layer); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerOutputEXT (EGLDisplay dpy, EGLStreamKHR stream, EGLOutputLayerEXT layer); +#endif +#endif /* EGL_EXT_stream_consumer_egloutput */ + +#ifndef 
EGL_EXT_surface_CTA861_3_metadata +#define EGL_EXT_surface_CTA861_3_metadata 1 +#define EGL_CTA861_3_MAX_CONTENT_LIGHT_LEVEL_EXT 0x3360 +#define EGL_CTA861_3_MAX_FRAME_AVERAGE_LEVEL_EXT 0x3361 +#endif /* EGL_EXT_surface_CTA861_3_metadata */ + +#ifndef EGL_EXT_surface_SMPTE2086_metadata +#define EGL_EXT_surface_SMPTE2086_metadata 1 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_RX_EXT 0x3341 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_RY_EXT 0x3342 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_GX_EXT 0x3343 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_GY_EXT 0x3344 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_BX_EXT 0x3345 +#define EGL_SMPTE2086_DISPLAY_PRIMARY_BY_EXT 0x3346 +#define EGL_SMPTE2086_WHITE_POINT_X_EXT 0x3347 +#define EGL_SMPTE2086_WHITE_POINT_Y_EXT 0x3348 +#define EGL_SMPTE2086_MAX_LUMINANCE_EXT 0x3349 +#define EGL_SMPTE2086_MIN_LUMINANCE_EXT 0x334A +#define EGL_METADATA_SCALING_EXT 50000 +#endif /* EGL_EXT_surface_SMPTE2086_metadata */ + +#ifndef EGL_EXT_swap_buffers_with_damage +#define EGL_EXT_swap_buffers_with_damage 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSWAPBUFFERSWITHDAMAGEEXTPROC) (EGLDisplay dpy, EGLSurface surface, EGLint *rects, EGLint n_rects); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSwapBuffersWithDamageEXT (EGLDisplay dpy, EGLSurface surface, EGLint *rects, EGLint n_rects); +#endif +#endif /* EGL_EXT_swap_buffers_with_damage */ + +#ifndef EGL_EXT_sync_reuse +#define EGL_EXT_sync_reuse 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLUNSIGNALSYNCEXTPROC) (EGLDisplay dpy, EGLSync sync, const EGLAttrib *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglUnsignalSyncEXT (EGLDisplay dpy, EGLSync sync, const EGLAttrib *attrib_list); +#endif +#endif /* EGL_EXT_sync_reuse */ + +#ifndef EGL_EXT_yuv_surface +#define EGL_EXT_yuv_surface 1 +#define EGL_YUV_ORDER_EXT 0x3301 +#define EGL_YUV_NUMBER_OF_PLANES_EXT 0x3311 +#define EGL_YUV_SUBSAMPLE_EXT 0x3312 +#define EGL_YUV_DEPTH_RANGE_EXT 0x3317 +#define EGL_YUV_CSC_STANDARD_EXT 0x330A +#define EGL_YUV_PLANE_BPP_EXT 0x331A +#define EGL_YUV_BUFFER_EXT 0x3300 +#define EGL_YUV_ORDER_YUV_EXT 0x3302 +#define EGL_YUV_ORDER_YVU_EXT 0x3303 +#define EGL_YUV_ORDER_YUYV_EXT 0x3304 +#define EGL_YUV_ORDER_UYVY_EXT 0x3305 +#define EGL_YUV_ORDER_YVYU_EXT 0x3306 +#define EGL_YUV_ORDER_VYUY_EXT 0x3307 +#define EGL_YUV_ORDER_AYUV_EXT 0x3308 +#define EGL_YUV_SUBSAMPLE_4_2_0_EXT 0x3313 +#define EGL_YUV_SUBSAMPLE_4_2_2_EXT 0x3314 +#define EGL_YUV_SUBSAMPLE_4_4_4_EXT 0x3315 +#define EGL_YUV_DEPTH_RANGE_LIMITED_EXT 0x3318 +#define EGL_YUV_DEPTH_RANGE_FULL_EXT 0x3319 +#define EGL_YUV_CSC_STANDARD_601_EXT 0x330B +#define EGL_YUV_CSC_STANDARD_709_EXT 0x330C +#define EGL_YUV_CSC_STANDARD_2020_EXT 0x330D +#define EGL_YUV_PLANE_BPP_0_EXT 0x331B +#define EGL_YUV_PLANE_BPP_8_EXT 0x331C +#define EGL_YUV_PLANE_BPP_10_EXT 0x331D +#endif /* EGL_EXT_yuv_surface */ + +#ifndef EGL_HI_clientpixmap +#define EGL_HI_clientpixmap 1 +struct EGLClientPixmapHI { + void *pData; + EGLint iWidth; + EGLint iHeight; + EGLint iStride; +}; +#define EGL_CLIENT_PIXMAP_POINTER_HI 0x8F74 +typedef EGLSurface (EGLAPIENTRYP PFNEGLCREATEPIXMAPSURFACEHIPROC) (EGLDisplay dpy, EGLConfig config, struct EGLClientPixmapHI *pixmap); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSurface EGLAPIENTRY eglCreatePixmapSurfaceHI (EGLDisplay dpy, EGLConfig config, struct EGLClientPixmapHI *pixmap); +#endif +#endif /* EGL_HI_clientpixmap */ + +#ifndef EGL_HI_colorformats +#define EGL_HI_colorformats 1 +#define EGL_COLOR_FORMAT_HI 0x8F70 +#define EGL_COLOR_RGB_HI 0x8F71 +#define 
EGL_COLOR_RGBA_HI 0x8F72 +#define EGL_COLOR_ARGB_HI 0x8F73 +#endif /* EGL_HI_colorformats */ + +#ifndef EGL_IMG_context_priority +#define EGL_IMG_context_priority 1 +#define EGL_CONTEXT_PRIORITY_LEVEL_IMG 0x3100 +#define EGL_CONTEXT_PRIORITY_HIGH_IMG 0x3101 +#define EGL_CONTEXT_PRIORITY_MEDIUM_IMG 0x3102 +#define EGL_CONTEXT_PRIORITY_LOW_IMG 0x3103 +#endif /* EGL_IMG_context_priority */ + +#ifndef EGL_IMG_image_plane_attribs +#define EGL_IMG_image_plane_attribs 1 +#define EGL_NATIVE_BUFFER_MULTIPLANE_SEPARATE_IMG 0x3105 +#define EGL_NATIVE_BUFFER_PLANE_OFFSET_IMG 0x3106 +#endif /* EGL_IMG_image_plane_attribs */ + +#ifndef EGL_MESA_drm_image +#define EGL_MESA_drm_image 1 +#define EGL_DRM_BUFFER_FORMAT_MESA 0x31D0 +#define EGL_DRM_BUFFER_USE_MESA 0x31D1 +#define EGL_DRM_BUFFER_FORMAT_ARGB32_MESA 0x31D2 +#define EGL_DRM_BUFFER_MESA 0x31D3 +#define EGL_DRM_BUFFER_STRIDE_MESA 0x31D4 +#define EGL_DRM_BUFFER_USE_SCANOUT_MESA 0x00000001 +#define EGL_DRM_BUFFER_USE_SHARE_MESA 0x00000002 +#define EGL_DRM_BUFFER_USE_CURSOR_MESA 0x00000004 +typedef EGLImageKHR (EGLAPIENTRYP PFNEGLCREATEDRMIMAGEMESAPROC) (EGLDisplay dpy, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLEXPORTDRMIMAGEMESAPROC) (EGLDisplay dpy, EGLImageKHR image, EGLint *name, EGLint *handle, EGLint *stride); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLImageKHR EGLAPIENTRY eglCreateDRMImageMESA (EGLDisplay dpy, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglExportDRMImageMESA (EGLDisplay dpy, EGLImageKHR image, EGLint *name, EGLint *handle, EGLint *stride); +#endif +#endif /* EGL_MESA_drm_image */ + +#ifndef EGL_MESA_image_dma_buf_export +#define EGL_MESA_image_dma_buf_export 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLEXPORTDMABUFIMAGEQUERYMESAPROC) (EGLDisplay dpy, EGLImageKHR image, int *fourcc, int *num_planes, EGLuint64KHR *modifiers); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLEXPORTDMABUFIMAGEMESAPROC) (EGLDisplay dpy, EGLImageKHR image, int *fds, EGLint *strides, EGLint *offsets); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglExportDMABUFImageQueryMESA (EGLDisplay dpy, EGLImageKHR image, int *fourcc, int *num_planes, EGLuint64KHR *modifiers); +EGLAPI EGLBoolean EGLAPIENTRY eglExportDMABUFImageMESA (EGLDisplay dpy, EGLImageKHR image, int *fds, EGLint *strides, EGLint *offsets); +#endif +#endif /* EGL_MESA_image_dma_buf_export */ + +#ifndef EGL_MESA_platform_gbm +#define EGL_MESA_platform_gbm 1 +#define EGL_PLATFORM_GBM_MESA 0x31D7 +#endif /* EGL_MESA_platform_gbm */ + +#ifndef EGL_MESA_platform_surfaceless +#define EGL_MESA_platform_surfaceless 1 +#define EGL_PLATFORM_SURFACELESS_MESA 0x31DD +#endif /* EGL_MESA_platform_surfaceless */ + +#ifndef EGL_NOK_swap_region +#define EGL_NOK_swap_region 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSWAPBUFFERSREGIONNOKPROC) (EGLDisplay dpy, EGLSurface surface, EGLint numRects, const EGLint *rects); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSwapBuffersRegionNOK (EGLDisplay dpy, EGLSurface surface, EGLint numRects, const EGLint *rects); +#endif +#endif /* EGL_NOK_swap_region */ + +#ifndef EGL_NOK_swap_region2 +#define EGL_NOK_swap_region2 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSWAPBUFFERSREGION2NOKPROC) (EGLDisplay dpy, EGLSurface surface, EGLint numRects, const EGLint *rects); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglSwapBuffersRegion2NOK (EGLDisplay dpy, EGLSurface surface, EGLint numRects, const EGLint *rects); +#endif +#endif /* EGL_NOK_swap_region2 */ + +#ifndef EGL_NOK_texture_from_pixmap +#define 
EGL_NOK_texture_from_pixmap 1 +#define EGL_Y_INVERTED_NOK 0x307F +#endif /* EGL_NOK_texture_from_pixmap */ + +#ifndef EGL_NV_3dvision_surface +#define EGL_NV_3dvision_surface 1 +#define EGL_AUTO_STEREO_NV 0x3136 +#endif /* EGL_NV_3dvision_surface */ + +#ifndef EGL_NV_context_priority_realtime +#define EGL_NV_context_priority_realtime 1 +#define EGL_CONTEXT_PRIORITY_REALTIME_NV 0x3357 +#endif /* EGL_NV_context_priority_realtime */ + +#ifndef EGL_NV_coverage_sample +#define EGL_NV_coverage_sample 1 +#define EGL_COVERAGE_BUFFERS_NV 0x30E0 +#define EGL_COVERAGE_SAMPLES_NV 0x30E1 +#endif /* EGL_NV_coverage_sample */ + +#ifndef EGL_NV_coverage_sample_resolve +#define EGL_NV_coverage_sample_resolve 1 +#define EGL_COVERAGE_SAMPLE_RESOLVE_NV 0x3131 +#define EGL_COVERAGE_SAMPLE_RESOLVE_DEFAULT_NV 0x3132 +#define EGL_COVERAGE_SAMPLE_RESOLVE_NONE_NV 0x3133 +#endif /* EGL_NV_coverage_sample_resolve */ + +#ifndef EGL_NV_cuda_event +#define EGL_NV_cuda_event 1 +#define EGL_CUDA_EVENT_HANDLE_NV 0x323B +#define EGL_SYNC_CUDA_EVENT_NV 0x323C +#define EGL_SYNC_CUDA_EVENT_COMPLETE_NV 0x323D +#endif /* EGL_NV_cuda_event */ + +#ifndef EGL_NV_depth_nonlinear +#define EGL_NV_depth_nonlinear 1 +#define EGL_DEPTH_ENCODING_NV 0x30E2 +#define EGL_DEPTH_ENCODING_NONE_NV 0 +#define EGL_DEPTH_ENCODING_NONLINEAR_NV 0x30E3 +#endif /* EGL_NV_depth_nonlinear */ + +#ifndef EGL_NV_device_cuda +#define EGL_NV_device_cuda 1 +#define EGL_CUDA_DEVICE_NV 0x323A +#endif /* EGL_NV_device_cuda */ + +#ifndef EGL_NV_native_query +#define EGL_NV_native_query 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYNATIVEDISPLAYNVPROC) (EGLDisplay dpy, EGLNativeDisplayType *display_id); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYNATIVEWINDOWNVPROC) (EGLDisplay dpy, EGLSurface surf, EGLNativeWindowType *window); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYNATIVEPIXMAPNVPROC) (EGLDisplay dpy, EGLSurface surf, EGLNativePixmapType *pixmap); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryNativeDisplayNV (EGLDisplay dpy, EGLNativeDisplayType *display_id); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryNativeWindowNV (EGLDisplay dpy, EGLSurface surf, EGLNativeWindowType *window); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryNativePixmapNV (EGLDisplay dpy, EGLSurface surf, EGLNativePixmapType *pixmap); +#endif +#endif /* EGL_NV_native_query */ + +#ifndef EGL_NV_post_convert_rounding +#define EGL_NV_post_convert_rounding 1 +#endif /* EGL_NV_post_convert_rounding */ + +#ifndef EGL_NV_post_sub_buffer +#define EGL_NV_post_sub_buffer 1 +#define EGL_POST_SUB_BUFFER_SUPPORTED_NV 0x30BE +typedef EGLBoolean (EGLAPIENTRYP PFNEGLPOSTSUBBUFFERNVPROC) (EGLDisplay dpy, EGLSurface surface, EGLint x, EGLint y, EGLint width, EGLint height); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglPostSubBufferNV (EGLDisplay dpy, EGLSurface surface, EGLint x, EGLint y, EGLint width, EGLint height); +#endif +#endif /* EGL_NV_post_sub_buffer */ + +#ifndef EGL_NV_robustness_video_memory_purge +#define EGL_NV_robustness_video_memory_purge 1 +#define EGL_GENERATE_RESET_ON_VIDEO_MEMORY_PURGE_NV 0x334C +#endif /* EGL_NV_robustness_video_memory_purge */ + +#ifndef EGL_NV_stream_consumer_gltexture_yuv +#define EGL_NV_stream_consumer_gltexture_yuv 1 +#define EGL_YUV_PLANE0_TEXTURE_UNIT_NV 0x332C +#define EGL_YUV_PLANE1_TEXTURE_UNIT_NV 0x332D +#define EGL_YUV_PLANE2_TEXTURE_UNIT_NV 0x332E +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMCONSUMERGLTEXTUREEXTERNALATTRIBSNVPROC) (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +#ifdef 
EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglStreamConsumerGLTextureExternalAttribsNV (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list); +#endif +#endif /* EGL_NV_stream_consumer_gltexture_yuv */ + +#ifndef EGL_NV_stream_cross_display +#define EGL_NV_stream_cross_display 1 +#define EGL_STREAM_CROSS_DISPLAY_NV 0x334E +#endif /* EGL_NV_stream_cross_display */ + +#ifndef EGL_NV_stream_cross_object +#define EGL_NV_stream_cross_object 1 +#define EGL_STREAM_CROSS_OBJECT_NV 0x334D +#endif /* EGL_NV_stream_cross_object */ + +#ifndef EGL_NV_stream_cross_partition +#define EGL_NV_stream_cross_partition 1 +#define EGL_STREAM_CROSS_PARTITION_NV 0x323F +#endif /* EGL_NV_stream_cross_partition */ + +#ifndef EGL_NV_stream_cross_process +#define EGL_NV_stream_cross_process 1 +#define EGL_STREAM_CROSS_PROCESS_NV 0x3245 +#endif /* EGL_NV_stream_cross_process */ + +#ifndef EGL_NV_stream_cross_system +#define EGL_NV_stream_cross_system 1 +#define EGL_STREAM_CROSS_SYSTEM_NV 0x334F +#endif /* EGL_NV_stream_cross_system */ + +#ifndef EGL_NV_stream_fifo_next +#define EGL_NV_stream_fifo_next 1 +#define EGL_PENDING_FRAME_NV 0x3329 +#define EGL_STREAM_TIME_PENDING_NV 0x332A +#endif /* EGL_NV_stream_fifo_next */ + +#ifndef EGL_NV_stream_fifo_synchronous +#define EGL_NV_stream_fifo_synchronous 1 +#define EGL_STREAM_FIFO_SYNCHRONOUS_NV 0x3336 +#endif /* EGL_NV_stream_fifo_synchronous */ + +#ifndef EGL_NV_stream_flush +#define EGL_NV_stream_flush 1 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSTREAMFLUSHNVPROC) (EGLDisplay dpy, EGLStreamKHR stream); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglStreamFlushNV (EGLDisplay dpy, EGLStreamKHR stream); +#endif +#endif /* EGL_NV_stream_flush */ + +#ifndef EGL_NV_stream_frame_limits +#define EGL_NV_stream_frame_limits 1 +#define EGL_PRODUCER_MAX_FRAME_HINT_NV 0x3337 +#define EGL_CONSUMER_MAX_FRAME_HINT_NV 0x3338 +#endif /* EGL_NV_stream_frame_limits */ + +#ifndef EGL_NV_stream_metadata +#define EGL_NV_stream_metadata 1 +#define EGL_MAX_STREAM_METADATA_BLOCKS_NV 0x3250 +#define EGL_MAX_STREAM_METADATA_BLOCK_SIZE_NV 0x3251 +#define EGL_MAX_STREAM_METADATA_TOTAL_SIZE_NV 0x3252 +#define EGL_PRODUCER_METADATA_NV 0x3253 +#define EGL_CONSUMER_METADATA_NV 0x3254 +#define EGL_PENDING_METADATA_NV 0x3328 +#define EGL_METADATA0_SIZE_NV 0x3255 +#define EGL_METADATA1_SIZE_NV 0x3256 +#define EGL_METADATA2_SIZE_NV 0x3257 +#define EGL_METADATA3_SIZE_NV 0x3258 +#define EGL_METADATA0_TYPE_NV 0x3259 +#define EGL_METADATA1_TYPE_NV 0x325A +#define EGL_METADATA2_TYPE_NV 0x325B +#define EGL_METADATA3_TYPE_NV 0x325C +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYDISPLAYATTRIBNVPROC) (EGLDisplay dpy, EGLint attribute, EGLAttrib *value); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSETSTREAMMETADATANVPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLint n, EGLint offset, EGLint size, const void *data); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLQUERYSTREAMMETADATANVPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum name, EGLint n, EGLint offset, EGLint size, void *data); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglQueryDisplayAttribNV (EGLDisplay dpy, EGLint attribute, EGLAttrib *value); +EGLAPI EGLBoolean EGLAPIENTRY eglSetStreamMetadataNV (EGLDisplay dpy, EGLStreamKHR stream, EGLint n, EGLint offset, EGLint size, const void *data); +EGLAPI EGLBoolean EGLAPIENTRY eglQueryStreamMetadataNV (EGLDisplay dpy, EGLStreamKHR stream, EGLenum name, EGLint n, EGLint offset, EGLint size, void *data); +#endif +#endif /* EGL_NV_stream_metadata */ + +#ifndef 
EGL_NV_stream_remote +#define EGL_NV_stream_remote 1 +#define EGL_STREAM_STATE_INITIALIZING_NV 0x3240 +#define EGL_STREAM_TYPE_NV 0x3241 +#define EGL_STREAM_PROTOCOL_NV 0x3242 +#define EGL_STREAM_ENDPOINT_NV 0x3243 +#define EGL_STREAM_LOCAL_NV 0x3244 +#define EGL_STREAM_PRODUCER_NV 0x3247 +#define EGL_STREAM_CONSUMER_NV 0x3248 +#define EGL_STREAM_PROTOCOL_FD_NV 0x3246 +#endif /* EGL_NV_stream_remote */ + +#ifndef EGL_NV_stream_reset +#define EGL_NV_stream_reset 1 +#define EGL_SUPPORT_RESET_NV 0x3334 +#define EGL_SUPPORT_REUSE_NV 0x3335 +typedef EGLBoolean (EGLAPIENTRYP PFNEGLRESETSTREAMNVPROC) (EGLDisplay dpy, EGLStreamKHR stream); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLBoolean EGLAPIENTRY eglResetStreamNV (EGLDisplay dpy, EGLStreamKHR stream); +#endif +#endif /* EGL_NV_stream_reset */ + +#ifndef EGL_NV_stream_socket +#define EGL_NV_stream_socket 1 +#define EGL_STREAM_PROTOCOL_SOCKET_NV 0x324B +#define EGL_SOCKET_HANDLE_NV 0x324C +#define EGL_SOCKET_TYPE_NV 0x324D +#endif /* EGL_NV_stream_socket */ + +#ifndef EGL_NV_stream_socket_inet +#define EGL_NV_stream_socket_inet 1 +#define EGL_SOCKET_TYPE_INET_NV 0x324F +#endif /* EGL_NV_stream_socket_inet */ + +#ifndef EGL_NV_stream_socket_unix +#define EGL_NV_stream_socket_unix 1 +#define EGL_SOCKET_TYPE_UNIX_NV 0x324E +#endif /* EGL_NV_stream_socket_unix */ + +#ifndef EGL_NV_stream_sync +#define EGL_NV_stream_sync 1 +#define EGL_SYNC_NEW_FRAME_NV 0x321F +typedef EGLSyncKHR (EGLAPIENTRYP PFNEGLCREATESTREAMSYNCNVPROC) (EGLDisplay dpy, EGLStreamKHR stream, EGLenum type, const EGLint *attrib_list); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSyncKHR EGLAPIENTRY eglCreateStreamSyncNV (EGLDisplay dpy, EGLStreamKHR stream, EGLenum type, const EGLint *attrib_list); +#endif +#endif /* EGL_NV_stream_sync */ + +#ifndef EGL_NV_sync +#define EGL_NV_sync 1 +typedef void *EGLSyncNV; +typedef khronos_utime_nanoseconds_t EGLTimeNV; +#ifdef KHRONOS_SUPPORT_INT64 +#define EGL_SYNC_PRIOR_COMMANDS_COMPLETE_NV 0x30E6 +#define EGL_SYNC_STATUS_NV 0x30E7 +#define EGL_SIGNALED_NV 0x30E8 +#define EGL_UNSIGNALED_NV 0x30E9 +#define EGL_SYNC_FLUSH_COMMANDS_BIT_NV 0x0001 +#define EGL_FOREVER_NV 0xFFFFFFFFFFFFFFFFull +#define EGL_ALREADY_SIGNALED_NV 0x30EA +#define EGL_TIMEOUT_EXPIRED_NV 0x30EB +#define EGL_CONDITION_SATISFIED_NV 0x30EC +#define EGL_SYNC_TYPE_NV 0x30ED +#define EGL_SYNC_CONDITION_NV 0x30EE +#define EGL_SYNC_FENCE_NV 0x30EF +#define EGL_NO_SYNC_NV EGL_CAST(EGLSyncNV,0) +typedef EGLSyncNV (EGLAPIENTRYP PFNEGLCREATEFENCESYNCNVPROC) (EGLDisplay dpy, EGLenum condition, const EGLint *attrib_list); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLDESTROYSYNCNVPROC) (EGLSyncNV sync); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLFENCENVPROC) (EGLSyncNV sync); +typedef EGLint (EGLAPIENTRYP PFNEGLCLIENTWAITSYNCNVPROC) (EGLSyncNV sync, EGLint flags, EGLTimeNV timeout); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLSIGNALSYNCNVPROC) (EGLSyncNV sync, EGLenum mode); +typedef EGLBoolean (EGLAPIENTRYP PFNEGLGETSYNCATTRIBNVPROC) (EGLSyncNV sync, EGLint attribute, EGLint *value); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLSyncNV EGLAPIENTRY eglCreateFenceSyncNV (EGLDisplay dpy, EGLenum condition, const EGLint *attrib_list); +EGLAPI EGLBoolean EGLAPIENTRY eglDestroySyncNV (EGLSyncNV sync); +EGLAPI EGLBoolean EGLAPIENTRY eglFenceNV (EGLSyncNV sync); +EGLAPI EGLint EGLAPIENTRY eglClientWaitSyncNV (EGLSyncNV sync, EGLint flags, EGLTimeNV timeout); +EGLAPI EGLBoolean EGLAPIENTRY eglSignalSyncNV (EGLSyncNV sync, EGLenum mode); +EGLAPI EGLBoolean EGLAPIENTRY eglGetSyncAttribNV (EGLSyncNV sync, EGLint 
attribute, EGLint *value); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_NV_sync */ + +#ifndef EGL_NV_system_time +#define EGL_NV_system_time 1 +typedef khronos_utime_nanoseconds_t EGLuint64NV; +#ifdef KHRONOS_SUPPORT_INT64 +typedef EGLuint64NV (EGLAPIENTRYP PFNEGLGETSYSTEMTIMEFREQUENCYNVPROC) (void); +typedef EGLuint64NV (EGLAPIENTRYP PFNEGLGETSYSTEMTIMENVPROC) (void); +#ifdef EGL_EGLEXT_PROTOTYPES +EGLAPI EGLuint64NV EGLAPIENTRY eglGetSystemTimeFrequencyNV (void); +EGLAPI EGLuint64NV EGLAPIENTRY eglGetSystemTimeNV (void); +#endif +#endif /* KHRONOS_SUPPORT_INT64 */ +#endif /* EGL_NV_system_time */ + +#ifndef EGL_TIZEN_image_native_buffer +#define EGL_TIZEN_image_native_buffer 1 +#define EGL_NATIVE_BUFFER_TIZEN 0x32A0 +#endif /* EGL_TIZEN_image_native_buffer */ + +#ifndef EGL_TIZEN_image_native_surface +#define EGL_TIZEN_image_native_surface 1 +#define EGL_NATIVE_SURFACE_TIZEN 0x32A1 +#endif /* EGL_TIZEN_image_native_surface */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/EGL/eglplatform.h b/vendor/swiftshader/include/EGL/eglplatform.h new file mode 100644 index 00000000..97c31e27 --- /dev/null +++ b/vendor/swiftshader/include/EGL/eglplatform.h @@ -0,0 +1,158 @@ +#ifndef __eglplatform_h_ +#define __eglplatform_h_ + +/* +** Copyright (c) 2007-2016 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ + +/* Platform-specific types and definitions for egl.h + * $Revision: 30994 $ on $Date: 2015-04-30 13:36:48 -0700 (Thu, 30 Apr 2015) $ + * + * Adopters may modify khrplatform.h and this file to suit their platform. + * You are encouraged to submit all modifications to the Khronos group so that + * they can be included in future versions of this file. Please submit changes + * by sending them to the public Khronos Bugzilla (http://khronos.org/bugzilla) + * by filing a bug against product "EGL" component "Registry". + */ + +#include <KHR/khrplatform.h> + +/* Macros used in EGL function prototype declarations. 
+ * + * EGL functions should be prototyped as: + * + * EGLAPI return-type EGLAPIENTRY eglFunction(arguments); + * typedef return-type (EXPAPIENTRYP PFNEGLFUNCTIONPROC) (arguments); + * + * KHRONOS_APICALL and KHRONOS_APIENTRY are defined in KHR/khrplatform.h + */ + +#ifndef EGLAPI +#define EGLAPI KHRONOS_APICALL +#endif + +#ifndef EGLAPIENTRY +#define EGLAPIENTRY KHRONOS_APIENTRY +#endif +#define EGLAPIENTRYP EGLAPIENTRY* + +/* The types NativeDisplayType, NativeWindowType, and NativePixmapType + * are aliases of window-system-dependent types, such as X Display * or + * Windows Device Context. They must be defined in platform-specific + * code below. The EGL-prefixed versions of Native*Type are the same + * types, renamed in EGL 1.3 so all types in the API start with "EGL". + * + * Khronos STRONGLY RECOMMENDS that you use the default definitions + * provided below, since these changes affect both binary and source + * portability of applications using EGL running on different EGL + * implementations. + */ + +#if defined(_WIN32) || defined(__VC32__) && !defined(__CYGWIN__) && !defined(__SCITECH_SNAP__) /* Win32 and WinCE */ +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN 1 +#endif +#include <windows.h> + +typedef HDC EGLNativeDisplayType; +typedef HBITMAP EGLNativePixmapType; +typedef HWND EGLNativeWindowType; + +#elif defined(__APPLE__) || defined(__WINSCW__) || defined(__SYMBIAN32__) /* Symbian */ + +typedef int EGLNativeDisplayType; +typedef void *EGLNativeWindowType; +typedef void *EGLNativePixmapType; + +#elif defined(WL_EGL_PLATFORM) + +typedef struct wl_display *EGLNativeDisplayType; +typedef struct wl_egl_pixmap *EGLNativePixmapType; +typedef struct wl_egl_window *EGLNativeWindowType; + +#elif defined(__GBM__) + +typedef struct gbm_device *EGLNativeDisplayType; +typedef struct gbm_bo *EGLNativePixmapType; +typedef void *EGLNativeWindowType; + +#elif defined(__ANDROID__) || defined(ANDROID) + +struct ANativeWindow; +struct egl_native_pixmap_t; + +typedef struct ANativeWindow* EGLNativeWindowType; +typedef struct egl_native_pixmap_t* EGLNativePixmapType; +typedef void* EGLNativeDisplayType; + +#elif defined(USE_OZONE) + +typedef intptr_t EGLNativeDisplayType; +typedef intptr_t EGLNativeWindowType; +typedef intptr_t EGLNativePixmapType; + +#elif defined(__unix__) + +/* X11 (tentative) */ +#include <X11/Xlib.h> +#include <X11/Xutil.h> + +typedef Display *EGLNativeDisplayType; +typedef Pixmap EGLNativePixmapType; +typedef Window EGLNativeWindowType; + +#elif defined(__HAIKU__) + +#include + +typedef void *EGLNativeDisplayType; +typedef khronos_uintptr_t EGLNativePixmapType; +typedef khronos_uintptr_t EGLNativeWindowType; + +#else +#error "Platform not recognized" +#endif + +/* EGL 1.2 types, renamed for consistency in EGL 1.3 */ +typedef EGLNativeDisplayType NativeDisplayType; +typedef EGLNativePixmapType NativePixmapType; +typedef EGLNativeWindowType NativeWindowType; + + +/* Define EGLint. This must be a signed integral type large enough to contain + * all legal attribute names and values passed into and out of EGL, whether + * their type is boolean, bitmask, enumerant (symbolic constant), integer, + * handle, or other. While in general a 32-bit integer will suffice, if + * handles are 64 bit types, then EGLint should be defined as a signed 64-bit + * integer type. 
+ */ +typedef khronos_int32_t EGLint; + + +/* C++ / C typecast macros for special EGL handle values */ +#if defined(__cplusplus) +#define EGL_CAST(type, value) (static_cast<type>(value)) +#else +#define EGL_CAST(type, value) ((type) (value)) +#endif + +#endif /* __eglplatform_h */ diff --git a/vendor/swiftshader/include/GL/glcorearb.h b/vendor/swiftshader/include/GL/glcorearb.h new file mode 100644 index 00000000..ff3e9a85 --- /dev/null +++ b/vendor/swiftshader/include/GL/glcorearb.h @@ -0,0 +1,5703 @@ +#ifndef __gl_glcorearb_h_ +#define __gl_glcorearb_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2018 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#if defined(_WIN32) && !defined(APIENTRY) && !defined(__CYGWIN__) && !defined(__SCITECH_SNAP__) +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN 1 +#endif +#include <windows.h> +#endif + +#ifndef APIENTRY +#define APIENTRY +#endif +#ifndef APIENTRYP +#define APIENTRYP APIENTRY * +#endif +#ifndef GLAPI +#define GLAPI extern +#endif + +/* glcorearb.h is for use with OpenGL core profile implementations. +** It should should be placed in the same directory as gl.h and +** included as <GL/glcorearb.h>. +** +** glcorearb.h includes only APIs in the latest OpenGL core profile +** implementation together with APIs in newer ARB extensions which +** can be supported by the core profile. It does not, and never will +** include functionality removed from the core profile, such as +** fixed-function vertex and fragment processing. +** +** Do not #include both <GL/glcorearb.h> and either of <GL/gl.h> or +** <GL/glext.h> in the same source file. 
+*/ + +/* Generated C header for: + * API: gl + * Profile: core + * Versions considered: .* + * Versions emitted: .* + * Default extensions included: glcore + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_VERSION_1_0 +#define GL_VERSION_1_0 1 +typedef void GLvoid; +typedef unsigned int GLenum; +typedef float GLfloat; +typedef int GLint; +typedef int GLsizei; +typedef unsigned int GLbitfield; +typedef double GLdouble; +typedef unsigned int GLuint; +typedef unsigned char GLboolean; +typedef unsigned char GLubyte; +#define GL_DEPTH_BUFFER_BIT 0x00000100 +#define GL_STENCIL_BUFFER_BIT 0x00000400 +#define GL_COLOR_BUFFER_BIT 0x00004000 +#define GL_FALSE 0 +#define GL_TRUE 1 +#define GL_POINTS 0x0000 +#define GL_LINES 0x0001 +#define GL_LINE_LOOP 0x0002 +#define GL_LINE_STRIP 0x0003 +#define GL_TRIANGLES 0x0004 +#define GL_TRIANGLE_STRIP 0x0005 +#define GL_TRIANGLE_FAN 0x0006 +#define GL_QUADS 0x0007 +#define GL_NEVER 0x0200 +#define GL_LESS 0x0201 +#define GL_EQUAL 0x0202 +#define GL_LEQUAL 0x0203 +#define GL_GREATER 0x0204 +#define GL_NOTEQUAL 0x0205 +#define GL_GEQUAL 0x0206 +#define GL_ALWAYS 0x0207 +#define GL_ZERO 0 +#define GL_ONE 1 +#define GL_SRC_COLOR 0x0300 +#define GL_ONE_MINUS_SRC_COLOR 0x0301 +#define GL_SRC_ALPHA 0x0302 +#define GL_ONE_MINUS_SRC_ALPHA 0x0303 +#define GL_DST_ALPHA 0x0304 +#define GL_ONE_MINUS_DST_ALPHA 0x0305 +#define GL_DST_COLOR 0x0306 +#define GL_ONE_MINUS_DST_COLOR 0x0307 +#define GL_SRC_ALPHA_SATURATE 0x0308 +#define GL_NONE 0 +#define GL_FRONT_LEFT 0x0400 +#define GL_FRONT_RIGHT 0x0401 +#define GL_BACK_LEFT 0x0402 +#define GL_BACK_RIGHT 0x0403 +#define GL_FRONT 0x0404 +#define GL_BACK 0x0405 +#define GL_LEFT 0x0406 +#define GL_RIGHT 0x0407 +#define GL_FRONT_AND_BACK 0x0408 +#define GL_NO_ERROR 0 +#define GL_INVALID_ENUM 0x0500 +#define GL_INVALID_VALUE 0x0501 +#define GL_INVALID_OPERATION 0x0502 +#define GL_OUT_OF_MEMORY 0x0505 +#define GL_CW 0x0900 +#define GL_CCW 0x0901 +#define GL_POINT_SIZE 0x0B11 +#define GL_POINT_SIZE_RANGE 0x0B12 +#define GL_POINT_SIZE_GRANULARITY 0x0B13 +#define GL_LINE_SMOOTH 0x0B20 +#define GL_LINE_WIDTH 0x0B21 +#define GL_LINE_WIDTH_RANGE 0x0B22 +#define GL_LINE_WIDTH_GRANULARITY 0x0B23 +#define GL_POLYGON_MODE 0x0B40 +#define GL_POLYGON_SMOOTH 0x0B41 +#define GL_CULL_FACE 0x0B44 +#define GL_CULL_FACE_MODE 0x0B45 +#define GL_FRONT_FACE 0x0B46 +#define GL_DEPTH_RANGE 0x0B70 +#define GL_DEPTH_TEST 0x0B71 +#define GL_DEPTH_WRITEMASK 0x0B72 +#define GL_DEPTH_CLEAR_VALUE 0x0B73 +#define GL_DEPTH_FUNC 0x0B74 +#define GL_STENCIL_TEST 0x0B90 +#define GL_STENCIL_CLEAR_VALUE 0x0B91 +#define GL_STENCIL_FUNC 0x0B92 +#define GL_STENCIL_VALUE_MASK 0x0B93 +#define GL_STENCIL_FAIL 0x0B94 +#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95 +#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96 +#define GL_STENCIL_REF 0x0B97 +#define GL_STENCIL_WRITEMASK 0x0B98 +#define GL_VIEWPORT 0x0BA2 +#define GL_DITHER 0x0BD0 +#define GL_BLEND_DST 0x0BE0 +#define GL_BLEND_SRC 0x0BE1 +#define GL_BLEND 0x0BE2 +#define GL_LOGIC_OP_MODE 0x0BF0 +#define GL_DRAW_BUFFER 0x0C01 +#define GL_READ_BUFFER 0x0C02 +#define GL_SCISSOR_BOX 0x0C10 +#define GL_SCISSOR_TEST 0x0C11 +#define GL_COLOR_CLEAR_VALUE 0x0C22 +#define GL_COLOR_WRITEMASK 0x0C23 +#define GL_DOUBLEBUFFER 0x0C32 +#define GL_STEREO 0x0C33 +#define GL_LINE_SMOOTH_HINT 0x0C52 +#define GL_POLYGON_SMOOTH_HINT 0x0C53 +#define GL_UNPACK_SWAP_BYTES 0x0CF0 +#define GL_UNPACK_LSB_FIRST 0x0CF1 +#define GL_UNPACK_ROW_LENGTH 0x0CF2 +#define GL_UNPACK_SKIP_ROWS 0x0CF3 +#define 
GL_UNPACK_SKIP_PIXELS 0x0CF4 +#define GL_UNPACK_ALIGNMENT 0x0CF5 +#define GL_PACK_SWAP_BYTES 0x0D00 +#define GL_PACK_LSB_FIRST 0x0D01 +#define GL_PACK_ROW_LENGTH 0x0D02 +#define GL_PACK_SKIP_ROWS 0x0D03 +#define GL_PACK_SKIP_PIXELS 0x0D04 +#define GL_PACK_ALIGNMENT 0x0D05 +#define GL_MAX_TEXTURE_SIZE 0x0D33 +#define GL_MAX_VIEWPORT_DIMS 0x0D3A +#define GL_SUBPIXEL_BITS 0x0D50 +#define GL_TEXTURE_1D 0x0DE0 +#define GL_TEXTURE_2D 0x0DE1 +#define GL_TEXTURE_WIDTH 0x1000 +#define GL_TEXTURE_HEIGHT 0x1001 +#define GL_TEXTURE_BORDER_COLOR 0x1004 +#define GL_DONT_CARE 0x1100 +#define GL_FASTEST 0x1101 +#define GL_NICEST 0x1102 +#define GL_BYTE 0x1400 +#define GL_UNSIGNED_BYTE 0x1401 +#define GL_SHORT 0x1402 +#define GL_UNSIGNED_SHORT 0x1403 +#define GL_INT 0x1404 +#define GL_UNSIGNED_INT 0x1405 +#define GL_FLOAT 0x1406 +#define GL_STACK_OVERFLOW 0x0503 +#define GL_STACK_UNDERFLOW 0x0504 +#define GL_CLEAR 0x1500 +#define GL_AND 0x1501 +#define GL_AND_REVERSE 0x1502 +#define GL_COPY 0x1503 +#define GL_AND_INVERTED 0x1504 +#define GL_NOOP 0x1505 +#define GL_XOR 0x1506 +#define GL_OR 0x1507 +#define GL_NOR 0x1508 +#define GL_EQUIV 0x1509 +#define GL_INVERT 0x150A +#define GL_OR_REVERSE 0x150B +#define GL_COPY_INVERTED 0x150C +#define GL_OR_INVERTED 0x150D +#define GL_NAND 0x150E +#define GL_SET 0x150F +#define GL_TEXTURE 0x1702 +#define GL_COLOR 0x1800 +#define GL_DEPTH 0x1801 +#define GL_STENCIL 0x1802 +#define GL_STENCIL_INDEX 0x1901 +#define GL_DEPTH_COMPONENT 0x1902 +#define GL_RED 0x1903 +#define GL_GREEN 0x1904 +#define GL_BLUE 0x1905 +#define GL_ALPHA 0x1906 +#define GL_RGB 0x1907 +#define GL_RGBA 0x1908 +#define GL_POINT 0x1B00 +#define GL_LINE 0x1B01 +#define GL_FILL 0x1B02 +#define GL_KEEP 0x1E00 +#define GL_REPLACE 0x1E01 +#define GL_INCR 0x1E02 +#define GL_DECR 0x1E03 +#define GL_VENDOR 0x1F00 +#define GL_RENDERER 0x1F01 +#define GL_VERSION 0x1F02 +#define GL_EXTENSIONS 0x1F03 +#define GL_NEAREST 0x2600 +#define GL_LINEAR 0x2601 +#define GL_NEAREST_MIPMAP_NEAREST 0x2700 +#define GL_LINEAR_MIPMAP_NEAREST 0x2701 +#define GL_NEAREST_MIPMAP_LINEAR 0x2702 +#define GL_LINEAR_MIPMAP_LINEAR 0x2703 +#define GL_TEXTURE_MAG_FILTER 0x2800 +#define GL_TEXTURE_MIN_FILTER 0x2801 +#define GL_TEXTURE_WRAP_S 0x2802 +#define GL_TEXTURE_WRAP_T 0x2803 +#define GL_REPEAT 0x2901 +typedef void (APIENTRYP PFNGLCULLFACEPROC) (GLenum mode); +typedef void (APIENTRYP PFNGLFRONTFACEPROC) (GLenum mode); +typedef void (APIENTRYP PFNGLHINTPROC) (GLenum target, GLenum mode); +typedef void (APIENTRYP PFNGLLINEWIDTHPROC) (GLfloat width); +typedef void (APIENTRYP PFNGLPOINTSIZEPROC) (GLfloat size); +typedef void (APIENTRYP PFNGLPOLYGONMODEPROC) (GLenum face, GLenum mode); +typedef void (APIENTRYP PFNGLSCISSORPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLTEXPARAMETERFPROC) (GLenum target, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLTEXPARAMETERFVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLTEXPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLTEXPARAMETERIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLTEXIMAGE1DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXIMAGE2DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void 
(APIENTRYP PFNGLDRAWBUFFERPROC) (GLenum buf); +typedef void (APIENTRYP PFNGLCLEARPROC) (GLbitfield mask); +typedef void (APIENTRYP PFNGLCLEARCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (APIENTRYP PFNGLCLEARSTENCILPROC) (GLint s); +typedef void (APIENTRYP PFNGLCLEARDEPTHPROC) (GLdouble depth); +typedef void (APIENTRYP PFNGLSTENCILMASKPROC) (GLuint mask); +typedef void (APIENTRYP PFNGLCOLORMASKPROC) (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +typedef void (APIENTRYP PFNGLDEPTHMASKPROC) (GLboolean flag); +typedef void (APIENTRYP PFNGLDISABLEPROC) (GLenum cap); +typedef void (APIENTRYP PFNGLENABLEPROC) (GLenum cap); +typedef void (APIENTRYP PFNGLFINISHPROC) (void); +typedef void (APIENTRYP PFNGLFLUSHPROC) (void); +typedef void (APIENTRYP PFNGLBLENDFUNCPROC) (GLenum sfactor, GLenum dfactor); +typedef void (APIENTRYP PFNGLLOGICOPPROC) (GLenum opcode); +typedef void (APIENTRYP PFNGLSTENCILFUNCPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (APIENTRYP PFNGLSTENCILOPPROC) (GLenum fail, GLenum zfail, GLenum zpass); +typedef void (APIENTRYP PFNGLDEPTHFUNCPROC) (GLenum func); +typedef void (APIENTRYP PFNGLPIXELSTOREFPROC) (GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLPIXELSTOREIPROC) (GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLREADBUFFERPROC) (GLenum src); +typedef void (APIENTRYP PFNGLREADPIXELSPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +typedef void (APIENTRYP PFNGLGETBOOLEANVPROC) (GLenum pname, GLboolean *data); +typedef void (APIENTRYP PFNGLGETDOUBLEVPROC) (GLenum pname, GLdouble *data); +typedef GLenum (APIENTRYP PFNGLGETERRORPROC) (void); +typedef void (APIENTRYP PFNGLGETFLOATVPROC) (GLenum pname, GLfloat *data); +typedef void (APIENTRYP PFNGLGETINTEGERVPROC) (GLenum pname, GLint *data); +typedef const GLubyte *(APIENTRYP PFNGLGETSTRINGPROC) (GLenum name); +typedef void (APIENTRYP PFNGLGETTEXIMAGEPROC) (GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +typedef void (APIENTRYP PFNGLGETTEXPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETTEXPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXLEVELPARAMETERFVPROC) (GLenum target, GLint level, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETTEXLEVELPARAMETERIVPROC) (GLenum target, GLint level, GLenum pname, GLint *params); +typedef GLboolean (APIENTRYP PFNGLISENABLEDPROC) (GLenum cap); +typedef void (APIENTRYP PFNGLDEPTHRANGEPROC) (GLdouble n, GLdouble f); +typedef void (APIENTRYP PFNGLVIEWPORTPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCullFace (GLenum mode); +GLAPI void APIENTRY glFrontFace (GLenum mode); +GLAPI void APIENTRY glHint (GLenum target, GLenum mode); +GLAPI void APIENTRY glLineWidth (GLfloat width); +GLAPI void APIENTRY glPointSize (GLfloat size); +GLAPI void APIENTRY glPolygonMode (GLenum face, GLenum mode); +GLAPI void APIENTRY glScissor (GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glTexParameterf (GLenum target, GLenum pname, GLfloat param); +GLAPI void APIENTRY glTexParameterfv (GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glTexParameteri (GLenum target, GLenum pname, GLint param); +GLAPI void APIENTRY glTexParameteriv (GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glTexImage1D (GLenum target, 
GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTexImage2D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glDrawBuffer (GLenum buf); +GLAPI void APIENTRY glClear (GLbitfield mask); +GLAPI void APIENTRY glClearColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GLAPI void APIENTRY glClearStencil (GLint s); +GLAPI void APIENTRY glClearDepth (GLdouble depth); +GLAPI void APIENTRY glStencilMask (GLuint mask); +GLAPI void APIENTRY glColorMask (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +GLAPI void APIENTRY glDepthMask (GLboolean flag); +GLAPI void APIENTRY glDisable (GLenum cap); +GLAPI void APIENTRY glEnable (GLenum cap); +GLAPI void APIENTRY glFinish (void); +GLAPI void APIENTRY glFlush (void); +GLAPI void APIENTRY glBlendFunc (GLenum sfactor, GLenum dfactor); +GLAPI void APIENTRY glLogicOp (GLenum opcode); +GLAPI void APIENTRY glStencilFunc (GLenum func, GLint ref, GLuint mask); +GLAPI void APIENTRY glStencilOp (GLenum fail, GLenum zfail, GLenum zpass); +GLAPI void APIENTRY glDepthFunc (GLenum func); +GLAPI void APIENTRY glPixelStoref (GLenum pname, GLfloat param); +GLAPI void APIENTRY glPixelStorei (GLenum pname, GLint param); +GLAPI void APIENTRY glReadBuffer (GLenum src); +GLAPI void APIENTRY glReadPixels (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +GLAPI void APIENTRY glGetBooleanv (GLenum pname, GLboolean *data); +GLAPI void APIENTRY glGetDoublev (GLenum pname, GLdouble *data); +GLAPI GLenum APIENTRY glGetError (void); +GLAPI void APIENTRY glGetFloatv (GLenum pname, GLfloat *data); +GLAPI void APIENTRY glGetIntegerv (GLenum pname, GLint *data); +GLAPI const GLubyte *APIENTRY glGetString (GLenum name); +GLAPI void APIENTRY glGetTexImage (GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +GLAPI void APIENTRY glGetTexParameterfv (GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetTexParameteriv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTexLevelParameterfv (GLenum target, GLint level, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetTexLevelParameteriv (GLenum target, GLint level, GLenum pname, GLint *params); +GLAPI GLboolean APIENTRY glIsEnabled (GLenum cap); +GLAPI void APIENTRY glDepthRange (GLdouble n, GLdouble f); +GLAPI void APIENTRY glViewport (GLint x, GLint y, GLsizei width, GLsizei height); +#endif +#endif /* GL_VERSION_1_0 */ + +#ifndef GL_VERSION_1_1 +#define GL_VERSION_1_1 1 +typedef float GLclampf; +typedef double GLclampd; +#define GL_COLOR_LOGIC_OP 0x0BF2 +#define GL_POLYGON_OFFSET_UNITS 0x2A00 +#define GL_POLYGON_OFFSET_POINT 0x2A01 +#define GL_POLYGON_OFFSET_LINE 0x2A02 +#define GL_POLYGON_OFFSET_FILL 0x8037 +#define GL_POLYGON_OFFSET_FACTOR 0x8038 +#define GL_TEXTURE_BINDING_1D 0x8068 +#define GL_TEXTURE_BINDING_2D 0x8069 +#define GL_TEXTURE_INTERNAL_FORMAT 0x1003 +#define GL_TEXTURE_RED_SIZE 0x805C +#define GL_TEXTURE_GREEN_SIZE 0x805D +#define GL_TEXTURE_BLUE_SIZE 0x805E +#define GL_TEXTURE_ALPHA_SIZE 0x805F +#define GL_DOUBLE 0x140A +#define GL_PROXY_TEXTURE_1D 0x8063 +#define GL_PROXY_TEXTURE_2D 0x8064 +#define GL_R3_G3_B2 0x2A10 +#define GL_RGB4 0x804F +#define GL_RGB5 0x8050 +#define GL_RGB8 0x8051 +#define GL_RGB10 0x8052 +#define GL_RGB12 0x8053 +#define GL_RGB16 0x8054 +#define GL_RGBA2 0x8055 
+#define GL_RGBA4 0x8056 +#define GL_RGB5_A1 0x8057 +#define GL_RGBA8 0x8058 +#define GL_RGB10_A2 0x8059 +#define GL_RGBA12 0x805A +#define GL_RGBA16 0x805B +#define GL_VERTEX_ARRAY 0x8074 +typedef void (APIENTRYP PFNGLDRAWARRAYSPROC) (GLenum mode, GLint first, GLsizei count); +typedef void (APIENTRYP PFNGLDRAWELEMENTSPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices); +typedef void (APIENTRYP PFNGLGETPOINTERVPROC) (GLenum pname, void **params); +typedef void (APIENTRYP PFNGLPOLYGONOFFSETPROC) (GLfloat factor, GLfloat units); +typedef void (APIENTRYP PFNGLCOPYTEXIMAGE1DPROC) (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +typedef void (APIENTRYP PFNGLCOPYTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (APIENTRYP PFNGLCOPYTEXSUBIMAGE1DPROC) (GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLCOPYTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLTEXSUBIMAGE1DPROC) (GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLBINDTEXTUREPROC) (GLenum target, GLuint texture); +typedef void (APIENTRYP PFNGLDELETETEXTURESPROC) (GLsizei n, const GLuint *textures); +typedef void (APIENTRYP PFNGLGENTEXTURESPROC) (GLsizei n, GLuint *textures); +typedef GLboolean (APIENTRYP PFNGLISTEXTUREPROC) (GLuint texture); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawArrays (GLenum mode, GLint first, GLsizei count); +GLAPI void APIENTRY glDrawElements (GLenum mode, GLsizei count, GLenum type, const void *indices); +GLAPI void APIENTRY glGetPointerv (GLenum pname, void **params); +GLAPI void APIENTRY glPolygonOffset (GLfloat factor, GLfloat units); +GLAPI void APIENTRY glCopyTexImage1D (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +GLAPI void APIENTRY glCopyTexImage2D (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GLAPI void APIENTRY glCopyTexSubImage1D (GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY glCopyTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glTexSubImage1D (GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glBindTexture (GLenum target, GLuint texture); +GLAPI void APIENTRY glDeleteTextures (GLsizei n, const GLuint *textures); +GLAPI void APIENTRY glGenTextures (GLsizei n, GLuint *textures); +GLAPI GLboolean APIENTRY glIsTexture (GLuint texture); +#endif +#endif /* GL_VERSION_1_1 */ + +#ifndef GL_VERSION_1_2 +#define GL_VERSION_1_2 1 +#define GL_UNSIGNED_BYTE_3_3_2 0x8032 +#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034 +#define GL_UNSIGNED_INT_8_8_8_8 
0x8035 +#define GL_UNSIGNED_INT_10_10_10_2 0x8036 +#define GL_TEXTURE_BINDING_3D 0x806A +#define GL_PACK_SKIP_IMAGES 0x806B +#define GL_PACK_IMAGE_HEIGHT 0x806C +#define GL_UNPACK_SKIP_IMAGES 0x806D +#define GL_UNPACK_IMAGE_HEIGHT 0x806E +#define GL_TEXTURE_3D 0x806F +#define GL_PROXY_TEXTURE_3D 0x8070 +#define GL_TEXTURE_DEPTH 0x8071 +#define GL_TEXTURE_WRAP_R 0x8072 +#define GL_MAX_3D_TEXTURE_SIZE 0x8073 +#define GL_UNSIGNED_BYTE_2_3_3_REV 0x8362 +#define GL_UNSIGNED_SHORT_5_6_5 0x8363 +#define GL_UNSIGNED_SHORT_5_6_5_REV 0x8364 +#define GL_UNSIGNED_SHORT_4_4_4_4_REV 0x8365 +#define GL_UNSIGNED_SHORT_1_5_5_5_REV 0x8366 +#define GL_UNSIGNED_INT_8_8_8_8_REV 0x8367 +#define GL_UNSIGNED_INT_2_10_10_10_REV 0x8368 +#define GL_BGR 0x80E0 +#define GL_BGRA 0x80E1 +#define GL_MAX_ELEMENTS_VERTICES 0x80E8 +#define GL_MAX_ELEMENTS_INDICES 0x80E9 +#define GL_CLAMP_TO_EDGE 0x812F +#define GL_TEXTURE_MIN_LOD 0x813A +#define GL_TEXTURE_MAX_LOD 0x813B +#define GL_TEXTURE_BASE_LEVEL 0x813C +#define GL_TEXTURE_MAX_LEVEL 0x813D +#define GL_SMOOTH_POINT_SIZE_RANGE 0x0B12 +#define GL_SMOOTH_POINT_SIZE_GRANULARITY 0x0B13 +#define GL_SMOOTH_LINE_WIDTH_RANGE 0x0B22 +#define GL_SMOOTH_LINE_WIDTH_GRANULARITY 0x0B23 +#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E +typedef void (APIENTRYP PFNGLDRAWRANGEELEMENTSPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +typedef void (APIENTRYP PFNGLTEXIMAGE3DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOPYTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawRangeElements (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +GLAPI void APIENTRY glTexImage3D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCopyTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +#endif +#endif /* GL_VERSION_1_2 */ + +#ifndef GL_VERSION_1_3 +#define GL_VERSION_1_3 1 +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 +#define GL_TEXTURE3 0x84C3 +#define GL_TEXTURE4 0x84C4 +#define GL_TEXTURE5 0x84C5 +#define GL_TEXTURE6 0x84C6 +#define GL_TEXTURE7 0x84C7 +#define GL_TEXTURE8 0x84C8 +#define GL_TEXTURE9 0x84C9 +#define GL_TEXTURE10 0x84CA +#define GL_TEXTURE11 0x84CB +#define GL_TEXTURE12 0x84CC +#define GL_TEXTURE13 0x84CD +#define GL_TEXTURE14 0x84CE +#define GL_TEXTURE15 0x84CF +#define GL_TEXTURE16 0x84D0 +#define GL_TEXTURE17 0x84D1 +#define GL_TEXTURE18 0x84D2 +#define GL_TEXTURE19 0x84D3 +#define GL_TEXTURE20 0x84D4 +#define GL_TEXTURE21 0x84D5 +#define GL_TEXTURE22 0x84D6 +#define GL_TEXTURE23 0x84D7 +#define GL_TEXTURE24 0x84D8 +#define GL_TEXTURE25 0x84D9 
+#define GL_TEXTURE26 0x84DA +#define GL_TEXTURE27 0x84DB +#define GL_TEXTURE28 0x84DC +#define GL_TEXTURE29 0x84DD +#define GL_TEXTURE30 0x84DE +#define GL_TEXTURE31 0x84DF +#define GL_ACTIVE_TEXTURE 0x84E0 +#define GL_MULTISAMPLE 0x809D +#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E +#define GL_SAMPLE_ALPHA_TO_ONE 0x809F +#define GL_SAMPLE_COVERAGE 0x80A0 +#define GL_SAMPLE_BUFFERS 0x80A8 +#define GL_SAMPLES 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT 0x80AB +#define GL_TEXTURE_CUBE_MAP 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A +#define GL_PROXY_TEXTURE_CUBE_MAP 0x851B +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C +#define GL_COMPRESSED_RGB 0x84ED +#define GL_COMPRESSED_RGBA 0x84EE +#define GL_TEXTURE_COMPRESSION_HINT 0x84EF +#define GL_TEXTURE_COMPRESSED_IMAGE_SIZE 0x86A0 +#define GL_TEXTURE_COMPRESSED 0x86A1 +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3 +#define GL_CLAMP_TO_BORDER 0x812D +typedef void (APIENTRYP PFNGLACTIVETEXTUREPROC) (GLenum texture); +typedef void (APIENTRYP PFNGLSAMPLECOVERAGEPROC) (GLfloat value, GLboolean invert); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE1DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC) (GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLGETCOMPRESSEDTEXIMAGEPROC) (GLenum target, GLint level, void *img); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glActiveTexture (GLenum texture); +GLAPI void APIENTRY glSampleCoverage (GLfloat value, GLboolean invert); +GLAPI void APIENTRY glCompressedTexImage3D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexImage2D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexImage1D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, 
GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexSubImage1D (GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glGetCompressedTexImage (GLenum target, GLint level, void *img); +#endif +#endif /* GL_VERSION_1_3 */ + +#ifndef GL_VERSION_1_4 +#define GL_VERSION_1_4 1 +#define GL_BLEND_DST_RGB 0x80C8 +#define GL_BLEND_SRC_RGB 0x80C9 +#define GL_BLEND_DST_ALPHA 0x80CA +#define GL_BLEND_SRC_ALPHA 0x80CB +#define GL_POINT_FADE_THRESHOLD_SIZE 0x8128 +#define GL_DEPTH_COMPONENT16 0x81A5 +#define GL_DEPTH_COMPONENT24 0x81A6 +#define GL_DEPTH_COMPONENT32 0x81A7 +#define GL_MIRRORED_REPEAT 0x8370 +#define GL_MAX_TEXTURE_LOD_BIAS 0x84FD +#define GL_TEXTURE_LOD_BIAS 0x8501 +#define GL_INCR_WRAP 0x8507 +#define GL_DECR_WRAP 0x8508 +#define GL_TEXTURE_DEPTH_SIZE 0x884A +#define GL_TEXTURE_COMPARE_MODE 0x884C +#define GL_TEXTURE_COMPARE_FUNC 0x884D +#define GL_BLEND_COLOR 0x8005 +#define GL_BLEND_EQUATION 0x8009 +#define GL_CONSTANT_COLOR 0x8001 +#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002 +#define GL_CONSTANT_ALPHA 0x8003 +#define GL_ONE_MINUS_CONSTANT_ALPHA 0x8004 +#define GL_FUNC_ADD 0x8006 +#define GL_FUNC_REVERSE_SUBTRACT 0x800B +#define GL_FUNC_SUBTRACT 0x800A +#define GL_MIN 0x8007 +#define GL_MAX 0x8008 +typedef void (APIENTRYP PFNGLBLENDFUNCSEPARATEPROC) (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSPROC) (GLenum mode, const GLint *first, const GLsizei *count, GLsizei drawcount); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSPROC) (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount); +typedef void (APIENTRYP PFNGLPOINTPARAMETERFPROC) (GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLPOINTPARAMETERFVPROC) (GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLPOINTPARAMETERIPROC) (GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLPOINTPARAMETERIVPROC) (GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLBLENDCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (APIENTRYP PFNGLBLENDEQUATIONPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendFuncSeparate (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +GLAPI void APIENTRY glMultiDrawArrays (GLenum mode, const GLint *first, const GLsizei *count, GLsizei drawcount); +GLAPI void APIENTRY glMultiDrawElements (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount); +GLAPI void APIENTRY glPointParameterf (GLenum pname, GLfloat param); +GLAPI void APIENTRY glPointParameterfv (GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glPointParameteri (GLenum pname, GLint param); +GLAPI void APIENTRY glPointParameteriv (GLenum pname, const GLint *params); +GLAPI void APIENTRY glBlendColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GLAPI void APIENTRY glBlendEquation (GLenum mode); +#endif +#endif /* GL_VERSION_1_4 */ + +#ifndef GL_VERSION_1_5 +#define GL_VERSION_1_5 1 +#include +typedef khronos_ssize_t GLsizeiptr; +typedef khronos_intptr_t GLintptr; +#define GL_BUFFER_SIZE 0x8764 +#define GL_BUFFER_USAGE 0x8765 +#define 
GL_QUERY_COUNTER_BITS 0x8864 +#define GL_CURRENT_QUERY 0x8865 +#define GL_QUERY_RESULT 0x8866 +#define GL_QUERY_RESULT_AVAILABLE 0x8867 +#define GL_ARRAY_BUFFER 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define GL_ARRAY_BUFFER_BINDING 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895 +#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING 0x889F +#define GL_READ_ONLY 0x88B8 +#define GL_WRITE_ONLY 0x88B9 +#define GL_READ_WRITE 0x88BA +#define GL_BUFFER_ACCESS 0x88BB +#define GL_BUFFER_MAPPED 0x88BC +#define GL_BUFFER_MAP_POINTER 0x88BD +#define GL_STREAM_DRAW 0x88E0 +#define GL_STREAM_READ 0x88E1 +#define GL_STREAM_COPY 0x88E2 +#define GL_STATIC_DRAW 0x88E4 +#define GL_STATIC_READ 0x88E5 +#define GL_STATIC_COPY 0x88E6 +#define GL_DYNAMIC_DRAW 0x88E8 +#define GL_DYNAMIC_READ 0x88E9 +#define GL_DYNAMIC_COPY 0x88EA +#define GL_SAMPLES_PASSED 0x8914 +#define GL_SRC1_ALPHA 0x8589 +typedef void (APIENTRYP PFNGLGENQUERIESPROC) (GLsizei n, GLuint *ids); +typedef void (APIENTRYP PFNGLDELETEQUERIESPROC) (GLsizei n, const GLuint *ids); +typedef GLboolean (APIENTRYP PFNGLISQUERYPROC) (GLuint id); +typedef void (APIENTRYP PFNGLBEGINQUERYPROC) (GLenum target, GLuint id); +typedef void (APIENTRYP PFNGLENDQUERYPROC) (GLenum target); +typedef void (APIENTRYP PFNGLGETQUERYIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETQUERYOBJECTIVPROC) (GLuint id, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETQUERYOBJECTUIVPROC) (GLuint id, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer); +typedef void (APIENTRYP PFNGLDELETEBUFFERSPROC) (GLsizei n, const GLuint *buffers); +typedef void (APIENTRYP PFNGLGENBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef GLboolean (APIENTRYP PFNGLISBUFFERPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLBUFFERDATAPROC) (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +typedef void (APIENTRYP PFNGLBUFFERSUBDATAPROC) (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +typedef void (APIENTRYP PFNGLGETBUFFERSUBDATAPROC) (GLenum target, GLintptr offset, GLsizeiptr size, void *data); +typedef void *(APIENTRYP PFNGLMAPBUFFERPROC) (GLenum target, GLenum access); +typedef GLboolean (APIENTRYP PFNGLUNMAPBUFFERPROC) (GLenum target); +typedef void (APIENTRYP PFNGLGETBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETBUFFERPOINTERVPROC) (GLenum target, GLenum pname, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGenQueries (GLsizei n, GLuint *ids); +GLAPI void APIENTRY glDeleteQueries (GLsizei n, const GLuint *ids); +GLAPI GLboolean APIENTRY glIsQuery (GLuint id); +GLAPI void APIENTRY glBeginQuery (GLenum target, GLuint id); +GLAPI void APIENTRY glEndQuery (GLenum target); +GLAPI void APIENTRY glGetQueryiv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetQueryObjectiv (GLuint id, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetQueryObjectuiv (GLuint id, GLenum pname, GLuint *params); +GLAPI void APIENTRY glBindBuffer (GLenum target, GLuint buffer); +GLAPI void APIENTRY glDeleteBuffers (GLsizei n, const GLuint *buffers); +GLAPI void APIENTRY glGenBuffers (GLsizei n, GLuint *buffers); +GLAPI GLboolean APIENTRY glIsBuffer (GLuint buffer); +GLAPI void APIENTRY glBufferData (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +GLAPI void APIENTRY glBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +GLAPI 
void APIENTRY glGetBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, void *data); +GLAPI void *APIENTRY glMapBuffer (GLenum target, GLenum access); +GLAPI GLboolean APIENTRY glUnmapBuffer (GLenum target); +GLAPI void APIENTRY glGetBufferParameteriv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetBufferPointerv (GLenum target, GLenum pname, void **params); +#endif +#endif /* GL_VERSION_1_5 */ + +#ifndef GL_VERSION_2_0 +#define GL_VERSION_2_0 1 +typedef char GLchar; +typedef short GLshort; +typedef signed char GLbyte; +typedef unsigned short GLushort; +#define GL_BLEND_EQUATION_RGB 0x8009 +#define GL_VERTEX_ATTRIB_ARRAY_ENABLED 0x8622 +#define GL_VERTEX_ATTRIB_ARRAY_SIZE 0x8623 +#define GL_VERTEX_ATTRIB_ARRAY_STRIDE 0x8624 +#define GL_VERTEX_ATTRIB_ARRAY_TYPE 0x8625 +#define GL_CURRENT_VERTEX_ATTRIB 0x8626 +#define GL_VERTEX_PROGRAM_POINT_SIZE 0x8642 +#define GL_VERTEX_ATTRIB_ARRAY_POINTER 0x8645 +#define GL_STENCIL_BACK_FUNC 0x8800 +#define GL_STENCIL_BACK_FAIL 0x8801 +#define GL_STENCIL_BACK_PASS_DEPTH_FAIL 0x8802 +#define GL_STENCIL_BACK_PASS_DEPTH_PASS 0x8803 +#define GL_MAX_DRAW_BUFFERS 0x8824 +#define GL_DRAW_BUFFER0 0x8825 +#define GL_DRAW_BUFFER1 0x8826 +#define GL_DRAW_BUFFER2 0x8827 +#define GL_DRAW_BUFFER3 0x8828 +#define GL_DRAW_BUFFER4 0x8829 +#define GL_DRAW_BUFFER5 0x882A +#define GL_DRAW_BUFFER6 0x882B +#define GL_DRAW_BUFFER7 0x882C +#define GL_DRAW_BUFFER8 0x882D +#define GL_DRAW_BUFFER9 0x882E +#define GL_DRAW_BUFFER10 0x882F +#define GL_DRAW_BUFFER11 0x8830 +#define GL_DRAW_BUFFER12 0x8831 +#define GL_DRAW_BUFFER13 0x8832 +#define GL_DRAW_BUFFER14 0x8833 +#define GL_DRAW_BUFFER15 0x8834 +#define GL_BLEND_EQUATION_ALPHA 0x883D +#define GL_MAX_VERTEX_ATTRIBS 0x8869 +#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED 0x886A +#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872 +#define GL_FRAGMENT_SHADER 0x8B30 +#define GL_VERTEX_SHADER 0x8B31 +#define GL_MAX_FRAGMENT_UNIFORM_COMPONENTS 0x8B49 +#define GL_MAX_VERTEX_UNIFORM_COMPONENTS 0x8B4A +#define GL_MAX_VARYING_FLOATS 0x8B4B +#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS 0x8B4C +#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D +#define GL_SHADER_TYPE 0x8B4F +#define GL_FLOAT_VEC2 0x8B50 +#define GL_FLOAT_VEC3 0x8B51 +#define GL_FLOAT_VEC4 0x8B52 +#define GL_INT_VEC2 0x8B53 +#define GL_INT_VEC3 0x8B54 +#define GL_INT_VEC4 0x8B55 +#define GL_BOOL 0x8B56 +#define GL_BOOL_VEC2 0x8B57 +#define GL_BOOL_VEC3 0x8B58 +#define GL_BOOL_VEC4 0x8B59 +#define GL_FLOAT_MAT2 0x8B5A +#define GL_FLOAT_MAT3 0x8B5B +#define GL_FLOAT_MAT4 0x8B5C +#define GL_SAMPLER_1D 0x8B5D +#define GL_SAMPLER_2D 0x8B5E +#define GL_SAMPLER_3D 0x8B5F +#define GL_SAMPLER_CUBE 0x8B60 +#define GL_SAMPLER_1D_SHADOW 0x8B61 +#define GL_SAMPLER_2D_SHADOW 0x8B62 +#define GL_DELETE_STATUS 0x8B80 +#define GL_COMPILE_STATUS 0x8B81 +#define GL_LINK_STATUS 0x8B82 +#define GL_VALIDATE_STATUS 0x8B83 +#define GL_INFO_LOG_LENGTH 0x8B84 +#define GL_ATTACHED_SHADERS 0x8B85 +#define GL_ACTIVE_UNIFORMS 0x8B86 +#define GL_ACTIVE_UNIFORM_MAX_LENGTH 0x8B87 +#define GL_SHADER_SOURCE_LENGTH 0x8B88 +#define GL_ACTIVE_ATTRIBUTES 0x8B89 +#define GL_ACTIVE_ATTRIBUTE_MAX_LENGTH 0x8B8A +#define GL_FRAGMENT_SHADER_DERIVATIVE_HINT 0x8B8B +#define GL_SHADING_LANGUAGE_VERSION 0x8B8C +#define GL_CURRENT_PROGRAM 0x8B8D +#define GL_POINT_SPRITE_COORD_ORIGIN 0x8CA0 +#define GL_LOWER_LEFT 0x8CA1 +#define GL_UPPER_LEFT 0x8CA2 +#define GL_STENCIL_BACK_REF 0x8CA3 +#define GL_STENCIL_BACK_VALUE_MASK 0x8CA4 +#define GL_STENCIL_BACK_WRITEMASK 0x8CA5 +typedef void (APIENTRYP 
PFNGLBLENDEQUATIONSEPARATEPROC) (GLenum modeRGB, GLenum modeAlpha); +typedef void (APIENTRYP PFNGLDRAWBUFFERSPROC) (GLsizei n, const GLenum *bufs); +typedef void (APIENTRYP PFNGLSTENCILOPSEPARATEPROC) (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +typedef void (APIENTRYP PFNGLSTENCILFUNCSEPARATEPROC) (GLenum face, GLenum func, GLint ref, GLuint mask); +typedef void (APIENTRYP PFNGLSTENCILMASKSEPARATEPROC) (GLenum face, GLuint mask); +typedef void (APIENTRYP PFNGLATTACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (APIENTRYP PFNGLBINDATTRIBLOCATIONPROC) (GLuint program, GLuint index, const GLchar *name); +typedef void (APIENTRYP PFNGLCOMPILESHADERPROC) (GLuint shader); +typedef GLuint (APIENTRYP PFNGLCREATEPROGRAMPROC) (void); +typedef GLuint (APIENTRYP PFNGLCREATESHADERPROC) (GLenum type); +typedef void (APIENTRYP PFNGLDELETEPROGRAMPROC) (GLuint program); +typedef void (APIENTRYP PFNGLDELETESHADERPROC) (GLuint shader); +typedef void (APIENTRYP PFNGLDETACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (APIENTRYP PFNGLDISABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (APIENTRYP PFNGLENABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (APIENTRYP PFNGLGETACTIVEATTRIBPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (APIENTRYP PFNGLGETATTACHEDSHADERSPROC) (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +typedef GLint (APIENTRYP PFNGLGETATTRIBLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (APIENTRYP PFNGLGETPROGRAMIVPROC) (GLuint program, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETPROGRAMINFOLOGPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (APIENTRYP PFNGLGETSHADERIVPROC) (GLuint shader, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETSHADERINFOLOGPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (APIENTRYP PFNGLGETSHADERSOURCEPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +typedef GLint (APIENTRYP PFNGLGETUNIFORMLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (APIENTRYP PFNGLGETUNIFORMFVPROC) (GLuint program, GLint location, GLfloat *params); +typedef void (APIENTRYP PFNGLGETUNIFORMIVPROC) (GLuint program, GLint location, GLint *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBDVPROC) (GLuint index, GLenum pname, GLdouble *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBFVPROC) (GLuint index, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVPROC) (GLuint index, GLenum pname, void **pointer); +typedef GLboolean (APIENTRYP PFNGLISPROGRAMPROC) (GLuint program); +typedef GLboolean (APIENTRYP PFNGLISSHADERPROC) (GLuint shader); +typedef void (APIENTRYP PFNGLLINKPROGRAMPROC) (GLuint program); +typedef void (APIENTRYP PFNGLSHADERSOURCEPROC) (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +typedef void (APIENTRYP PFNGLUSEPROGRAMPROC) (GLuint program); +typedef void (APIENTRYP PFNGLUNIFORM1FPROC) (GLint location, GLfloat v0); +typedef void (APIENTRYP PFNGLUNIFORM2FPROC) (GLint location, GLfloat v0, GLfloat v1); +typedef void (APIENTRYP 
PFNGLUNIFORM3FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (APIENTRYP PFNGLUNIFORM4FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (APIENTRYP PFNGLUNIFORM1IPROC) (GLint location, GLint v0); +typedef void (APIENTRYP PFNGLUNIFORM2IPROC) (GLint location, GLint v0, GLint v1); +typedef void (APIENTRYP PFNGLUNIFORM3IPROC) (GLint location, GLint v0, GLint v1, GLint v2); +typedef void (APIENTRYP PFNGLUNIFORM4IPROC) (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (APIENTRYP PFNGLUNIFORM1FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM2FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM3FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM4FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM1IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORM2IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORM3IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORM4IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLVALIDATEPROGRAMPROC) (GLuint program); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1DPROC) (GLuint index, GLdouble x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1FPROC) (GLuint index, GLfloat x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1FVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1SPROC) (GLuint index, GLshort x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1SVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2DPROC) (GLuint index, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2FPROC) (GLuint index, GLfloat x, GLfloat y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2FVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2SPROC) (GLuint index, GLshort x, GLshort y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2SVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3DPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3FVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3SPROC) (GLuint index, GLshort x, GLshort y, GLshort z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3SVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NBVPROC) (GLuint index, const GLbyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NIVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP 
PFNGLVERTEXATTRIB4NSVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUBPROC) (GLuint index, GLubyte x, GLubyte y, GLubyte z, GLubyte w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUBVPROC) (GLuint index, const GLubyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUSVPROC) (GLuint index, const GLushort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4BVPROC) (GLuint index, const GLbyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4DPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4FVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4IVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4SPROC) (GLuint index, GLshort x, GLshort y, GLshort z, GLshort w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4SVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4UBVPROC) (GLuint index, const GLubyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4UIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4USVPROC) (GLuint index, const GLushort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBPOINTERPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendEquationSeparate (GLenum modeRGB, GLenum modeAlpha); +GLAPI void APIENTRY glDrawBuffers (GLsizei n, const GLenum *bufs); +GLAPI void APIENTRY glStencilOpSeparate (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +GLAPI void APIENTRY glStencilFuncSeparate (GLenum face, GLenum func, GLint ref, GLuint mask); +GLAPI void APIENTRY glStencilMaskSeparate (GLenum face, GLuint mask); +GLAPI void APIENTRY glAttachShader (GLuint program, GLuint shader); +GLAPI void APIENTRY glBindAttribLocation (GLuint program, GLuint index, const GLchar *name); +GLAPI void APIENTRY glCompileShader (GLuint shader); +GLAPI GLuint APIENTRY glCreateProgram (void); +GLAPI GLuint APIENTRY glCreateShader (GLenum type); +GLAPI void APIENTRY glDeleteProgram (GLuint program); +GLAPI void APIENTRY glDeleteShader (GLuint shader); +GLAPI void APIENTRY glDetachShader (GLuint program, GLuint shader); +GLAPI void APIENTRY glDisableVertexAttribArray (GLuint index); +GLAPI void APIENTRY glEnableVertexAttribArray (GLuint index); +GLAPI void APIENTRY glGetActiveAttrib (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GLAPI void APIENTRY glGetActiveUniform (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GLAPI void APIENTRY glGetAttachedShaders (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +GLAPI GLint APIENTRY glGetAttribLocation (GLuint program, const GLchar *name); +GLAPI void APIENTRY glGetProgramiv (GLuint program, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetProgramInfoLog (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GLAPI void APIENTRY glGetShaderiv (GLuint shader, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetShaderInfoLog (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GLAPI void APIENTRY 
glGetShaderSource (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +GLAPI GLint APIENTRY glGetUniformLocation (GLuint program, const GLchar *name); +GLAPI void APIENTRY glGetUniformfv (GLuint program, GLint location, GLfloat *params); +GLAPI void APIENTRY glGetUniformiv (GLuint program, GLint location, GLint *params); +GLAPI void APIENTRY glGetVertexAttribdv (GLuint index, GLenum pname, GLdouble *params); +GLAPI void APIENTRY glGetVertexAttribfv (GLuint index, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetVertexAttribiv (GLuint index, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetVertexAttribPointerv (GLuint index, GLenum pname, void **pointer); +GLAPI GLboolean APIENTRY glIsProgram (GLuint program); +GLAPI GLboolean APIENTRY glIsShader (GLuint shader); +GLAPI void APIENTRY glLinkProgram (GLuint program); +GLAPI void APIENTRY glShaderSource (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +GLAPI void APIENTRY glUseProgram (GLuint program); +GLAPI void APIENTRY glUniform1f (GLint location, GLfloat v0); +GLAPI void APIENTRY glUniform2f (GLint location, GLfloat v0, GLfloat v1); +GLAPI void APIENTRY glUniform3f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GLAPI void APIENTRY glUniform4f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GLAPI void APIENTRY glUniform1i (GLint location, GLint v0); +GLAPI void APIENTRY glUniform2i (GLint location, GLint v0, GLint v1); +GLAPI void APIENTRY glUniform3i (GLint location, GLint v0, GLint v1, GLint v2); +GLAPI void APIENTRY glUniform4i (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GLAPI void APIENTRY glUniform1fv (GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glUniform2fv (GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glUniform3fv (GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glUniform4fv (GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glUniform1iv (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniform2iv (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniform3iv (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniform4iv (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniformMatrix2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glValidateProgram (GLuint program); +GLAPI void APIENTRY glVertexAttrib1d (GLuint index, GLdouble x); +GLAPI void APIENTRY glVertexAttrib1dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib1f (GLuint index, GLfloat x); +GLAPI void APIENTRY glVertexAttrib1fv (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib1s (GLuint index, GLshort x); +GLAPI void APIENTRY glVertexAttrib1sv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib2d (GLuint index, GLdouble x, GLdouble y); +GLAPI void APIENTRY glVertexAttrib2dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib2f (GLuint index, GLfloat x, GLfloat y); +GLAPI void APIENTRY glVertexAttrib2fv (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib2s (GLuint index, GLshort x, GLshort 
y); +GLAPI void APIENTRY glVertexAttrib2sv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib3d (GLuint index, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glVertexAttrib3dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib3f (GLuint index, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glVertexAttrib3fv (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib3s (GLuint index, GLshort x, GLshort y, GLshort z); +GLAPI void APIENTRY glVertexAttrib3sv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib4Nbv (GLuint index, const GLbyte *v); +GLAPI void APIENTRY glVertexAttrib4Niv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttrib4Nsv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib4Nub (GLuint index, GLubyte x, GLubyte y, GLubyte z, GLubyte w); +GLAPI void APIENTRY glVertexAttrib4Nubv (GLuint index, const GLubyte *v); +GLAPI void APIENTRY glVertexAttrib4Nuiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttrib4Nusv (GLuint index, const GLushort *v); +GLAPI void APIENTRY glVertexAttrib4bv (GLuint index, const GLbyte *v); +GLAPI void APIENTRY glVertexAttrib4d (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glVertexAttrib4dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib4f (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glVertexAttrib4fv (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib4iv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttrib4s (GLuint index, GLshort x, GLshort y, GLshort z, GLshort w); +GLAPI void APIENTRY glVertexAttrib4sv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib4ubv (GLuint index, const GLubyte *v); +GLAPI void APIENTRY glVertexAttrib4uiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttrib4usv (GLuint index, const GLushort *v); +GLAPI void APIENTRY glVertexAttribPointer (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +#endif +#endif /* GL_VERSION_2_0 */ + +#ifndef GL_VERSION_2_1 +#define GL_VERSION_2_1 1 +#define GL_PIXEL_PACK_BUFFER 0x88EB +#define GL_PIXEL_UNPACK_BUFFER 0x88EC +#define GL_PIXEL_PACK_BUFFER_BINDING 0x88ED +#define GL_PIXEL_UNPACK_BUFFER_BINDING 0x88EF +#define GL_FLOAT_MAT2x3 0x8B65 +#define GL_FLOAT_MAT2x4 0x8B66 +#define GL_FLOAT_MAT3x2 0x8B67 +#define GL_FLOAT_MAT3x4 0x8B68 +#define GL_FLOAT_MAT4x2 0x8B69 +#define GL_FLOAT_MAT4x3 0x8B6A +#define GL_SRGB 0x8C40 +#define GL_SRGB8 0x8C41 +#define GL_SRGB_ALPHA 0x8C42 +#define GL_SRGB8_ALPHA8 0x8C43 +#define GL_COMPRESSED_SRGB 0x8C48 +#define GL_COMPRESSED_SRGB_ALPHA 0x8C49 +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#ifdef 
GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glUniformMatrix2x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix3x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix2x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix4x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix3x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix4x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#endif +#endif /* GL_VERSION_2_1 */ + +#ifndef GL_VERSION_3_0 +#define GL_VERSION_3_0 1 +typedef unsigned short GLhalf; +#define GL_COMPARE_REF_TO_TEXTURE 0x884E +#define GL_CLIP_DISTANCE0 0x3000 +#define GL_CLIP_DISTANCE1 0x3001 +#define GL_CLIP_DISTANCE2 0x3002 +#define GL_CLIP_DISTANCE3 0x3003 +#define GL_CLIP_DISTANCE4 0x3004 +#define GL_CLIP_DISTANCE5 0x3005 +#define GL_CLIP_DISTANCE6 0x3006 +#define GL_CLIP_DISTANCE7 0x3007 +#define GL_MAX_CLIP_DISTANCES 0x0D32 +#define GL_MAJOR_VERSION 0x821B +#define GL_MINOR_VERSION 0x821C +#define GL_NUM_EXTENSIONS 0x821D +#define GL_CONTEXT_FLAGS 0x821E +#define GL_COMPRESSED_RED 0x8225 +#define GL_COMPRESSED_RG 0x8226 +#define GL_CONTEXT_FLAG_FORWARD_COMPATIBLE_BIT 0x00000001 +#define GL_RGBA32F 0x8814 +#define GL_RGB32F 0x8815 +#define GL_RGBA16F 0x881A +#define GL_RGB16F 0x881B +#define GL_VERTEX_ATTRIB_ARRAY_INTEGER 0x88FD +#define GL_MAX_ARRAY_TEXTURE_LAYERS 0x88FF +#define GL_MIN_PROGRAM_TEXEL_OFFSET 0x8904 +#define GL_MAX_PROGRAM_TEXEL_OFFSET 0x8905 +#define GL_CLAMP_READ_COLOR 0x891C +#define GL_FIXED_ONLY 0x891D +#define GL_MAX_VARYING_COMPONENTS 0x8B4B +#define GL_TEXTURE_1D_ARRAY 0x8C18 +#define GL_PROXY_TEXTURE_1D_ARRAY 0x8C19 +#define GL_TEXTURE_2D_ARRAY 0x8C1A +#define GL_PROXY_TEXTURE_2D_ARRAY 0x8C1B +#define GL_TEXTURE_BINDING_1D_ARRAY 0x8C1C +#define GL_TEXTURE_BINDING_2D_ARRAY 0x8C1D +#define GL_R11F_G11F_B10F 0x8C3A +#define GL_UNSIGNED_INT_10F_11F_11F_REV 0x8C3B +#define GL_RGB9_E5 0x8C3D +#define GL_UNSIGNED_INT_5_9_9_9_REV 0x8C3E +#define GL_TEXTURE_SHARED_SIZE 0x8C3F +#define GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH 0x8C76 +#define GL_TRANSFORM_FEEDBACK_BUFFER_MODE 0x8C7F +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS 0x8C80 +#define GL_TRANSFORM_FEEDBACK_VARYINGS 0x8C83 +#define GL_TRANSFORM_FEEDBACK_BUFFER_START 0x8C84 +#define GL_TRANSFORM_FEEDBACK_BUFFER_SIZE 0x8C85 +#define GL_PRIMITIVES_GENERATED 0x8C87 +#define GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN 0x8C88 +#define GL_RASTERIZER_DISCARD 0x8C89 +#define GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS 0x8C8A +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS 0x8C8B +#define GL_INTERLEAVED_ATTRIBS 0x8C8C +#define GL_SEPARATE_ATTRIBS 0x8C8D +#define GL_TRANSFORM_FEEDBACK_BUFFER 0x8C8E +#define GL_TRANSFORM_FEEDBACK_BUFFER_BINDING 0x8C8F +#define GL_RGBA32UI 0x8D70 +#define GL_RGB32UI 0x8D71 +#define GL_RGBA16UI 0x8D76 +#define GL_RGB16UI 0x8D77 +#define GL_RGBA8UI 0x8D7C +#define GL_RGB8UI 0x8D7D +#define GL_RGBA32I 0x8D82 +#define GL_RGB32I 0x8D83 +#define GL_RGBA16I 0x8D88 +#define GL_RGB16I 0x8D89 +#define GL_RGBA8I 0x8D8E +#define GL_RGB8I 0x8D8F +#define GL_RED_INTEGER 0x8D94 +#define GL_GREEN_INTEGER 0x8D95 +#define GL_BLUE_INTEGER 0x8D96 +#define GL_RGB_INTEGER 0x8D98 +#define GL_RGBA_INTEGER 0x8D99 +#define GL_BGR_INTEGER 0x8D9A 
+#define GL_BGRA_INTEGER 0x8D9B +#define GL_SAMPLER_1D_ARRAY 0x8DC0 +#define GL_SAMPLER_2D_ARRAY 0x8DC1 +#define GL_SAMPLER_1D_ARRAY_SHADOW 0x8DC3 +#define GL_SAMPLER_2D_ARRAY_SHADOW 0x8DC4 +#define GL_SAMPLER_CUBE_SHADOW 0x8DC5 +#define GL_UNSIGNED_INT_VEC2 0x8DC6 +#define GL_UNSIGNED_INT_VEC3 0x8DC7 +#define GL_UNSIGNED_INT_VEC4 0x8DC8 +#define GL_INT_SAMPLER_1D 0x8DC9 +#define GL_INT_SAMPLER_2D 0x8DCA +#define GL_INT_SAMPLER_3D 0x8DCB +#define GL_INT_SAMPLER_CUBE 0x8DCC +#define GL_INT_SAMPLER_1D_ARRAY 0x8DCE +#define GL_INT_SAMPLER_2D_ARRAY 0x8DCF +#define GL_UNSIGNED_INT_SAMPLER_1D 0x8DD1 +#define GL_UNSIGNED_INT_SAMPLER_2D 0x8DD2 +#define GL_UNSIGNED_INT_SAMPLER_3D 0x8DD3 +#define GL_UNSIGNED_INT_SAMPLER_CUBE 0x8DD4 +#define GL_UNSIGNED_INT_SAMPLER_1D_ARRAY 0x8DD6 +#define GL_UNSIGNED_INT_SAMPLER_2D_ARRAY 0x8DD7 +#define GL_QUERY_WAIT 0x8E13 +#define GL_QUERY_NO_WAIT 0x8E14 +#define GL_QUERY_BY_REGION_WAIT 0x8E15 +#define GL_QUERY_BY_REGION_NO_WAIT 0x8E16 +#define GL_BUFFER_ACCESS_FLAGS 0x911F +#define GL_BUFFER_MAP_LENGTH 0x9120 +#define GL_BUFFER_MAP_OFFSET 0x9121 +#define GL_DEPTH_COMPONENT32F 0x8CAC +#define GL_DEPTH32F_STENCIL8 0x8CAD +#define GL_FLOAT_32_UNSIGNED_INT_24_8_REV 0x8DAD +#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506 +#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING 0x8210 +#define GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE 0x8211 +#define GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE 0x8212 +#define GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE 0x8213 +#define GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE 0x8214 +#define GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE 0x8215 +#define GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE 0x8216 +#define GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE 0x8217 +#define GL_FRAMEBUFFER_DEFAULT 0x8218 +#define GL_FRAMEBUFFER_UNDEFINED 0x8219 +#define GL_DEPTH_STENCIL_ATTACHMENT 0x821A +#define GL_MAX_RENDERBUFFER_SIZE 0x84E8 +#define GL_DEPTH_STENCIL 0x84F9 +#define GL_UNSIGNED_INT_24_8 0x84FA +#define GL_DEPTH24_STENCIL8 0x88F0 +#define GL_TEXTURE_STENCIL_SIZE 0x88F1 +#define GL_TEXTURE_RED_TYPE 0x8C10 +#define GL_TEXTURE_GREEN_TYPE 0x8C11 +#define GL_TEXTURE_BLUE_TYPE 0x8C12 +#define GL_TEXTURE_ALPHA_TYPE 0x8C13 +#define GL_TEXTURE_DEPTH_TYPE 0x8C16 +#define GL_UNSIGNED_NORMALIZED 0x8C17 +#define GL_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_DRAW_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_RENDERBUFFER_BINDING 0x8CA7 +#define GL_READ_FRAMEBUFFER 0x8CA8 +#define GL_DRAW_FRAMEBUFFER 0x8CA9 +#define GL_READ_FRAMEBUFFER_BINDING 0x8CAA +#define GL_RENDERBUFFER_SAMPLES 0x8CAB +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE 0x8CD3 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER 0x8CD4 +#define GL_FRAMEBUFFER_COMPLETE 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER 0x8CDB +#define GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER 0x8CDC +#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD +#define GL_MAX_COLOR_ATTACHMENTS 0x8CDF +#define GL_COLOR_ATTACHMENT0 0x8CE0 +#define GL_COLOR_ATTACHMENT1 0x8CE1 +#define GL_COLOR_ATTACHMENT2 0x8CE2 +#define GL_COLOR_ATTACHMENT3 0x8CE3 +#define GL_COLOR_ATTACHMENT4 0x8CE4 +#define GL_COLOR_ATTACHMENT5 0x8CE5 +#define GL_COLOR_ATTACHMENT6 0x8CE6 +#define GL_COLOR_ATTACHMENT7 0x8CE7 +#define GL_COLOR_ATTACHMENT8 0x8CE8 +#define GL_COLOR_ATTACHMENT9 0x8CE9 +#define GL_COLOR_ATTACHMENT10 0x8CEA 
+#define GL_COLOR_ATTACHMENT11 0x8CEB +#define GL_COLOR_ATTACHMENT12 0x8CEC +#define GL_COLOR_ATTACHMENT13 0x8CED +#define GL_COLOR_ATTACHMENT14 0x8CEE +#define GL_COLOR_ATTACHMENT15 0x8CEF +#define GL_COLOR_ATTACHMENT16 0x8CF0 +#define GL_COLOR_ATTACHMENT17 0x8CF1 +#define GL_COLOR_ATTACHMENT18 0x8CF2 +#define GL_COLOR_ATTACHMENT19 0x8CF3 +#define GL_COLOR_ATTACHMENT20 0x8CF4 +#define GL_COLOR_ATTACHMENT21 0x8CF5 +#define GL_COLOR_ATTACHMENT22 0x8CF6 +#define GL_COLOR_ATTACHMENT23 0x8CF7 +#define GL_COLOR_ATTACHMENT24 0x8CF8 +#define GL_COLOR_ATTACHMENT25 0x8CF9 +#define GL_COLOR_ATTACHMENT26 0x8CFA +#define GL_COLOR_ATTACHMENT27 0x8CFB +#define GL_COLOR_ATTACHMENT28 0x8CFC +#define GL_COLOR_ATTACHMENT29 0x8CFD +#define GL_COLOR_ATTACHMENT30 0x8CFE +#define GL_COLOR_ATTACHMENT31 0x8CFF +#define GL_DEPTH_ATTACHMENT 0x8D00 +#define GL_STENCIL_ATTACHMENT 0x8D20 +#define GL_FRAMEBUFFER 0x8D40 +#define GL_RENDERBUFFER 0x8D41 +#define GL_RENDERBUFFER_WIDTH 0x8D42 +#define GL_RENDERBUFFER_HEIGHT 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT 0x8D44 +#define GL_STENCIL_INDEX1 0x8D46 +#define GL_STENCIL_INDEX4 0x8D47 +#define GL_STENCIL_INDEX8 0x8D48 +#define GL_STENCIL_INDEX16 0x8D49 +#define GL_RENDERBUFFER_RED_SIZE 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE 0x8D53 +#define GL_RENDERBUFFER_DEPTH_SIZE 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE 0x8D55 +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE 0x8D56 +#define GL_MAX_SAMPLES 0x8D57 +#define GL_FRAMEBUFFER_SRGB 0x8DB9 +#define GL_HALF_FLOAT 0x140B +#define GL_MAP_READ_BIT 0x0001 +#define GL_MAP_WRITE_BIT 0x0002 +#define GL_MAP_INVALIDATE_RANGE_BIT 0x0004 +#define GL_MAP_INVALIDATE_BUFFER_BIT 0x0008 +#define GL_MAP_FLUSH_EXPLICIT_BIT 0x0010 +#define GL_MAP_UNSYNCHRONIZED_BIT 0x0020 +#define GL_COMPRESSED_RED_RGTC1 0x8DBB +#define GL_COMPRESSED_SIGNED_RED_RGTC1 0x8DBC +#define GL_COMPRESSED_RG_RGTC2 0x8DBD +#define GL_COMPRESSED_SIGNED_RG_RGTC2 0x8DBE +#define GL_RG 0x8227 +#define GL_RG_INTEGER 0x8228 +#define GL_R8 0x8229 +#define GL_R16 0x822A +#define GL_RG8 0x822B +#define GL_RG16 0x822C +#define GL_R16F 0x822D +#define GL_R32F 0x822E +#define GL_RG16F 0x822F +#define GL_RG32F 0x8230 +#define GL_R8I 0x8231 +#define GL_R8UI 0x8232 +#define GL_R16I 0x8233 +#define GL_R16UI 0x8234 +#define GL_R32I 0x8235 +#define GL_R32UI 0x8236 +#define GL_RG8I 0x8237 +#define GL_RG8UI 0x8238 +#define GL_RG16I 0x8239 +#define GL_RG16UI 0x823A +#define GL_RG32I 0x823B +#define GL_RG32UI 0x823C +#define GL_VERTEX_ARRAY_BINDING 0x85B5 +typedef void (APIENTRYP PFNGLCOLORMASKIPROC) (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +typedef void (APIENTRYP PFNGLGETBOOLEANI_VPROC) (GLenum target, GLuint index, GLboolean *data); +typedef void (APIENTRYP PFNGLGETINTEGERI_VPROC) (GLenum target, GLuint index, GLint *data); +typedef void (APIENTRYP PFNGLENABLEIPROC) (GLenum target, GLuint index); +typedef void (APIENTRYP PFNGLDISABLEIPROC) (GLenum target, GLuint index); +typedef GLboolean (APIENTRYP PFNGLISENABLEDIPROC) (GLenum target, GLuint index); +typedef void (APIENTRYP PFNGLBEGINTRANSFORMFEEDBACKPROC) (GLenum primitiveMode); +typedef void (APIENTRYP PFNGLENDTRANSFORMFEEDBACKPROC) (void); +typedef void (APIENTRYP PFNGLBINDBUFFERRANGEPROC) (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLBINDBUFFERBASEPROC) (GLenum target, GLuint index, GLuint buffer); +typedef void (APIENTRYP 
PFNGLTRANSFORMFEEDBACKVARYINGSPROC) (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +typedef void (APIENTRYP PFNGLGETTRANSFORMFEEDBACKVARYINGPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +typedef void (APIENTRYP PFNGLCLAMPCOLORPROC) (GLenum target, GLenum clamp); +typedef void (APIENTRYP PFNGLBEGINCONDITIONALRENDERPROC) (GLuint id, GLenum mode); +typedef void (APIENTRYP PFNGLENDCONDITIONALRENDERPROC) (void); +typedef void (APIENTRYP PFNGLVERTEXATTRIBIPOINTERPROC) (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIUIVPROC) (GLuint index, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1IPROC) (GLuint index, GLint x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2IPROC) (GLuint index, GLint x, GLint y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3IPROC) (GLuint index, GLint x, GLint y, GLint z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4IPROC) (GLuint index, GLint x, GLint y, GLint z, GLint w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1UIPROC) (GLuint index, GLuint x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2UIPROC) (GLuint index, GLuint x, GLuint y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3UIPROC) (GLuint index, GLuint x, GLuint y, GLuint z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UIPROC) (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1IVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2IVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3IVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4IVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1UIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2UIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3UIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4BVPROC) (GLuint index, const GLbyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4SVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UBVPROC) (GLuint index, const GLubyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4USVPROC) (GLuint index, const GLushort *v); +typedef void (APIENTRYP PFNGLGETUNIFORMUIVPROC) (GLuint program, GLint location, GLuint *params); +typedef void (APIENTRYP PFNGLBINDFRAGDATALOCATIONPROC) (GLuint program, GLuint color, const GLchar *name); +typedef GLint (APIENTRYP PFNGLGETFRAGDATALOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (APIENTRYP PFNGLUNIFORM1UIPROC) (GLint location, GLuint v0); +typedef void (APIENTRYP PFNGLUNIFORM2UIPROC) (GLint location, GLuint v0, GLuint v1); +typedef void (APIENTRYP PFNGLUNIFORM3UIPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (APIENTRYP PFNGLUNIFORM4UIPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (APIENTRYP PFNGLUNIFORM1UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLUNIFORM2UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLUNIFORM3UIVPROC) (GLint location, GLsizei count, 
const GLuint *value); +typedef void (APIENTRYP PFNGLUNIFORM4UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLTEXPARAMETERIIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLTEXPARAMETERIUIVPROC) (GLenum target, GLenum pname, const GLuint *params); +typedef void (APIENTRYP PFNGLGETTEXPARAMETERIIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXPARAMETERIUIVPROC) (GLenum target, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLCLEARBUFFERIVPROC) (GLenum buffer, GLint drawbuffer, const GLint *value); +typedef void (APIENTRYP PFNGLCLEARBUFFERUIVPROC) (GLenum buffer, GLint drawbuffer, const GLuint *value); +typedef void (APIENTRYP PFNGLCLEARBUFFERFVPROC) (GLenum buffer, GLint drawbuffer, const GLfloat *value); +typedef void (APIENTRYP PFNGLCLEARBUFFERFIPROC) (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +typedef const GLubyte *(APIENTRYP PFNGLGETSTRINGIPROC) (GLenum name, GLuint index); +typedef GLboolean (APIENTRYP PFNGLISRENDERBUFFERPROC) (GLuint renderbuffer); +typedef void (APIENTRYP PFNGLBINDRENDERBUFFERPROC) (GLenum target, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLDELETERENDERBUFFERSPROC) (GLsizei n, const GLuint *renderbuffers); +typedef void (APIENTRYP PFNGLGENRENDERBUFFERSPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (APIENTRYP PFNGLRENDERBUFFERSTORAGEPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef GLboolean (APIENTRYP PFNGLISFRAMEBUFFERPROC) (GLuint framebuffer); +typedef void (APIENTRYP PFNGLBINDFRAMEBUFFERPROC) (GLenum target, GLuint framebuffer); +typedef void (APIENTRYP PFNGLDELETEFRAMEBUFFERSPROC) (GLsizei n, const GLuint *framebuffers); +typedef void (APIENTRYP PFNGLGENFRAMEBUFFERSPROC) (GLsizei n, GLuint *framebuffers); +typedef GLenum (APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSPROC) (GLenum target); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE1DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE3DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +typedef void (APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFERPROC) (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC) (GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGENERATEMIPMAPPROC) (GLenum target); +typedef void (APIENTRYP PFNGLBLITFRAMEBUFFERPROC) (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef void (APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void *(APIENTRYP PFNGLMAPBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length); 
+typedef void (APIENTRYP PFNGLBINDVERTEXARRAYPROC) (GLuint array); +typedef void (APIENTRYP PFNGLDELETEVERTEXARRAYSPROC) (GLsizei n, const GLuint *arrays); +typedef void (APIENTRYP PFNGLGENVERTEXARRAYSPROC) (GLsizei n, GLuint *arrays); +typedef GLboolean (APIENTRYP PFNGLISVERTEXARRAYPROC) (GLuint array); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glColorMaski (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +GLAPI void APIENTRY glGetBooleani_v (GLenum target, GLuint index, GLboolean *data); +GLAPI void APIENTRY glGetIntegeri_v (GLenum target, GLuint index, GLint *data); +GLAPI void APIENTRY glEnablei (GLenum target, GLuint index); +GLAPI void APIENTRY glDisablei (GLenum target, GLuint index); +GLAPI GLboolean APIENTRY glIsEnabledi (GLenum target, GLuint index); +GLAPI void APIENTRY glBeginTransformFeedback (GLenum primitiveMode); +GLAPI void APIENTRY glEndTransformFeedback (void); +GLAPI void APIENTRY glBindBufferRange (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +GLAPI void APIENTRY glBindBufferBase (GLenum target, GLuint index, GLuint buffer); +GLAPI void APIENTRY glTransformFeedbackVaryings (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +GLAPI void APIENTRY glGetTransformFeedbackVarying (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +GLAPI void APIENTRY glClampColor (GLenum target, GLenum clamp); +GLAPI void APIENTRY glBeginConditionalRender (GLuint id, GLenum mode); +GLAPI void APIENTRY glEndConditionalRender (void); +GLAPI void APIENTRY glVertexAttribIPointer (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glGetVertexAttribIiv (GLuint index, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetVertexAttribIuiv (GLuint index, GLenum pname, GLuint *params); +GLAPI void APIENTRY glVertexAttribI1i (GLuint index, GLint x); +GLAPI void APIENTRY glVertexAttribI2i (GLuint index, GLint x, GLint y); +GLAPI void APIENTRY glVertexAttribI3i (GLuint index, GLint x, GLint y, GLint z); +GLAPI void APIENTRY glVertexAttribI4i (GLuint index, GLint x, GLint y, GLint z, GLint w); +GLAPI void APIENTRY glVertexAttribI1ui (GLuint index, GLuint x); +GLAPI void APIENTRY glVertexAttribI2ui (GLuint index, GLuint x, GLuint y); +GLAPI void APIENTRY glVertexAttribI3ui (GLuint index, GLuint x, GLuint y, GLuint z); +GLAPI void APIENTRY glVertexAttribI4ui (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GLAPI void APIENTRY glVertexAttribI1iv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI2iv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI3iv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI4iv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI1uiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI2uiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI3uiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI4uiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI4bv (GLuint index, const GLbyte *v); +GLAPI void APIENTRY glVertexAttribI4sv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttribI4ubv (GLuint index, const GLubyte *v); +GLAPI void APIENTRY glVertexAttribI4usv (GLuint index, const GLushort *v); +GLAPI void APIENTRY glGetUniformuiv (GLuint program, GLint location, GLuint *params); +GLAPI void APIENTRY glBindFragDataLocation (GLuint 
program, GLuint color, const GLchar *name); +GLAPI GLint APIENTRY glGetFragDataLocation (GLuint program, const GLchar *name); +GLAPI void APIENTRY glUniform1ui (GLint location, GLuint v0); +GLAPI void APIENTRY glUniform2ui (GLint location, GLuint v0, GLuint v1); +GLAPI void APIENTRY glUniform3ui (GLint location, GLuint v0, GLuint v1, GLuint v2); +GLAPI void APIENTRY glUniform4ui (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GLAPI void APIENTRY glUniform1uiv (GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glUniform2uiv (GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glUniform3uiv (GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glUniform4uiv (GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glTexParameterIiv (GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glTexParameterIuiv (GLenum target, GLenum pname, const GLuint *params); +GLAPI void APIENTRY glGetTexParameterIiv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTexParameterIuiv (GLenum target, GLenum pname, GLuint *params); +GLAPI void APIENTRY glClearBufferiv (GLenum buffer, GLint drawbuffer, const GLint *value); +GLAPI void APIENTRY glClearBufferuiv (GLenum buffer, GLint drawbuffer, const GLuint *value); +GLAPI void APIENTRY glClearBufferfv (GLenum buffer, GLint drawbuffer, const GLfloat *value); +GLAPI void APIENTRY glClearBufferfi (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +GLAPI const GLubyte *APIENTRY glGetStringi (GLenum name, GLuint index); +GLAPI GLboolean APIENTRY glIsRenderbuffer (GLuint renderbuffer); +GLAPI void APIENTRY glBindRenderbuffer (GLenum target, GLuint renderbuffer); +GLAPI void APIENTRY glDeleteRenderbuffers (GLsizei n, const GLuint *renderbuffers); +GLAPI void APIENTRY glGenRenderbuffers (GLsizei n, GLuint *renderbuffers); +GLAPI void APIENTRY glRenderbufferStorage (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetRenderbufferParameteriv (GLenum target, GLenum pname, GLint *params); +GLAPI GLboolean APIENTRY glIsFramebuffer (GLuint framebuffer); +GLAPI void APIENTRY glBindFramebuffer (GLenum target, GLuint framebuffer); +GLAPI void APIENTRY glDeleteFramebuffers (GLsizei n, const GLuint *framebuffers); +GLAPI void APIENTRY glGenFramebuffers (GLsizei n, GLuint *framebuffers); +GLAPI GLenum APIENTRY glCheckFramebufferStatus (GLenum target); +GLAPI void APIENTRY glFramebufferTexture1D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GLAPI void APIENTRY glFramebufferTexture2D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GLAPI void APIENTRY glFramebufferTexture3D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +GLAPI void APIENTRY glFramebufferRenderbuffer (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GLAPI void APIENTRY glGetFramebufferAttachmentParameteriv (GLenum target, GLenum attachment, GLenum pname, GLint *params); +GLAPI void APIENTRY glGenerateMipmap (GLenum target); +GLAPI void APIENTRY glBlitFramebuffer (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GLAPI void APIENTRY glRenderbufferStorageMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY 
glFramebufferTextureLayer (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +GLAPI void *APIENTRY glMapBufferRange (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GLAPI void APIENTRY glFlushMappedBufferRange (GLenum target, GLintptr offset, GLsizeiptr length); +GLAPI void APIENTRY glBindVertexArray (GLuint array); +GLAPI void APIENTRY glDeleteVertexArrays (GLsizei n, const GLuint *arrays); +GLAPI void APIENTRY glGenVertexArrays (GLsizei n, GLuint *arrays); +GLAPI GLboolean APIENTRY glIsVertexArray (GLuint array); +#endif +#endif /* GL_VERSION_3_0 */ + +#ifndef GL_VERSION_3_1 +#define GL_VERSION_3_1 1 +#define GL_SAMPLER_2D_RECT 0x8B63 +#define GL_SAMPLER_2D_RECT_SHADOW 0x8B64 +#define GL_SAMPLER_BUFFER 0x8DC2 +#define GL_INT_SAMPLER_2D_RECT 0x8DCD +#define GL_INT_SAMPLER_BUFFER 0x8DD0 +#define GL_UNSIGNED_INT_SAMPLER_2D_RECT 0x8DD5 +#define GL_UNSIGNED_INT_SAMPLER_BUFFER 0x8DD8 +#define GL_TEXTURE_BUFFER 0x8C2A +#define GL_MAX_TEXTURE_BUFFER_SIZE 0x8C2B +#define GL_TEXTURE_BINDING_BUFFER 0x8C2C +#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING 0x8C2D +#define GL_TEXTURE_RECTANGLE 0x84F5 +#define GL_TEXTURE_BINDING_RECTANGLE 0x84F6 +#define GL_PROXY_TEXTURE_RECTANGLE 0x84F7 +#define GL_MAX_RECTANGLE_TEXTURE_SIZE 0x84F8 +#define GL_R8_SNORM 0x8F94 +#define GL_RG8_SNORM 0x8F95 +#define GL_RGB8_SNORM 0x8F96 +#define GL_RGBA8_SNORM 0x8F97 +#define GL_R16_SNORM 0x8F98 +#define GL_RG16_SNORM 0x8F99 +#define GL_RGB16_SNORM 0x8F9A +#define GL_RGBA16_SNORM 0x8F9B +#define GL_SIGNED_NORMALIZED 0x8F9C +#define GL_PRIMITIVE_RESTART 0x8F9D +#define GL_PRIMITIVE_RESTART_INDEX 0x8F9E +#define GL_COPY_READ_BUFFER 0x8F36 +#define GL_COPY_WRITE_BUFFER 0x8F37 +#define GL_UNIFORM_BUFFER 0x8A11 +#define GL_UNIFORM_BUFFER_BINDING 0x8A28 +#define GL_UNIFORM_BUFFER_START 0x8A29 +#define GL_UNIFORM_BUFFER_SIZE 0x8A2A +#define GL_MAX_VERTEX_UNIFORM_BLOCKS 0x8A2B +#define GL_MAX_GEOMETRY_UNIFORM_BLOCKS 0x8A2C +#define GL_MAX_FRAGMENT_UNIFORM_BLOCKS 0x8A2D +#define GL_MAX_COMBINED_UNIFORM_BLOCKS 0x8A2E +#define GL_MAX_UNIFORM_BUFFER_BINDINGS 0x8A2F +#define GL_MAX_UNIFORM_BLOCK_SIZE 0x8A30 +#define GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS 0x8A31 +#define GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS 0x8A32 +#define GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS 0x8A33 +#define GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT 0x8A34 +#define GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH 0x8A35 +#define GL_ACTIVE_UNIFORM_BLOCKS 0x8A36 +#define GL_UNIFORM_TYPE 0x8A37 +#define GL_UNIFORM_SIZE 0x8A38 +#define GL_UNIFORM_NAME_LENGTH 0x8A39 +#define GL_UNIFORM_BLOCK_INDEX 0x8A3A +#define GL_UNIFORM_OFFSET 0x8A3B +#define GL_UNIFORM_ARRAY_STRIDE 0x8A3C +#define GL_UNIFORM_MATRIX_STRIDE 0x8A3D +#define GL_UNIFORM_IS_ROW_MAJOR 0x8A3E +#define GL_UNIFORM_BLOCK_BINDING 0x8A3F +#define GL_UNIFORM_BLOCK_DATA_SIZE 0x8A40 +#define GL_UNIFORM_BLOCK_NAME_LENGTH 0x8A41 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS 0x8A42 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES 0x8A43 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER 0x8A44 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_GEOMETRY_SHADER 0x8A45 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER 0x8A46 +#define GL_INVALID_INDEX 0xFFFFFFFFu +typedef void (APIENTRYP PFNGLDRAWARRAYSINSTANCEDPROC) (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +typedef void (APIENTRYP PFNGLTEXBUFFERPROC) (GLenum target, 
GLenum internalformat, GLuint buffer); +typedef void (APIENTRYP PFNGLPRIMITIVERESTARTINDEXPROC) (GLuint index); +typedef void (APIENTRYP PFNGLCOPYBUFFERSUBDATAPROC) (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLGETUNIFORMINDICESPROC) (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMSIVPROC) (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMNAMEPROC) (GLuint program, GLuint uniformIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformName); +typedef GLuint (APIENTRYP PFNGLGETUNIFORMBLOCKINDEXPROC) (GLuint program, const GLchar *uniformBlockName); +typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKIVPROC) (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC) (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +typedef void (APIENTRYP PFNGLUNIFORMBLOCKBINDINGPROC) (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawArraysInstanced (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +GLAPI void APIENTRY glDrawElementsInstanced (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +GLAPI void APIENTRY glTexBuffer (GLenum target, GLenum internalformat, GLuint buffer); +GLAPI void APIENTRY glPrimitiveRestartIndex (GLuint index); +GLAPI void APIENTRY glCopyBufferSubData (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GLAPI void APIENTRY glGetUniformIndices (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +GLAPI void APIENTRY glGetActiveUniformsiv (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetActiveUniformName (GLuint program, GLuint uniformIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformName); +GLAPI GLuint APIENTRY glGetUniformBlockIndex (GLuint program, const GLchar *uniformBlockName); +GLAPI void APIENTRY glGetActiveUniformBlockiv (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetActiveUniformBlockName (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +GLAPI void APIENTRY glUniformBlockBinding (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +#endif +#endif /* GL_VERSION_3_1 */ + +#ifndef GL_VERSION_3_2 +#define GL_VERSION_3_2 1 +typedef struct __GLsync *GLsync; +#ifndef GLEXT_64_TYPES_DEFINED +/* This code block is duplicated in glxext.h, so must be protected */ +#define GLEXT_64_TYPES_DEFINED +/* Define int32_t, int64_t, and uint64_t types for UST/MSC */ +/* (as used in the GL_EXT_timer_query extension). 
*/ +#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L +#include <inttypes.h> +#elif defined(__sun__) || defined(__digital__) +#include <inttypes.h> +#if defined(__STDC__) +#if defined(__arch64__) || defined(_LP64) +typedef long int int64_t; +typedef unsigned long int uint64_t; +#else +typedef long long int int64_t; +typedef unsigned long long int uint64_t; +#endif /* __arch64__ */ +#endif /* __STDC__ */ +#elif defined( __VMS ) || defined(__sgi) +#include <inttypes.h> +#elif defined(__SCO__) || defined(__USLC__) +#include <stdint.h> +#elif defined(__UNIXOS2__) || defined(__SOL64__) +typedef long int int32_t; +typedef long long int int64_t; +typedef unsigned long long int uint64_t; +#elif defined(_WIN32) && defined(__GNUC__) +#include <stdint.h> +#elif defined(_WIN32) +typedef __int32 int32_t; +typedef __int64 int64_t; +typedef unsigned __int64 uint64_t; +#else +/* Fallback if nothing above works */ +#include <inttypes.h> +#endif +#endif +typedef uint64_t GLuint64; +typedef int64_t GLint64; +#define GL_CONTEXT_CORE_PROFILE_BIT 0x00000001 +#define GL_CONTEXT_COMPATIBILITY_PROFILE_BIT 0x00000002 +#define GL_LINES_ADJACENCY 0x000A +#define GL_LINE_STRIP_ADJACENCY 0x000B +#define GL_TRIANGLES_ADJACENCY 0x000C +#define GL_TRIANGLE_STRIP_ADJACENCY 0x000D +#define GL_PROGRAM_POINT_SIZE 0x8642 +#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS 0x8C29 +#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED 0x8DA7 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS 0x8DA8 +#define GL_GEOMETRY_SHADER 0x8DD9 +#define GL_GEOMETRY_VERTICES_OUT 0x8916 +#define GL_GEOMETRY_INPUT_TYPE 0x8917 +#define GL_GEOMETRY_OUTPUT_TYPE 0x8918 +#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS 0x8DDF +#define GL_MAX_GEOMETRY_OUTPUT_VERTICES 0x8DE0 +#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS 0x8DE1 +#define GL_MAX_VERTEX_OUTPUT_COMPONENTS 0x9122 +#define GL_MAX_GEOMETRY_INPUT_COMPONENTS 0x9123 +#define GL_MAX_GEOMETRY_OUTPUT_COMPONENTS 0x9124 +#define GL_MAX_FRAGMENT_INPUT_COMPONENTS 0x9125 +#define GL_CONTEXT_PROFILE_MASK 0x9126 +#define GL_DEPTH_CLAMP 0x864F +#define GL_QUADS_FOLLOW_PROVOKING_VERTEX_CONVENTION 0x8E4C +#define GL_FIRST_VERTEX_CONVENTION 0x8E4D +#define GL_LAST_VERTEX_CONVENTION 0x8E4E +#define GL_PROVOKING_VERTEX 0x8E4F +#define GL_TEXTURE_CUBE_MAP_SEAMLESS 0x884F +#define GL_MAX_SERVER_WAIT_TIMEOUT 0x9111 +#define GL_OBJECT_TYPE 0x9112 +#define GL_SYNC_CONDITION 0x9113 +#define GL_SYNC_STATUS 0x9114 +#define GL_SYNC_FLAGS 0x9115 +#define GL_SYNC_FENCE 0x9116 +#define GL_SYNC_GPU_COMMANDS_COMPLETE 0x9117 +#define GL_UNSIGNALED 0x9118 +#define GL_SIGNALED 0x9119 +#define GL_ALREADY_SIGNALED 0x911A +#define GL_TIMEOUT_EXPIRED 0x911B +#define GL_CONDITION_SATISFIED 0x911C +#define GL_WAIT_FAILED 0x911D +#define GL_TIMEOUT_IGNORED 0xFFFFFFFFFFFFFFFFull +#define GL_SYNC_FLUSH_COMMANDS_BIT 0x00000001 +#define GL_SAMPLE_POSITION 0x8E50 +#define GL_SAMPLE_MASK 0x8E51 +#define GL_SAMPLE_MASK_VALUE 0x8E52 +#define GL_MAX_SAMPLE_MASK_WORDS 0x8E59 +#define GL_TEXTURE_2D_MULTISAMPLE 0x9100 +#define GL_PROXY_TEXTURE_2D_MULTISAMPLE 0x9101 +#define GL_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9102 +#define GL_PROXY_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9103 +#define GL_TEXTURE_BINDING_2D_MULTISAMPLE 0x9104 +#define GL_TEXTURE_BINDING_2D_MULTISAMPLE_ARRAY 0x9105 +#define GL_TEXTURE_SAMPLES 0x9106 +#define GL_TEXTURE_FIXED_SAMPLE_LOCATIONS 0x9107 +#define GL_SAMPLER_2D_MULTISAMPLE 0x9108 +#define GL_INT_SAMPLER_2D_MULTISAMPLE 0x9109 +#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE 0x910A +#define GL_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910B +#define GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910C +#define 
GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910D +#define GL_MAX_COLOR_TEXTURE_SAMPLES 0x910E +#define GL_MAX_DEPTH_TEXTURE_SAMPLES 0x910F +#define GL_MAX_INTEGER_SAMPLES 0x9110 +typedef void (APIENTRYP PFNGLDRAWELEMENTSBASEVERTEXPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (APIENTRYP PFNGLDRAWRANGEELEMENTSBASEVERTEXPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSBASEVERTEXPROC) (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount, const GLint *basevertex); +typedef void (APIENTRYP PFNGLPROVOKINGVERTEXPROC) (GLenum mode); +typedef GLsync (APIENTRYP PFNGLFENCESYNCPROC) (GLenum condition, GLbitfield flags); +typedef GLboolean (APIENTRYP PFNGLISSYNCPROC) (GLsync sync); +typedef void (APIENTRYP PFNGLDELETESYNCPROC) (GLsync sync); +typedef GLenum (APIENTRYP PFNGLCLIENTWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (APIENTRYP PFNGLWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (APIENTRYP PFNGLGETINTEGER64VPROC) (GLenum pname, GLint64 *data); +typedef void (APIENTRYP PFNGLGETSYNCIVPROC) (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +typedef void (APIENTRYP PFNGLGETINTEGER64I_VPROC) (GLenum target, GLuint index, GLint64 *data); +typedef void (APIENTRYP PFNGLGETBUFFERPARAMETERI64VPROC) (GLenum target, GLenum pname, GLint64 *params); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTUREPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLTEXIMAGE2DMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXIMAGE3DMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLGETMULTISAMPLEFVPROC) (GLenum pname, GLuint index, GLfloat *val); +typedef void (APIENTRYP PFNGLSAMPLEMASKIPROC) (GLuint maskNumber, GLbitfield mask); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawElementsBaseVertex (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GLAPI void APIENTRY glDrawRangeElementsBaseVertex (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GLAPI void APIENTRY glDrawElementsInstancedBaseVertex (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +GLAPI void APIENTRY glMultiDrawElementsBaseVertex (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount, const GLint *basevertex); +GLAPI void APIENTRY glProvokingVertex (GLenum mode); +GLAPI GLsync APIENTRY glFenceSync (GLenum condition, GLbitfield flags); +GLAPI GLboolean APIENTRY glIsSync (GLsync sync); +GLAPI void APIENTRY glDeleteSync (GLsync sync); +GLAPI GLenum APIENTRY glClientWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GLAPI void APIENTRY glWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GLAPI void APIENTRY glGetInteger64v (GLenum pname, GLint64 *data); +GLAPI void APIENTRY glGetSynciv (GLsync 
sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +GLAPI void APIENTRY glGetInteger64i_v (GLenum target, GLuint index, GLint64 *data); +GLAPI void APIENTRY glGetBufferParameteri64v (GLenum target, GLenum pname, GLint64 *params); +GLAPI void APIENTRY glFramebufferTexture (GLenum target, GLenum attachment, GLuint texture, GLint level); +GLAPI void APIENTRY glTexImage2DMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTexImage3DMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glGetMultisamplefv (GLenum pname, GLuint index, GLfloat *val); +GLAPI void APIENTRY glSampleMaski (GLuint maskNumber, GLbitfield mask); +#endif +#endif /* GL_VERSION_3_2 */ + +#ifndef GL_VERSION_3_3 +#define GL_VERSION_3_3 1 +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR 0x88FE +#define GL_SRC1_COLOR 0x88F9 +#define GL_ONE_MINUS_SRC1_COLOR 0x88FA +#define GL_ONE_MINUS_SRC1_ALPHA 0x88FB +#define GL_MAX_DUAL_SOURCE_DRAW_BUFFERS 0x88FC +#define GL_ANY_SAMPLES_PASSED 0x8C2F +#define GL_SAMPLER_BINDING 0x8919 +#define GL_RGB10_A2UI 0x906F +#define GL_TEXTURE_SWIZZLE_R 0x8E42 +#define GL_TEXTURE_SWIZZLE_G 0x8E43 +#define GL_TEXTURE_SWIZZLE_B 0x8E44 +#define GL_TEXTURE_SWIZZLE_A 0x8E45 +#define GL_TEXTURE_SWIZZLE_RGBA 0x8E46 +#define GL_TIME_ELAPSED 0x88BF +#define GL_TIMESTAMP 0x8E28 +#define GL_INT_2_10_10_10_REV 0x8D9F +typedef void (APIENTRYP PFNGLBINDFRAGDATALOCATIONINDEXEDPROC) (GLuint program, GLuint colorNumber, GLuint index, const GLchar *name); +typedef GLint (APIENTRYP PFNGLGETFRAGDATAINDEXPROC) (GLuint program, const GLchar *name); +typedef void (APIENTRYP PFNGLGENSAMPLERSPROC) (GLsizei count, GLuint *samplers); +typedef void (APIENTRYP PFNGLDELETESAMPLERSPROC) (GLsizei count, const GLuint *samplers); +typedef GLboolean (APIENTRYP PFNGLISSAMPLERPROC) (GLuint sampler); +typedef void (APIENTRYP PFNGLBINDSAMPLERPROC) (GLuint unit, GLuint sampler); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIPROC) (GLuint sampler, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIVPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERFPROC) (GLuint sampler, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, const GLfloat *param); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIIVPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIUIVPROC) (GLuint sampler, GLenum pname, const GLuint *param); +typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERIVPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERIIVPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVPROC) (GLuint sampler, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLQUERYCOUNTERPROC) (GLuint id, GLenum target); +typedef void (APIENTRYP PFNGLGETQUERYOBJECTI64VPROC) (GLuint id, GLenum pname, GLint64 *params); +typedef void (APIENTRYP PFNGLGETQUERYOBJECTUI64VPROC) (GLuint id, GLenum pname, GLuint64 *params); +typedef void (APIENTRYP PFNGLVERTEXATTRIBDIVISORPROC) (GLuint index, GLuint divisor); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP1UIPROC) 
(GLuint index, GLenum type, GLboolean normalized, GLuint value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP1UIVPROC) (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP2UIPROC) (GLuint index, GLenum type, GLboolean normalized, GLuint value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP2UIVPROC) (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP3UIPROC) (GLuint index, GLenum type, GLboolean normalized, GLuint value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP3UIVPROC) (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP4UIPROC) (GLuint index, GLenum type, GLboolean normalized, GLuint value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP4UIVPROC) (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBindFragDataLocationIndexed (GLuint program, GLuint colorNumber, GLuint index, const GLchar *name); +GLAPI GLint APIENTRY glGetFragDataIndex (GLuint program, const GLchar *name); +GLAPI void APIENTRY glGenSamplers (GLsizei count, GLuint *samplers); +GLAPI void APIENTRY glDeleteSamplers (GLsizei count, const GLuint *samplers); +GLAPI GLboolean APIENTRY glIsSampler (GLuint sampler); +GLAPI void APIENTRY glBindSampler (GLuint unit, GLuint sampler); +GLAPI void APIENTRY glSamplerParameteri (GLuint sampler, GLenum pname, GLint param); +GLAPI void APIENTRY glSamplerParameteriv (GLuint sampler, GLenum pname, const GLint *param); +GLAPI void APIENTRY glSamplerParameterf (GLuint sampler, GLenum pname, GLfloat param); +GLAPI void APIENTRY glSamplerParameterfv (GLuint sampler, GLenum pname, const GLfloat *param); +GLAPI void APIENTRY glSamplerParameterIiv (GLuint sampler, GLenum pname, const GLint *param); +GLAPI void APIENTRY glSamplerParameterIuiv (GLuint sampler, GLenum pname, const GLuint *param); +GLAPI void APIENTRY glGetSamplerParameteriv (GLuint sampler, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetSamplerParameterIiv (GLuint sampler, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetSamplerParameterfv (GLuint sampler, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetSamplerParameterIuiv (GLuint sampler, GLenum pname, GLuint *params); +GLAPI void APIENTRY glQueryCounter (GLuint id, GLenum target); +GLAPI void APIENTRY glGetQueryObjecti64v (GLuint id, GLenum pname, GLint64 *params); +GLAPI void APIENTRY glGetQueryObjectui64v (GLuint id, GLenum pname, GLuint64 *params); +GLAPI void APIENTRY glVertexAttribDivisor (GLuint index, GLuint divisor); +GLAPI void APIENTRY glVertexAttribP1ui (GLuint index, GLenum type, GLboolean normalized, GLuint value); +GLAPI void APIENTRY glVertexAttribP1uiv (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +GLAPI void APIENTRY glVertexAttribP2ui (GLuint index, GLenum type, GLboolean normalized, GLuint value); +GLAPI void APIENTRY glVertexAttribP2uiv (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +GLAPI void APIENTRY glVertexAttribP3ui (GLuint index, GLenum type, GLboolean normalized, GLuint value); +GLAPI void APIENTRY glVertexAttribP3uiv (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +GLAPI void APIENTRY glVertexAttribP4ui (GLuint index, GLenum type, GLboolean normalized, GLuint value); +GLAPI void APIENTRY glVertexAttribP4uiv (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +#endif +#endif /* 
GL_VERSION_3_3 */ + +#ifndef GL_VERSION_4_0 +#define GL_VERSION_4_0 1 +#define GL_SAMPLE_SHADING 0x8C36 +#define GL_MIN_SAMPLE_SHADING_VALUE 0x8C37 +#define GL_MIN_PROGRAM_TEXTURE_GATHER_OFFSET 0x8E5E +#define GL_MAX_PROGRAM_TEXTURE_GATHER_OFFSET 0x8E5F +#define GL_TEXTURE_CUBE_MAP_ARRAY 0x9009 +#define GL_TEXTURE_BINDING_CUBE_MAP_ARRAY 0x900A +#define GL_PROXY_TEXTURE_CUBE_MAP_ARRAY 0x900B +#define GL_SAMPLER_CUBE_MAP_ARRAY 0x900C +#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW 0x900D +#define GL_INT_SAMPLER_CUBE_MAP_ARRAY 0x900E +#define GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY 0x900F +#define GL_DRAW_INDIRECT_BUFFER 0x8F3F +#define GL_DRAW_INDIRECT_BUFFER_BINDING 0x8F43 +#define GL_GEOMETRY_SHADER_INVOCATIONS 0x887F +#define GL_MAX_GEOMETRY_SHADER_INVOCATIONS 0x8E5A +#define GL_MIN_FRAGMENT_INTERPOLATION_OFFSET 0x8E5B +#define GL_MAX_FRAGMENT_INTERPOLATION_OFFSET 0x8E5C +#define GL_FRAGMENT_INTERPOLATION_OFFSET_BITS 0x8E5D +#define GL_MAX_VERTEX_STREAMS 0x8E71 +#define GL_DOUBLE_VEC2 0x8FFC +#define GL_DOUBLE_VEC3 0x8FFD +#define GL_DOUBLE_VEC4 0x8FFE +#define GL_DOUBLE_MAT2 0x8F46 +#define GL_DOUBLE_MAT3 0x8F47 +#define GL_DOUBLE_MAT4 0x8F48 +#define GL_DOUBLE_MAT2x3 0x8F49 +#define GL_DOUBLE_MAT2x4 0x8F4A +#define GL_DOUBLE_MAT3x2 0x8F4B +#define GL_DOUBLE_MAT3x4 0x8F4C +#define GL_DOUBLE_MAT4x2 0x8F4D +#define GL_DOUBLE_MAT4x3 0x8F4E +#define GL_ACTIVE_SUBROUTINES 0x8DE5 +#define GL_ACTIVE_SUBROUTINE_UNIFORMS 0x8DE6 +#define GL_ACTIVE_SUBROUTINE_UNIFORM_LOCATIONS 0x8E47 +#define GL_ACTIVE_SUBROUTINE_MAX_LENGTH 0x8E48 +#define GL_ACTIVE_SUBROUTINE_UNIFORM_MAX_LENGTH 0x8E49 +#define GL_MAX_SUBROUTINES 0x8DE7 +#define GL_MAX_SUBROUTINE_UNIFORM_LOCATIONS 0x8DE8 +#define GL_NUM_COMPATIBLE_SUBROUTINES 0x8E4A +#define GL_COMPATIBLE_SUBROUTINES 0x8E4B +#define GL_PATCHES 0x000E +#define GL_PATCH_VERTICES 0x8E72 +#define GL_PATCH_DEFAULT_INNER_LEVEL 0x8E73 +#define GL_PATCH_DEFAULT_OUTER_LEVEL 0x8E74 +#define GL_TESS_CONTROL_OUTPUT_VERTICES 0x8E75 +#define GL_TESS_GEN_MODE 0x8E76 +#define GL_TESS_GEN_SPACING 0x8E77 +#define GL_TESS_GEN_VERTEX_ORDER 0x8E78 +#define GL_TESS_GEN_POINT_MODE 0x8E79 +#define GL_ISOLINES 0x8E7A +#define GL_FRACTIONAL_ODD 0x8E7B +#define GL_FRACTIONAL_EVEN 0x8E7C +#define GL_MAX_PATCH_VERTICES 0x8E7D +#define GL_MAX_TESS_GEN_LEVEL 0x8E7E +#define GL_MAX_TESS_CONTROL_UNIFORM_COMPONENTS 0x8E7F +#define GL_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS 0x8E80 +#define GL_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS 0x8E81 +#define GL_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS 0x8E82 +#define GL_MAX_TESS_CONTROL_OUTPUT_COMPONENTS 0x8E83 +#define GL_MAX_TESS_PATCH_COMPONENTS 0x8E84 +#define GL_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS 0x8E85 +#define GL_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS 0x8E86 +#define GL_MAX_TESS_CONTROL_UNIFORM_BLOCKS 0x8E89 +#define GL_MAX_TESS_EVALUATION_UNIFORM_BLOCKS 0x8E8A +#define GL_MAX_TESS_CONTROL_INPUT_COMPONENTS 0x886C +#define GL_MAX_TESS_EVALUATION_INPUT_COMPONENTS 0x886D +#define GL_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS 0x8E1E +#define GL_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS 0x8E1F +#define GL_UNIFORM_BLOCK_REFERENCED_BY_TESS_CONTROL_SHADER 0x84F0 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_TESS_EVALUATION_SHADER 0x84F1 +#define GL_TESS_EVALUATION_SHADER 0x8E87 +#define GL_TESS_CONTROL_SHADER 0x8E88 +#define GL_TRANSFORM_FEEDBACK 0x8E22 +#define GL_TRANSFORM_FEEDBACK_BUFFER_PAUSED 0x8E23 +#define GL_TRANSFORM_FEEDBACK_BUFFER_ACTIVE 0x8E24 +#define GL_TRANSFORM_FEEDBACK_BINDING 0x8E25 +#define GL_MAX_TRANSFORM_FEEDBACK_BUFFERS 0x8E70 +typedef void 
(APIENTRYP PFNGLMINSAMPLESHADINGPROC) (GLfloat value); +typedef void (APIENTRYP PFNGLBLENDEQUATIONIPROC) (GLuint buf, GLenum mode); +typedef void (APIENTRYP PFNGLBLENDEQUATIONSEPARATEIPROC) (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (APIENTRYP PFNGLBLENDFUNCIPROC) (GLuint buf, GLenum src, GLenum dst); +typedef void (APIENTRYP PFNGLBLENDFUNCSEPARATEIPROC) (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +typedef void (APIENTRYP PFNGLDRAWARRAYSINDIRECTPROC) (GLenum mode, const void *indirect); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINDIRECTPROC) (GLenum mode, GLenum type, const void *indirect); +typedef void (APIENTRYP PFNGLUNIFORM1DPROC) (GLint location, GLdouble x); +typedef void (APIENTRYP PFNGLUNIFORM2DPROC) (GLint location, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLUNIFORM3DPROC) (GLint location, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLUNIFORM4DPROC) (GLint location, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLUNIFORM1DVPROC) (GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORM2DVPROC) (GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORM3DVPROC) (GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORM4DVPROC) (GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2X3DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2X4DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3X2DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3X4DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4X2DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4X3DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLGETUNIFORMDVPROC) (GLuint program, GLint location, GLdouble *params); +typedef GLint (APIENTRYP PFNGLGETSUBROUTINEUNIFORMLOCATIONPROC) (GLuint program, GLenum shadertype, const GLchar *name); +typedef GLuint (APIENTRYP PFNGLGETSUBROUTINEINDEXPROC) (GLuint program, GLenum shadertype, const GLchar *name); +typedef void (APIENTRYP PFNGLGETACTIVESUBROUTINEUNIFORMIVPROC) (GLuint program, GLenum shadertype, GLuint index, GLenum pname, GLint *values); +typedef void (APIENTRYP PFNGLGETACTIVESUBROUTINEUNIFORMNAMEPROC) (GLuint program, GLenum shadertype, GLuint index, GLsizei bufsize, GLsizei *length, GLchar *name); +typedef void (APIENTRYP PFNGLGETACTIVESUBROUTINENAMEPROC) (GLuint program, GLenum shadertype, GLuint index, GLsizei bufsize, GLsizei *length, GLchar *name); +typedef void (APIENTRYP PFNGLUNIFORMSUBROUTINESUIVPROC) (GLenum shadertype, GLsizei count, const GLuint *indices); +typedef void (APIENTRYP 
PFNGLGETUNIFORMSUBROUTINEUIVPROC) (GLenum shadertype, GLint location, GLuint *params); +typedef void (APIENTRYP PFNGLGETPROGRAMSTAGEIVPROC) (GLuint program, GLenum shadertype, GLenum pname, GLint *values); +typedef void (APIENTRYP PFNGLPATCHPARAMETERIPROC) (GLenum pname, GLint value); +typedef void (APIENTRYP PFNGLPATCHPARAMETERFVPROC) (GLenum pname, const GLfloat *values); +typedef void (APIENTRYP PFNGLBINDTRANSFORMFEEDBACKPROC) (GLenum target, GLuint id); +typedef void (APIENTRYP PFNGLDELETETRANSFORMFEEDBACKSPROC) (GLsizei n, const GLuint *ids); +typedef void (APIENTRYP PFNGLGENTRANSFORMFEEDBACKSPROC) (GLsizei n, GLuint *ids); +typedef GLboolean (APIENTRYP PFNGLISTRANSFORMFEEDBACKPROC) (GLuint id); +typedef void (APIENTRYP PFNGLPAUSETRANSFORMFEEDBACKPROC) (void); +typedef void (APIENTRYP PFNGLRESUMETRANSFORMFEEDBACKPROC) (void); +typedef void (APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKPROC) (GLenum mode, GLuint id); +typedef void (APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKSTREAMPROC) (GLenum mode, GLuint id, GLuint stream); +typedef void (APIENTRYP PFNGLBEGINQUERYINDEXEDPROC) (GLenum target, GLuint index, GLuint id); +typedef void (APIENTRYP PFNGLENDQUERYINDEXEDPROC) (GLenum target, GLuint index); +typedef void (APIENTRYP PFNGLGETQUERYINDEXEDIVPROC) (GLenum target, GLuint index, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMinSampleShading (GLfloat value); +GLAPI void APIENTRY glBlendEquationi (GLuint buf, GLenum mode); +GLAPI void APIENTRY glBlendEquationSeparatei (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GLAPI void APIENTRY glBlendFunci (GLuint buf, GLenum src, GLenum dst); +GLAPI void APIENTRY glBlendFuncSeparatei (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +GLAPI void APIENTRY glDrawArraysIndirect (GLenum mode, const void *indirect); +GLAPI void APIENTRY glDrawElementsIndirect (GLenum mode, GLenum type, const void *indirect); +GLAPI void APIENTRY glUniform1d (GLint location, GLdouble x); +GLAPI void APIENTRY glUniform2d (GLint location, GLdouble x, GLdouble y); +GLAPI void APIENTRY glUniform3d (GLint location, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glUniform4d (GLint location, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glUniform1dv (GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glUniform2dv (GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glUniform3dv (GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glUniform4dv (GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix2dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix3dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix4dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix2x3dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix2x4dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix3x2dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix3x4dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix4x2dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble 
*value); +GLAPI void APIENTRY glUniformMatrix4x3dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glGetUniformdv (GLuint program, GLint location, GLdouble *params); +GLAPI GLint APIENTRY glGetSubroutineUniformLocation (GLuint program, GLenum shadertype, const GLchar *name); +GLAPI GLuint APIENTRY glGetSubroutineIndex (GLuint program, GLenum shadertype, const GLchar *name); +GLAPI void APIENTRY glGetActiveSubroutineUniformiv (GLuint program, GLenum shadertype, GLuint index, GLenum pname, GLint *values); +GLAPI void APIENTRY glGetActiveSubroutineUniformName (GLuint program, GLenum shadertype, GLuint index, GLsizei bufsize, GLsizei *length, GLchar *name); +GLAPI void APIENTRY glGetActiveSubroutineName (GLuint program, GLenum shadertype, GLuint index, GLsizei bufsize, GLsizei *length, GLchar *name); +GLAPI void APIENTRY glUniformSubroutinesuiv (GLenum shadertype, GLsizei count, const GLuint *indices); +GLAPI void APIENTRY glGetUniformSubroutineuiv (GLenum shadertype, GLint location, GLuint *params); +GLAPI void APIENTRY glGetProgramStageiv (GLuint program, GLenum shadertype, GLenum pname, GLint *values); +GLAPI void APIENTRY glPatchParameteri (GLenum pname, GLint value); +GLAPI void APIENTRY glPatchParameterfv (GLenum pname, const GLfloat *values); +GLAPI void APIENTRY glBindTransformFeedback (GLenum target, GLuint id); +GLAPI void APIENTRY glDeleteTransformFeedbacks (GLsizei n, const GLuint *ids); +GLAPI void APIENTRY glGenTransformFeedbacks (GLsizei n, GLuint *ids); +GLAPI GLboolean APIENTRY glIsTransformFeedback (GLuint id); +GLAPI void APIENTRY glPauseTransformFeedback (void); +GLAPI void APIENTRY glResumeTransformFeedback (void); +GLAPI void APIENTRY glDrawTransformFeedback (GLenum mode, GLuint id); +GLAPI void APIENTRY glDrawTransformFeedbackStream (GLenum mode, GLuint id, GLuint stream); +GLAPI void APIENTRY glBeginQueryIndexed (GLenum target, GLuint index, GLuint id); +GLAPI void APIENTRY glEndQueryIndexed (GLenum target, GLuint index); +GLAPI void APIENTRY glGetQueryIndexediv (GLenum target, GLuint index, GLenum pname, GLint *params); +#endif +#endif /* GL_VERSION_4_0 */ + +#ifndef GL_VERSION_4_1 +#define GL_VERSION_4_1 1 +#define GL_FIXED 0x140C +#define GL_IMPLEMENTATION_COLOR_READ_TYPE 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT 0x8B9B +#define GL_LOW_FLOAT 0x8DF0 +#define GL_MEDIUM_FLOAT 0x8DF1 +#define GL_HIGH_FLOAT 0x8DF2 +#define GL_LOW_INT 0x8DF3 +#define GL_MEDIUM_INT 0x8DF4 +#define GL_HIGH_INT 0x8DF5 +#define GL_SHADER_COMPILER 0x8DFA +#define GL_SHADER_BINARY_FORMATS 0x8DF8 +#define GL_NUM_SHADER_BINARY_FORMATS 0x8DF9 +#define GL_MAX_VERTEX_UNIFORM_VECTORS 0x8DFB +#define GL_MAX_VARYING_VECTORS 0x8DFC +#define GL_MAX_FRAGMENT_UNIFORM_VECTORS 0x8DFD +#define GL_RGB565 0x8D62 +#define GL_PROGRAM_BINARY_RETRIEVABLE_HINT 0x8257 +#define GL_PROGRAM_BINARY_LENGTH 0x8741 +#define GL_NUM_PROGRAM_BINARY_FORMATS 0x87FE +#define GL_PROGRAM_BINARY_FORMATS 0x87FF +#define GL_VERTEX_SHADER_BIT 0x00000001 +#define GL_FRAGMENT_SHADER_BIT 0x00000002 +#define GL_GEOMETRY_SHADER_BIT 0x00000004 +#define GL_TESS_CONTROL_SHADER_BIT 0x00000008 +#define GL_TESS_EVALUATION_SHADER_BIT 0x00000010 +#define GL_ALL_SHADER_BITS 0xFFFFFFFF +#define GL_PROGRAM_SEPARABLE 0x8258 +#define GL_ACTIVE_PROGRAM 0x8259 +#define GL_PROGRAM_PIPELINE_BINDING 0x825A +#define GL_MAX_VIEWPORTS 0x825B +#define GL_VIEWPORT_SUBPIXEL_BITS 0x825C +#define GL_VIEWPORT_BOUNDS_RANGE 0x825D +#define GL_LAYER_PROVOKING_VERTEX 0x825E +#define 
GL_VIEWPORT_INDEX_PROVOKING_VERTEX 0x825F +#define GL_UNDEFINED_VERTEX 0x8260 +typedef void (APIENTRYP PFNGLRELEASESHADERCOMPILERPROC) (void); +typedef void (APIENTRYP PFNGLSHADERBINARYPROC) (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +typedef void (APIENTRYP PFNGLGETSHADERPRECISIONFORMATPROC) (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +typedef void (APIENTRYP PFNGLDEPTHRANGEFPROC) (GLfloat n, GLfloat f); +typedef void (APIENTRYP PFNGLCLEARDEPTHFPROC) (GLfloat d); +typedef void (APIENTRYP PFNGLGETPROGRAMBINARYPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +typedef void (APIENTRYP PFNGLPROGRAMBINARYPROC) (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +typedef void (APIENTRYP PFNGLPROGRAMPARAMETERIPROC) (GLuint program, GLenum pname, GLint value); +typedef void (APIENTRYP PFNGLUSEPROGRAMSTAGESPROC) (GLuint pipeline, GLbitfield stages, GLuint program); +typedef void (APIENTRYP PFNGLACTIVESHADERPROGRAMPROC) (GLuint pipeline, GLuint program); +typedef GLuint (APIENTRYP PFNGLCREATESHADERPROGRAMVPROC) (GLenum type, GLsizei count, const GLchar *const*strings); +typedef void (APIENTRYP PFNGLBINDPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (APIENTRYP PFNGLDELETEPROGRAMPIPELINESPROC) (GLsizei n, const GLuint *pipelines); +typedef void (APIENTRYP PFNGLGENPROGRAMPIPELINESPROC) (GLsizei n, GLuint *pipelines); +typedef GLboolean (APIENTRYP PFNGLISPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (APIENTRYP PFNGLGETPROGRAMPIPELINEIVPROC) (GLuint pipeline, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1IPROC) (GLuint program, GLint location, GLint v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1FPROC) (GLuint program, GLint location, GLfloat v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1DPROC) (GLuint program, GLint location, GLdouble v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1DVPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UIPROC) (GLuint program, GLint location, GLuint v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2IPROC) (GLuint program, GLint location, GLint v0, GLint v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2DPROC) (GLuint program, GLint location, GLdouble v0, GLdouble v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2DVPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3IPROC) (GLuint program, 
GLint location, GLint v0, GLint v1, GLint v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3DPROC) (GLuint program, GLint location, GLdouble v0, GLdouble v1, GLdouble v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3DVPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4IPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4DPROC) (GLuint program, GLint location, GLdouble v0, GLdouble v1, GLdouble v2, GLdouble v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4DVPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4FVPROC) (GLuint program, GLint location, 
GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLVALIDATEPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (APIENTRYP PFNGLGETPROGRAMPIPELINEINFOLOGPROC) (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1DPROC) (GLuint index, GLdouble x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2DPROC) (GLuint index, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3DPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4DPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBLPOINTERPROC) (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBLDVPROC) (GLuint index, GLenum pname, GLdouble *params); +typedef void (APIENTRYP PFNGLVIEWPORTARRAYVPROC) (GLuint first, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLVIEWPORTINDEXEDFPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +typedef void (APIENTRYP PFNGLVIEWPORTINDEXEDFVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLSCISSORARRAYVPROC) (GLuint first, GLsizei count, const GLint *v); +typedef void (APIENTRYP PFNGLSCISSORINDEXEDPROC) (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLSCISSORINDEXEDVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLDEPTHRANGEARRAYVPROC) (GLuint first, GLsizei count, const GLdouble *v); +typedef void (APIENTRYP PFNGLDEPTHRANGEINDEXEDPROC) (GLuint index, GLdouble n, GLdouble f); +typedef void (APIENTRYP PFNGLGETFLOATI_VPROC) (GLenum target, GLuint index, GLfloat *data); +typedef void (APIENTRYP PFNGLGETDOUBLEI_VPROC) (GLenum target, GLuint index, GLdouble *data); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glReleaseShaderCompiler (void); +GLAPI void APIENTRY glShaderBinary (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +GLAPI void APIENTRY 
glGetShaderPrecisionFormat (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +GLAPI void APIENTRY glDepthRangef (GLfloat n, GLfloat f); +GLAPI void APIENTRY glClearDepthf (GLfloat d); +GLAPI void APIENTRY glGetProgramBinary (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +GLAPI void APIENTRY glProgramBinary (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +GLAPI void APIENTRY glProgramParameteri (GLuint program, GLenum pname, GLint value); +GLAPI void APIENTRY glUseProgramStages (GLuint pipeline, GLbitfield stages, GLuint program); +GLAPI void APIENTRY glActiveShaderProgram (GLuint pipeline, GLuint program); +GLAPI GLuint APIENTRY glCreateShaderProgramv (GLenum type, GLsizei count, const GLchar *const*strings); +GLAPI void APIENTRY glBindProgramPipeline (GLuint pipeline); +GLAPI void APIENTRY glDeleteProgramPipelines (GLsizei n, const GLuint *pipelines); +GLAPI void APIENTRY glGenProgramPipelines (GLsizei n, GLuint *pipelines); +GLAPI GLboolean APIENTRY glIsProgramPipeline (GLuint pipeline); +GLAPI void APIENTRY glGetProgramPipelineiv (GLuint pipeline, GLenum pname, GLint *params); +GLAPI void APIENTRY glProgramUniform1i (GLuint program, GLint location, GLint v0); +GLAPI void APIENTRY glProgramUniform1iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform1f (GLuint program, GLint location, GLfloat v0); +GLAPI void APIENTRY glProgramUniform1fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform1d (GLuint program, GLint location, GLdouble v0); +GLAPI void APIENTRY glProgramUniform1dv (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform1ui (GLuint program, GLint location, GLuint v0); +GLAPI void APIENTRY glProgramUniform1uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniform2i (GLuint program, GLint location, GLint v0, GLint v1); +GLAPI void APIENTRY glProgramUniform2iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform2f (GLuint program, GLint location, GLfloat v0, GLfloat v1); +GLAPI void APIENTRY glProgramUniform2fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform2d (GLuint program, GLint location, GLdouble v0, GLdouble v1); +GLAPI void APIENTRY glProgramUniform2dv (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform2ui (GLuint program, GLint location, GLuint v0, GLuint v1); +GLAPI void APIENTRY glProgramUniform2uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniform3i (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +GLAPI void APIENTRY glProgramUniform3iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform3f (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GLAPI void APIENTRY glProgramUniform3fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform3d (GLuint program, GLint location, GLdouble v0, GLdouble v1, GLdouble v2); +GLAPI void APIENTRY glProgramUniform3dv (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform3ui (GLuint program, GLint location, 
GLuint v0, GLuint v1, GLuint v2); +GLAPI void APIENTRY glProgramUniform3uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniform4i (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GLAPI void APIENTRY glProgramUniform4iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform4f (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GLAPI void APIENTRY glProgramUniform4fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform4d (GLuint program, GLint location, GLdouble v0, GLdouble v1, GLdouble v2, GLdouble v3); +GLAPI void APIENTRY glProgramUniform4dv (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform4ui (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GLAPI void APIENTRY glProgramUniform4uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniformMatrix2fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix2dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix3dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix2x3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3x2fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix2x4fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4x2fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3x4fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4x3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix2x3dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix3x2dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix2x4dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4x2dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix3x4dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4x3dv (GLuint program, GLint location, 
GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glValidateProgramPipeline (GLuint pipeline); +GLAPI void APIENTRY glGetProgramPipelineInfoLog (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GLAPI void APIENTRY glVertexAttribL1d (GLuint index, GLdouble x); +GLAPI void APIENTRY glVertexAttribL2d (GLuint index, GLdouble x, GLdouble y); +GLAPI void APIENTRY glVertexAttribL3d (GLuint index, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glVertexAttribL4d (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glVertexAttribL1dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribL2dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribL3dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribL4dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribLPointer (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glGetVertexAttribLdv (GLuint index, GLenum pname, GLdouble *params); +GLAPI void APIENTRY glViewportArrayv (GLuint first, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glViewportIndexedf (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +GLAPI void APIENTRY glViewportIndexedfv (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glScissorArrayv (GLuint first, GLsizei count, const GLint *v); +GLAPI void APIENTRY glScissorIndexed (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +GLAPI void APIENTRY glScissorIndexedv (GLuint index, const GLint *v); +GLAPI void APIENTRY glDepthRangeArrayv (GLuint first, GLsizei count, const GLdouble *v); +GLAPI void APIENTRY glDepthRangeIndexed (GLuint index, GLdouble n, GLdouble f); +GLAPI void APIENTRY glGetFloati_v (GLenum target, GLuint index, GLfloat *data); +GLAPI void APIENTRY glGetDoublei_v (GLenum target, GLuint index, GLdouble *data); +#endif +#endif /* GL_VERSION_4_1 */ + +#ifndef GL_VERSION_4_2 +#define GL_VERSION_4_2 1 +#define GL_COPY_READ_BUFFER_BINDING 0x8F36 +#define GL_COPY_WRITE_BUFFER_BINDING 0x8F37 +#define GL_TRANSFORM_FEEDBACK_ACTIVE 0x8E24 +#define GL_TRANSFORM_FEEDBACK_PAUSED 0x8E23 +#define GL_UNPACK_COMPRESSED_BLOCK_WIDTH 0x9127 +#define GL_UNPACK_COMPRESSED_BLOCK_HEIGHT 0x9128 +#define GL_UNPACK_COMPRESSED_BLOCK_DEPTH 0x9129 +#define GL_UNPACK_COMPRESSED_BLOCK_SIZE 0x912A +#define GL_PACK_COMPRESSED_BLOCK_WIDTH 0x912B +#define GL_PACK_COMPRESSED_BLOCK_HEIGHT 0x912C +#define GL_PACK_COMPRESSED_BLOCK_DEPTH 0x912D +#define GL_PACK_COMPRESSED_BLOCK_SIZE 0x912E +#define GL_NUM_SAMPLE_COUNTS 0x9380 +#define GL_MIN_MAP_BUFFER_ALIGNMENT 0x90BC +#define GL_ATOMIC_COUNTER_BUFFER 0x92C0 +#define GL_ATOMIC_COUNTER_BUFFER_BINDING 0x92C1 +#define GL_ATOMIC_COUNTER_BUFFER_START 0x92C2 +#define GL_ATOMIC_COUNTER_BUFFER_SIZE 0x92C3 +#define GL_ATOMIC_COUNTER_BUFFER_DATA_SIZE 0x92C4 +#define GL_ATOMIC_COUNTER_BUFFER_ACTIVE_ATOMIC_COUNTERS 0x92C5 +#define GL_ATOMIC_COUNTER_BUFFER_ACTIVE_ATOMIC_COUNTER_INDICES 0x92C6 +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_VERTEX_SHADER 0x92C7 +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_TESS_CONTROL_SHADER 0x92C8 +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_TESS_EVALUATION_SHADER 0x92C9 +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_GEOMETRY_SHADER 0x92CA +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_FRAGMENT_SHADER 0x92CB +#define GL_MAX_VERTEX_ATOMIC_COUNTER_BUFFERS 0x92CC +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS 0x92CD +#define 
GL_MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS 0x92CE +#define GL_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS 0x92CF +#define GL_MAX_FRAGMENT_ATOMIC_COUNTER_BUFFERS 0x92D0 +#define GL_MAX_COMBINED_ATOMIC_COUNTER_BUFFERS 0x92D1 +#define GL_MAX_VERTEX_ATOMIC_COUNTERS 0x92D2 +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTERS 0x92D3 +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTERS 0x92D4 +#define GL_MAX_GEOMETRY_ATOMIC_COUNTERS 0x92D5 +#define GL_MAX_FRAGMENT_ATOMIC_COUNTERS 0x92D6 +#define GL_MAX_COMBINED_ATOMIC_COUNTERS 0x92D7 +#define GL_MAX_ATOMIC_COUNTER_BUFFER_SIZE 0x92D8 +#define GL_MAX_ATOMIC_COUNTER_BUFFER_BINDINGS 0x92DC +#define GL_ACTIVE_ATOMIC_COUNTER_BUFFERS 0x92D9 +#define GL_UNIFORM_ATOMIC_COUNTER_BUFFER_INDEX 0x92DA +#define GL_UNSIGNED_INT_ATOMIC_COUNTER 0x92DB +#define GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT 0x00000001 +#define GL_ELEMENT_ARRAY_BARRIER_BIT 0x00000002 +#define GL_UNIFORM_BARRIER_BIT 0x00000004 +#define GL_TEXTURE_FETCH_BARRIER_BIT 0x00000008 +#define GL_SHADER_IMAGE_ACCESS_BARRIER_BIT 0x00000020 +#define GL_COMMAND_BARRIER_BIT 0x00000040 +#define GL_PIXEL_BUFFER_BARRIER_BIT 0x00000080 +#define GL_TEXTURE_UPDATE_BARRIER_BIT 0x00000100 +#define GL_BUFFER_UPDATE_BARRIER_BIT 0x00000200 +#define GL_FRAMEBUFFER_BARRIER_BIT 0x00000400 +#define GL_TRANSFORM_FEEDBACK_BARRIER_BIT 0x00000800 +#define GL_ATOMIC_COUNTER_BARRIER_BIT 0x00001000 +#define GL_ALL_BARRIER_BITS 0xFFFFFFFF +#define GL_MAX_IMAGE_UNITS 0x8F38 +#define GL_MAX_COMBINED_IMAGE_UNITS_AND_FRAGMENT_OUTPUTS 0x8F39 +#define GL_IMAGE_BINDING_NAME 0x8F3A +#define GL_IMAGE_BINDING_LEVEL 0x8F3B +#define GL_IMAGE_BINDING_LAYERED 0x8F3C +#define GL_IMAGE_BINDING_LAYER 0x8F3D +#define GL_IMAGE_BINDING_ACCESS 0x8F3E +#define GL_IMAGE_1D 0x904C +#define GL_IMAGE_2D 0x904D +#define GL_IMAGE_3D 0x904E +#define GL_IMAGE_2D_RECT 0x904F +#define GL_IMAGE_CUBE 0x9050 +#define GL_IMAGE_BUFFER 0x9051 +#define GL_IMAGE_1D_ARRAY 0x9052 +#define GL_IMAGE_2D_ARRAY 0x9053 +#define GL_IMAGE_CUBE_MAP_ARRAY 0x9054 +#define GL_IMAGE_2D_MULTISAMPLE 0x9055 +#define GL_IMAGE_2D_MULTISAMPLE_ARRAY 0x9056 +#define GL_INT_IMAGE_1D 0x9057 +#define GL_INT_IMAGE_2D 0x9058 +#define GL_INT_IMAGE_3D 0x9059 +#define GL_INT_IMAGE_2D_RECT 0x905A +#define GL_INT_IMAGE_CUBE 0x905B +#define GL_INT_IMAGE_BUFFER 0x905C +#define GL_INT_IMAGE_1D_ARRAY 0x905D +#define GL_INT_IMAGE_2D_ARRAY 0x905E +#define GL_INT_IMAGE_CUBE_MAP_ARRAY 0x905F +#define GL_INT_IMAGE_2D_MULTISAMPLE 0x9060 +#define GL_INT_IMAGE_2D_MULTISAMPLE_ARRAY 0x9061 +#define GL_UNSIGNED_INT_IMAGE_1D 0x9062 +#define GL_UNSIGNED_INT_IMAGE_2D 0x9063 +#define GL_UNSIGNED_INT_IMAGE_3D 0x9064 +#define GL_UNSIGNED_INT_IMAGE_2D_RECT 0x9065 +#define GL_UNSIGNED_INT_IMAGE_CUBE 0x9066 +#define GL_UNSIGNED_INT_IMAGE_BUFFER 0x9067 +#define GL_UNSIGNED_INT_IMAGE_1D_ARRAY 0x9068 +#define GL_UNSIGNED_INT_IMAGE_2D_ARRAY 0x9069 +#define GL_UNSIGNED_INT_IMAGE_CUBE_MAP_ARRAY 0x906A +#define GL_UNSIGNED_INT_IMAGE_2D_MULTISAMPLE 0x906B +#define GL_UNSIGNED_INT_IMAGE_2D_MULTISAMPLE_ARRAY 0x906C +#define GL_MAX_IMAGE_SAMPLES 0x906D +#define GL_IMAGE_BINDING_FORMAT 0x906E +#define GL_IMAGE_FORMAT_COMPATIBILITY_TYPE 0x90C7 +#define GL_IMAGE_FORMAT_COMPATIBILITY_BY_SIZE 0x90C8 +#define GL_IMAGE_FORMAT_COMPATIBILITY_BY_CLASS 0x90C9 +#define GL_MAX_VERTEX_IMAGE_UNIFORMS 0x90CA +#define GL_MAX_TESS_CONTROL_IMAGE_UNIFORMS 0x90CB +#define GL_MAX_TESS_EVALUATION_IMAGE_UNIFORMS 0x90CC +#define GL_MAX_GEOMETRY_IMAGE_UNIFORMS 0x90CD +#define GL_MAX_FRAGMENT_IMAGE_UNIFORMS 0x90CE +#define GL_MAX_COMBINED_IMAGE_UNIFORMS 0x90CF +#define 
GL_COMPRESSED_RGBA_BPTC_UNORM 0x8E8C +#define GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM 0x8E8D +#define GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT 0x8E8E +#define GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT 0x8E8F +#define GL_TEXTURE_IMMUTABLE_FORMAT 0x912F +typedef void (APIENTRYP PFNGLDRAWARRAYSINSTANCEDBASEINSTANCEPROC) (GLenum mode, GLint first, GLsizei count, GLsizei instancecount, GLuint baseinstance); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEINSTANCEPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLuint baseinstance); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXBASEINSTANCEPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex, GLuint baseinstance); +typedef void (APIENTRYP PFNGLGETINTERNALFORMATIVPROC) (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +typedef void (APIENTRYP PFNGLGETACTIVEATOMICCOUNTERBUFFERIVPROC) (GLuint program, GLuint bufferIndex, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLBINDIMAGETEXTUREPROC) (GLuint unit, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLenum format); +typedef void (APIENTRYP PFNGLMEMORYBARRIERPROC) (GLbitfield barriers); +typedef void (APIENTRYP PFNGLTEXSTORAGE1DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (APIENTRYP PFNGLTEXSTORAGE2DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLTEXSTORAGE3DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKINSTANCEDPROC) (GLenum mode, GLuint id, GLsizei instancecount); +typedef void (APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKSTREAMINSTANCEDPROC) (GLenum mode, GLuint id, GLuint stream, GLsizei instancecount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawArraysInstancedBaseInstance (GLenum mode, GLint first, GLsizei count, GLsizei instancecount, GLuint baseinstance); +GLAPI void APIENTRY glDrawElementsInstancedBaseInstance (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLuint baseinstance); +GLAPI void APIENTRY glDrawElementsInstancedBaseVertexBaseInstance (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex, GLuint baseinstance); +GLAPI void APIENTRY glGetInternalformativ (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +GLAPI void APIENTRY glGetActiveAtomicCounterBufferiv (GLuint program, GLuint bufferIndex, GLenum pname, GLint *params); +GLAPI void APIENTRY glBindImageTexture (GLuint unit, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLenum format); +GLAPI void APIENTRY glMemoryBarrier (GLbitfield barriers); +GLAPI void APIENTRY glTexStorage1D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GLAPI void APIENTRY glTexStorage2D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glTexStorage3D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GLAPI void APIENTRY glDrawTransformFeedbackInstanced (GLenum mode, GLuint id, GLsizei instancecount); +GLAPI void APIENTRY glDrawTransformFeedbackStreamInstanced (GLenum mode, GLuint id, GLuint stream, GLsizei instancecount); +#endif +#endif /* GL_VERSION_4_2 */ + 
+#ifndef GL_VERSION_4_3 +#define GL_VERSION_4_3 1 +typedef void (APIENTRY *GLDEBUGPROC)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam); +#define GL_NUM_SHADING_LANGUAGE_VERSIONS 0x82E9 +#define GL_VERTEX_ATTRIB_ARRAY_LONG 0x874E +#define GL_COMPRESSED_RGB8_ETC2 0x9274 +#define GL_COMPRESSED_SRGB8_ETC2 0x9275 +#define GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9276 +#define GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9277 +#define GL_COMPRESSED_RGBA8_ETC2_EAC 0x9278 +#define GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC 0x9279 +#define GL_COMPRESSED_R11_EAC 0x9270 +#define GL_COMPRESSED_SIGNED_R11_EAC 0x9271 +#define GL_COMPRESSED_RG11_EAC 0x9272 +#define GL_COMPRESSED_SIGNED_RG11_EAC 0x9273 +#define GL_PRIMITIVE_RESTART_FIXED_INDEX 0x8D69 +#define GL_ANY_SAMPLES_PASSED_CONSERVATIVE 0x8D6A +#define GL_MAX_ELEMENT_INDEX 0x8D6B +#define GL_COMPUTE_SHADER 0x91B9 +#define GL_MAX_COMPUTE_UNIFORM_BLOCKS 0x91BB +#define GL_MAX_COMPUTE_TEXTURE_IMAGE_UNITS 0x91BC +#define GL_MAX_COMPUTE_IMAGE_UNIFORMS 0x91BD +#define GL_MAX_COMPUTE_SHARED_MEMORY_SIZE 0x8262 +#define GL_MAX_COMPUTE_UNIFORM_COMPONENTS 0x8263 +#define GL_MAX_COMPUTE_ATOMIC_COUNTER_BUFFERS 0x8264 +#define GL_MAX_COMPUTE_ATOMIC_COUNTERS 0x8265 +#define GL_MAX_COMBINED_COMPUTE_UNIFORM_COMPONENTS 0x8266 +#define GL_MAX_COMPUTE_WORK_GROUP_INVOCATIONS 0x90EB +#define GL_MAX_COMPUTE_WORK_GROUP_COUNT 0x91BE +#define GL_MAX_COMPUTE_WORK_GROUP_SIZE 0x91BF +#define GL_COMPUTE_WORK_GROUP_SIZE 0x8267 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_COMPUTE_SHADER 0x90EC +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_COMPUTE_SHADER 0x90ED +#define GL_DISPATCH_INDIRECT_BUFFER 0x90EE +#define GL_DISPATCH_INDIRECT_BUFFER_BINDING 0x90EF +#define GL_COMPUTE_SHADER_BIT 0x00000020 +#define GL_DEBUG_OUTPUT_SYNCHRONOUS 0x8242 +#define GL_DEBUG_NEXT_LOGGED_MESSAGE_LENGTH 0x8243 +#define GL_DEBUG_CALLBACK_FUNCTION 0x8244 +#define GL_DEBUG_CALLBACK_USER_PARAM 0x8245 +#define GL_DEBUG_SOURCE_API 0x8246 +#define GL_DEBUG_SOURCE_WINDOW_SYSTEM 0x8247 +#define GL_DEBUG_SOURCE_SHADER_COMPILER 0x8248 +#define GL_DEBUG_SOURCE_THIRD_PARTY 0x8249 +#define GL_DEBUG_SOURCE_APPLICATION 0x824A +#define GL_DEBUG_SOURCE_OTHER 0x824B +#define GL_DEBUG_TYPE_ERROR 0x824C +#define GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR 0x824D +#define GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR 0x824E +#define GL_DEBUG_TYPE_PORTABILITY 0x824F +#define GL_DEBUG_TYPE_PERFORMANCE 0x8250 +#define GL_DEBUG_TYPE_OTHER 0x8251 +#define GL_MAX_DEBUG_MESSAGE_LENGTH 0x9143 +#define GL_MAX_DEBUG_LOGGED_MESSAGES 0x9144 +#define GL_DEBUG_LOGGED_MESSAGES 0x9145 +#define GL_DEBUG_SEVERITY_HIGH 0x9146 +#define GL_DEBUG_SEVERITY_MEDIUM 0x9147 +#define GL_DEBUG_SEVERITY_LOW 0x9148 +#define GL_DEBUG_TYPE_MARKER 0x8268 +#define GL_DEBUG_TYPE_PUSH_GROUP 0x8269 +#define GL_DEBUG_TYPE_POP_GROUP 0x826A +#define GL_DEBUG_SEVERITY_NOTIFICATION 0x826B +#define GL_MAX_DEBUG_GROUP_STACK_DEPTH 0x826C +#define GL_DEBUG_GROUP_STACK_DEPTH 0x826D +#define GL_BUFFER 0x82E0 +#define GL_SHADER 0x82E1 +#define GL_PROGRAM 0x82E2 +#define GL_QUERY 0x82E3 +#define GL_PROGRAM_PIPELINE 0x82E4 +#define GL_SAMPLER 0x82E6 +#define GL_MAX_LABEL_LENGTH 0x82E8 +#define GL_DEBUG_OUTPUT 0x92E0 +#define GL_CONTEXT_FLAG_DEBUG_BIT 0x00000002 +#define GL_MAX_UNIFORM_LOCATIONS 0x826E +#define GL_FRAMEBUFFER_DEFAULT_WIDTH 0x9310 +#define GL_FRAMEBUFFER_DEFAULT_HEIGHT 0x9311 +#define GL_FRAMEBUFFER_DEFAULT_LAYERS 0x9312 +#define GL_FRAMEBUFFER_DEFAULT_SAMPLES 0x9313 +#define 
GL_FRAMEBUFFER_DEFAULT_FIXED_SAMPLE_LOCATIONS 0x9314 +#define GL_MAX_FRAMEBUFFER_WIDTH 0x9315 +#define GL_MAX_FRAMEBUFFER_HEIGHT 0x9316 +#define GL_MAX_FRAMEBUFFER_LAYERS 0x9317 +#define GL_MAX_FRAMEBUFFER_SAMPLES 0x9318 +#define GL_INTERNALFORMAT_SUPPORTED 0x826F +#define GL_INTERNALFORMAT_PREFERRED 0x8270 +#define GL_INTERNALFORMAT_RED_SIZE 0x8271 +#define GL_INTERNALFORMAT_GREEN_SIZE 0x8272 +#define GL_INTERNALFORMAT_BLUE_SIZE 0x8273 +#define GL_INTERNALFORMAT_ALPHA_SIZE 0x8274 +#define GL_INTERNALFORMAT_DEPTH_SIZE 0x8275 +#define GL_INTERNALFORMAT_STENCIL_SIZE 0x8276 +#define GL_INTERNALFORMAT_SHARED_SIZE 0x8277 +#define GL_INTERNALFORMAT_RED_TYPE 0x8278 +#define GL_INTERNALFORMAT_GREEN_TYPE 0x8279 +#define GL_INTERNALFORMAT_BLUE_TYPE 0x827A +#define GL_INTERNALFORMAT_ALPHA_TYPE 0x827B +#define GL_INTERNALFORMAT_DEPTH_TYPE 0x827C +#define GL_INTERNALFORMAT_STENCIL_TYPE 0x827D +#define GL_MAX_WIDTH 0x827E +#define GL_MAX_HEIGHT 0x827F +#define GL_MAX_DEPTH 0x8280 +#define GL_MAX_LAYERS 0x8281 +#define GL_MAX_COMBINED_DIMENSIONS 0x8282 +#define GL_COLOR_COMPONENTS 0x8283 +#define GL_DEPTH_COMPONENTS 0x8284 +#define GL_STENCIL_COMPONENTS 0x8285 +#define GL_COLOR_RENDERABLE 0x8286 +#define GL_DEPTH_RENDERABLE 0x8287 +#define GL_STENCIL_RENDERABLE 0x8288 +#define GL_FRAMEBUFFER_RENDERABLE 0x8289 +#define GL_FRAMEBUFFER_RENDERABLE_LAYERED 0x828A +#define GL_FRAMEBUFFER_BLEND 0x828B +#define GL_READ_PIXELS 0x828C +#define GL_READ_PIXELS_FORMAT 0x828D +#define GL_READ_PIXELS_TYPE 0x828E +#define GL_TEXTURE_IMAGE_FORMAT 0x828F +#define GL_TEXTURE_IMAGE_TYPE 0x8290 +#define GL_GET_TEXTURE_IMAGE_FORMAT 0x8291 +#define GL_GET_TEXTURE_IMAGE_TYPE 0x8292 +#define GL_MIPMAP 0x8293 +#define GL_MANUAL_GENERATE_MIPMAP 0x8294 +#define GL_AUTO_GENERATE_MIPMAP 0x8295 +#define GL_COLOR_ENCODING 0x8296 +#define GL_SRGB_READ 0x8297 +#define GL_SRGB_WRITE 0x8298 +#define GL_FILTER 0x829A +#define GL_VERTEX_TEXTURE 0x829B +#define GL_TESS_CONTROL_TEXTURE 0x829C +#define GL_TESS_EVALUATION_TEXTURE 0x829D +#define GL_GEOMETRY_TEXTURE 0x829E +#define GL_FRAGMENT_TEXTURE 0x829F +#define GL_COMPUTE_TEXTURE 0x82A0 +#define GL_TEXTURE_SHADOW 0x82A1 +#define GL_TEXTURE_GATHER 0x82A2 +#define GL_TEXTURE_GATHER_SHADOW 0x82A3 +#define GL_SHADER_IMAGE_LOAD 0x82A4 +#define GL_SHADER_IMAGE_STORE 0x82A5 +#define GL_SHADER_IMAGE_ATOMIC 0x82A6 +#define GL_IMAGE_TEXEL_SIZE 0x82A7 +#define GL_IMAGE_COMPATIBILITY_CLASS 0x82A8 +#define GL_IMAGE_PIXEL_FORMAT 0x82A9 +#define GL_IMAGE_PIXEL_TYPE 0x82AA +#define GL_SIMULTANEOUS_TEXTURE_AND_DEPTH_TEST 0x82AC +#define GL_SIMULTANEOUS_TEXTURE_AND_STENCIL_TEST 0x82AD +#define GL_SIMULTANEOUS_TEXTURE_AND_DEPTH_WRITE 0x82AE +#define GL_SIMULTANEOUS_TEXTURE_AND_STENCIL_WRITE 0x82AF +#define GL_TEXTURE_COMPRESSED_BLOCK_WIDTH 0x82B1 +#define GL_TEXTURE_COMPRESSED_BLOCK_HEIGHT 0x82B2 +#define GL_TEXTURE_COMPRESSED_BLOCK_SIZE 0x82B3 +#define GL_CLEAR_BUFFER 0x82B4 +#define GL_TEXTURE_VIEW 0x82B5 +#define GL_VIEW_COMPATIBILITY_CLASS 0x82B6 +#define GL_FULL_SUPPORT 0x82B7 +#define GL_CAVEAT_SUPPORT 0x82B8 +#define GL_IMAGE_CLASS_4_X_32 0x82B9 +#define GL_IMAGE_CLASS_2_X_32 0x82BA +#define GL_IMAGE_CLASS_1_X_32 0x82BB +#define GL_IMAGE_CLASS_4_X_16 0x82BC +#define GL_IMAGE_CLASS_2_X_16 0x82BD +#define GL_IMAGE_CLASS_1_X_16 0x82BE +#define GL_IMAGE_CLASS_4_X_8 0x82BF +#define GL_IMAGE_CLASS_2_X_8 0x82C0 +#define GL_IMAGE_CLASS_1_X_8 0x82C1 +#define GL_IMAGE_CLASS_11_11_10 0x82C2 +#define GL_IMAGE_CLASS_10_10_10_2 0x82C3 +#define GL_VIEW_CLASS_128_BITS 0x82C4 +#define GL_VIEW_CLASS_96_BITS 0x82C5 
+#define GL_VIEW_CLASS_64_BITS 0x82C6 +#define GL_VIEW_CLASS_48_BITS 0x82C7 +#define GL_VIEW_CLASS_32_BITS 0x82C8 +#define GL_VIEW_CLASS_24_BITS 0x82C9 +#define GL_VIEW_CLASS_16_BITS 0x82CA +#define GL_VIEW_CLASS_8_BITS 0x82CB +#define GL_VIEW_CLASS_S3TC_DXT1_RGB 0x82CC +#define GL_VIEW_CLASS_S3TC_DXT1_RGBA 0x82CD +#define GL_VIEW_CLASS_S3TC_DXT3_RGBA 0x82CE +#define GL_VIEW_CLASS_S3TC_DXT5_RGBA 0x82CF +#define GL_VIEW_CLASS_RGTC1_RED 0x82D0 +#define GL_VIEW_CLASS_RGTC2_RG 0x82D1 +#define GL_VIEW_CLASS_BPTC_UNORM 0x82D2 +#define GL_VIEW_CLASS_BPTC_FLOAT 0x82D3 +#define GL_UNIFORM 0x92E1 +#define GL_UNIFORM_BLOCK 0x92E2 +#define GL_PROGRAM_INPUT 0x92E3 +#define GL_PROGRAM_OUTPUT 0x92E4 +#define GL_BUFFER_VARIABLE 0x92E5 +#define GL_SHADER_STORAGE_BLOCK 0x92E6 +#define GL_VERTEX_SUBROUTINE 0x92E8 +#define GL_TESS_CONTROL_SUBROUTINE 0x92E9 +#define GL_TESS_EVALUATION_SUBROUTINE 0x92EA +#define GL_GEOMETRY_SUBROUTINE 0x92EB +#define GL_FRAGMENT_SUBROUTINE 0x92EC +#define GL_COMPUTE_SUBROUTINE 0x92ED +#define GL_VERTEX_SUBROUTINE_UNIFORM 0x92EE +#define GL_TESS_CONTROL_SUBROUTINE_UNIFORM 0x92EF +#define GL_TESS_EVALUATION_SUBROUTINE_UNIFORM 0x92F0 +#define GL_GEOMETRY_SUBROUTINE_UNIFORM 0x92F1 +#define GL_FRAGMENT_SUBROUTINE_UNIFORM 0x92F2 +#define GL_COMPUTE_SUBROUTINE_UNIFORM 0x92F3 +#define GL_TRANSFORM_FEEDBACK_VARYING 0x92F4 +#define GL_ACTIVE_RESOURCES 0x92F5 +#define GL_MAX_NAME_LENGTH 0x92F6 +#define GL_MAX_NUM_ACTIVE_VARIABLES 0x92F7 +#define GL_MAX_NUM_COMPATIBLE_SUBROUTINES 0x92F8 +#define GL_NAME_LENGTH 0x92F9 +#define GL_TYPE 0x92FA +#define GL_ARRAY_SIZE 0x92FB +#define GL_OFFSET 0x92FC +#define GL_BLOCK_INDEX 0x92FD +#define GL_ARRAY_STRIDE 0x92FE +#define GL_MATRIX_STRIDE 0x92FF +#define GL_IS_ROW_MAJOR 0x9300 +#define GL_ATOMIC_COUNTER_BUFFER_INDEX 0x9301 +#define GL_BUFFER_BINDING 0x9302 +#define GL_BUFFER_DATA_SIZE 0x9303 +#define GL_NUM_ACTIVE_VARIABLES 0x9304 +#define GL_ACTIVE_VARIABLES 0x9305 +#define GL_REFERENCED_BY_VERTEX_SHADER 0x9306 +#define GL_REFERENCED_BY_TESS_CONTROL_SHADER 0x9307 +#define GL_REFERENCED_BY_TESS_EVALUATION_SHADER 0x9308 +#define GL_REFERENCED_BY_GEOMETRY_SHADER 0x9309 +#define GL_REFERENCED_BY_FRAGMENT_SHADER 0x930A +#define GL_REFERENCED_BY_COMPUTE_SHADER 0x930B +#define GL_TOP_LEVEL_ARRAY_SIZE 0x930C +#define GL_TOP_LEVEL_ARRAY_STRIDE 0x930D +#define GL_LOCATION 0x930E +#define GL_LOCATION_INDEX 0x930F +#define GL_IS_PER_PATCH 0x92E7 +#define GL_SHADER_STORAGE_BUFFER 0x90D2 +#define GL_SHADER_STORAGE_BUFFER_BINDING 0x90D3 +#define GL_SHADER_STORAGE_BUFFER_START 0x90D4 +#define GL_SHADER_STORAGE_BUFFER_SIZE 0x90D5 +#define GL_MAX_VERTEX_SHADER_STORAGE_BLOCKS 0x90D6 +#define GL_MAX_GEOMETRY_SHADER_STORAGE_BLOCKS 0x90D7 +#define GL_MAX_TESS_CONTROL_SHADER_STORAGE_BLOCKS 0x90D8 +#define GL_MAX_TESS_EVALUATION_SHADER_STORAGE_BLOCKS 0x90D9 +#define GL_MAX_FRAGMENT_SHADER_STORAGE_BLOCKS 0x90DA +#define GL_MAX_COMPUTE_SHADER_STORAGE_BLOCKS 0x90DB +#define GL_MAX_COMBINED_SHADER_STORAGE_BLOCKS 0x90DC +#define GL_MAX_SHADER_STORAGE_BUFFER_BINDINGS 0x90DD +#define GL_MAX_SHADER_STORAGE_BLOCK_SIZE 0x90DE +#define GL_SHADER_STORAGE_BUFFER_OFFSET_ALIGNMENT 0x90DF +#define GL_SHADER_STORAGE_BARRIER_BIT 0x00002000 +#define GL_MAX_COMBINED_SHADER_OUTPUT_RESOURCES 0x8F39 +#define GL_DEPTH_STENCIL_TEXTURE_MODE 0x90EA +#define GL_TEXTURE_BUFFER_OFFSET 0x919D +#define GL_TEXTURE_BUFFER_SIZE 0x919E +#define GL_TEXTURE_BUFFER_OFFSET_ALIGNMENT 0x919F +#define GL_TEXTURE_VIEW_MIN_LEVEL 0x82DB +#define GL_TEXTURE_VIEW_NUM_LEVELS 0x82DC +#define GL_TEXTURE_VIEW_MIN_LAYER 
0x82DD +#define GL_TEXTURE_VIEW_NUM_LAYERS 0x82DE +#define GL_TEXTURE_IMMUTABLE_LEVELS 0x82DF +#define GL_VERTEX_ATTRIB_BINDING 0x82D4 +#define GL_VERTEX_ATTRIB_RELATIVE_OFFSET 0x82D5 +#define GL_VERTEX_BINDING_DIVISOR 0x82D6 +#define GL_VERTEX_BINDING_OFFSET 0x82D7 +#define GL_VERTEX_BINDING_STRIDE 0x82D8 +#define GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET 0x82D9 +#define GL_MAX_VERTEX_ATTRIB_BINDINGS 0x82DA +#define GL_VERTEX_BINDING_BUFFER 0x8F4F +typedef void (APIENTRYP PFNGLCLEARBUFFERDATAPROC) (GLenum target, GLenum internalformat, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLCLEARBUFFERSUBDATAPROC) (GLenum target, GLenum internalformat, GLintptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLDISPATCHCOMPUTEPROC) (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z); +typedef void (APIENTRYP PFNGLDISPATCHCOMPUTEINDIRECTPROC) (GLintptr indirect); +typedef void (APIENTRYP PFNGLCOPYIMAGESUBDATAPROC) (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +typedef void (APIENTRYP PFNGLFRAMEBUFFERPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLGETFRAMEBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETINTERNALFORMATI64VPROC) (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint64 *params); +typedef void (APIENTRYP PFNGLINVALIDATETEXSUBIMAGEPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth); +typedef void (APIENTRYP PFNGLINVALIDATETEXIMAGEPROC) (GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLINVALIDATEBUFFERSUBDATAPROC) (GLuint buffer, GLintptr offset, GLsizeiptr length); +typedef void (APIENTRYP PFNGLINVALIDATEBUFFERDATAPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLINVALIDATEFRAMEBUFFERPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments); +typedef void (APIENTRYP PFNGLINVALIDATESUBFRAMEBUFFERPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTPROC) (GLenum mode, const void *indirect, GLsizei drawcount, GLsizei stride); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTPROC) (GLenum mode, GLenum type, const void *indirect, GLsizei drawcount, GLsizei stride); +typedef void (APIENTRYP PFNGLGETPROGRAMINTERFACEIVPROC) (GLuint program, GLenum programInterface, GLenum pname, GLint *params); +typedef GLuint (APIENTRYP PFNGLGETPROGRAMRESOURCEINDEXPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef void (APIENTRYP PFNGLGETPROGRAMRESOURCENAMEPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei bufSize, GLsizei *length, GLchar *name); +typedef void (APIENTRYP PFNGLGETPROGRAMRESOURCEIVPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLint *params); +typedef GLint (APIENTRYP PFNGLGETPROGRAMRESOURCELOCATIONPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef GLint (APIENTRYP PFNGLGETPROGRAMRESOURCELOCATIONINDEXPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef void (APIENTRYP PFNGLSHADERSTORAGEBLOCKBINDINGPROC) 
(GLuint program, GLuint storageBlockIndex, GLuint storageBlockBinding); +typedef void (APIENTRYP PFNGLTEXBUFFERRANGEPROC) (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLTEXSTORAGE2DMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXSTORAGE3DMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXTUREVIEWPROC) (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +typedef void (APIENTRYP PFNGLBINDVERTEXBUFFERPROC) (GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +typedef void (APIENTRYP PFNGLVERTEXATTRIBFORMATPROC) (GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXATTRIBIFORMATPROC) (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXATTRIBLFORMATPROC) (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXATTRIBBINDINGPROC) (GLuint attribindex, GLuint bindingindex); +typedef void (APIENTRYP PFNGLVERTEXBINDINGDIVISORPROC) (GLuint bindingindex, GLuint divisor); +typedef void (APIENTRYP PFNGLDEBUGMESSAGECONTROLPROC) (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +typedef void (APIENTRYP PFNGLDEBUGMESSAGEINSERTPROC) (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +typedef void (APIENTRYP PFNGLDEBUGMESSAGECALLBACKPROC) (GLDEBUGPROC callback, const void *userParam); +typedef GLuint (APIENTRYP PFNGLGETDEBUGMESSAGELOGPROC) (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +typedef void (APIENTRYP PFNGLPUSHDEBUGGROUPPROC) (GLenum source, GLuint id, GLsizei length, const GLchar *message); +typedef void (APIENTRYP PFNGLPOPDEBUGGROUPPROC) (void); +typedef void (APIENTRYP PFNGLOBJECTLABELPROC) (GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +typedef void (APIENTRYP PFNGLGETOBJECTLABELPROC) (GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (APIENTRYP PFNGLOBJECTPTRLABELPROC) (const void *ptr, GLsizei length, const GLchar *label); +typedef void (APIENTRYP PFNGLGETOBJECTPTRLABELPROC) (const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glClearBufferData (GLenum target, GLenum internalformat, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glClearBufferSubData (GLenum target, GLenum internalformat, GLintptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glDispatchCompute (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z); +GLAPI void APIENTRY glDispatchComputeIndirect (GLintptr indirect); +GLAPI void APIENTRY glCopyImageSubData (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +GLAPI void APIENTRY glFramebufferParameteri (GLenum target, 
GLenum pname, GLint param); +GLAPI void APIENTRY glGetFramebufferParameteriv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetInternalformati64v (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint64 *params); +GLAPI void APIENTRY glInvalidateTexSubImage (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth); +GLAPI void APIENTRY glInvalidateTexImage (GLuint texture, GLint level); +GLAPI void APIENTRY glInvalidateBufferSubData (GLuint buffer, GLintptr offset, GLsizeiptr length); +GLAPI void APIENTRY glInvalidateBufferData (GLuint buffer); +GLAPI void APIENTRY glInvalidateFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments); +GLAPI void APIENTRY glInvalidateSubFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glMultiDrawArraysIndirect (GLenum mode, const void *indirect, GLsizei drawcount, GLsizei stride); +GLAPI void APIENTRY glMultiDrawElementsIndirect (GLenum mode, GLenum type, const void *indirect, GLsizei drawcount, GLsizei stride); +GLAPI void APIENTRY glGetProgramInterfaceiv (GLuint program, GLenum programInterface, GLenum pname, GLint *params); +GLAPI GLuint APIENTRY glGetProgramResourceIndex (GLuint program, GLenum programInterface, const GLchar *name); +GLAPI void APIENTRY glGetProgramResourceName (GLuint program, GLenum programInterface, GLuint index, GLsizei bufSize, GLsizei *length, GLchar *name); +GLAPI void APIENTRY glGetProgramResourceiv (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLint *params); +GLAPI GLint APIENTRY glGetProgramResourceLocation (GLuint program, GLenum programInterface, const GLchar *name); +GLAPI GLint APIENTRY glGetProgramResourceLocationIndex (GLuint program, GLenum programInterface, const GLchar *name); +GLAPI void APIENTRY glShaderStorageBlockBinding (GLuint program, GLuint storageBlockIndex, GLuint storageBlockBinding); +GLAPI void APIENTRY glTexBufferRange (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +GLAPI void APIENTRY glTexStorage2DMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTexStorage3DMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTextureView (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +GLAPI void APIENTRY glBindVertexBuffer (GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +GLAPI void APIENTRY glVertexAttribFormat (GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +GLAPI void APIENTRY glVertexAttribIFormat (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexAttribLFormat (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexAttribBinding (GLuint attribindex, GLuint bindingindex); +GLAPI void APIENTRY glVertexBindingDivisor (GLuint bindingindex, GLuint divisor); +GLAPI void APIENTRY glDebugMessageControl (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); 
+GLAPI void APIENTRY glDebugMessageInsert (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +GLAPI void APIENTRY glDebugMessageCallback (GLDEBUGPROC callback, const void *userParam); +GLAPI GLuint APIENTRY glGetDebugMessageLog (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +GLAPI void APIENTRY glPushDebugGroup (GLenum source, GLuint id, GLsizei length, const GLchar *message); +GLAPI void APIENTRY glPopDebugGroup (void); +GLAPI void APIENTRY glObjectLabel (GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +GLAPI void APIENTRY glGetObjectLabel (GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +GLAPI void APIENTRY glObjectPtrLabel (const void *ptr, GLsizei length, const GLchar *label); +GLAPI void APIENTRY glGetObjectPtrLabel (const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +#endif +#endif /* GL_VERSION_4_3 */ + +#ifndef GL_VERSION_4_4 +#define GL_VERSION_4_4 1 +#define GL_MAX_VERTEX_ATTRIB_STRIDE 0x82E5 +#define GL_PRIMITIVE_RESTART_FOR_PATCHES_SUPPORTED 0x8221 +#define GL_TEXTURE_BUFFER_BINDING 0x8C2A +#define GL_MAP_PERSISTENT_BIT 0x0040 +#define GL_MAP_COHERENT_BIT 0x0080 +#define GL_DYNAMIC_STORAGE_BIT 0x0100 +#define GL_CLIENT_STORAGE_BIT 0x0200 +#define GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT 0x00004000 +#define GL_BUFFER_IMMUTABLE_STORAGE 0x821F +#define GL_BUFFER_STORAGE_FLAGS 0x8220 +#define GL_CLEAR_TEXTURE 0x9365 +#define GL_LOCATION_COMPONENT 0x934A +#define GL_TRANSFORM_FEEDBACK_BUFFER_INDEX 0x934B +#define GL_TRANSFORM_FEEDBACK_BUFFER_STRIDE 0x934C +#define GL_QUERY_BUFFER 0x9192 +#define GL_QUERY_BUFFER_BARRIER_BIT 0x00008000 +#define GL_QUERY_BUFFER_BINDING 0x9193 +#define GL_QUERY_RESULT_NO_WAIT 0x9194 +#define GL_MIRROR_CLAMP_TO_EDGE 0x8743 +typedef void (APIENTRYP PFNGLBUFFERSTORAGEPROC) (GLenum target, GLsizeiptr size, const void *data, GLbitfield flags); +typedef void (APIENTRYP PFNGLCLEARTEXIMAGEPROC) (GLuint texture, GLint level, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLCLEARTEXSUBIMAGEPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLBINDBUFFERSBASEPROC) (GLenum target, GLuint first, GLsizei count, const GLuint *buffers); +typedef void (APIENTRYP PFNGLBINDBUFFERSRANGEPROC) (GLenum target, GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizeiptr *sizes); +typedef void (APIENTRYP PFNGLBINDTEXTURESPROC) (GLuint first, GLsizei count, const GLuint *textures); +typedef void (APIENTRYP PFNGLBINDSAMPLERSPROC) (GLuint first, GLsizei count, const GLuint *samplers); +typedef void (APIENTRYP PFNGLBINDIMAGETEXTURESPROC) (GLuint first, GLsizei count, const GLuint *textures); +typedef void (APIENTRYP PFNGLBINDVERTEXBUFFERSPROC) (GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizei *strides); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBufferStorage (GLenum target, GLsizeiptr size, const void *data, GLbitfield flags); +GLAPI void APIENTRY glClearTexImage (GLuint texture, GLint level, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glClearTexSubImage (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const 
void *data); +GLAPI void APIENTRY glBindBuffersBase (GLenum target, GLuint first, GLsizei count, const GLuint *buffers); +GLAPI void APIENTRY glBindBuffersRange (GLenum target, GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizeiptr *sizes); +GLAPI void APIENTRY glBindTextures (GLuint first, GLsizei count, const GLuint *textures); +GLAPI void APIENTRY glBindSamplers (GLuint first, GLsizei count, const GLuint *samplers); +GLAPI void APIENTRY glBindImageTextures (GLuint first, GLsizei count, const GLuint *textures); +GLAPI void APIENTRY glBindVertexBuffers (GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizei *strides); +#endif +#endif /* GL_VERSION_4_4 */ + +#ifndef GL_VERSION_4_5 +#define GL_VERSION_4_5 1 +#define GL_CONTEXT_LOST 0x0507 +#define GL_NEGATIVE_ONE_TO_ONE 0x935E +#define GL_ZERO_TO_ONE 0x935F +#define GL_CLIP_ORIGIN 0x935C +#define GL_CLIP_DEPTH_MODE 0x935D +#define GL_QUERY_WAIT_INVERTED 0x8E17 +#define GL_QUERY_NO_WAIT_INVERTED 0x8E18 +#define GL_QUERY_BY_REGION_WAIT_INVERTED 0x8E19 +#define GL_QUERY_BY_REGION_NO_WAIT_INVERTED 0x8E1A +#define GL_MAX_CULL_DISTANCES 0x82F9 +#define GL_MAX_COMBINED_CLIP_AND_CULL_DISTANCES 0x82FA +#define GL_TEXTURE_TARGET 0x1006 +#define GL_QUERY_TARGET 0x82EA +#define GL_GUILTY_CONTEXT_RESET 0x8253 +#define GL_INNOCENT_CONTEXT_RESET 0x8254 +#define GL_UNKNOWN_CONTEXT_RESET 0x8255 +#define GL_RESET_NOTIFICATION_STRATEGY 0x8256 +#define GL_LOSE_CONTEXT_ON_RESET 0x8252 +#define GL_NO_RESET_NOTIFICATION 0x8261 +#define GL_CONTEXT_FLAG_ROBUST_ACCESS_BIT 0x00000004 +#define GL_CONTEXT_RELEASE_BEHAVIOR 0x82FB +#define GL_CONTEXT_RELEASE_BEHAVIOR_FLUSH 0x82FC +typedef void (APIENTRYP PFNGLCLIPCONTROLPROC) (GLenum origin, GLenum depth); +typedef void (APIENTRYP PFNGLCREATETRANSFORMFEEDBACKSPROC) (GLsizei n, GLuint *ids); +typedef void (APIENTRYP PFNGLTRANSFORMFEEDBACKBUFFERBASEPROC) (GLuint xfb, GLuint index, GLuint buffer); +typedef void (APIENTRYP PFNGLTRANSFORMFEEDBACKBUFFERRANGEPROC) (GLuint xfb, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLGETTRANSFORMFEEDBACKIVPROC) (GLuint xfb, GLenum pname, GLint *param); +typedef void (APIENTRYP PFNGLGETTRANSFORMFEEDBACKI_VPROC) (GLuint xfb, GLenum pname, GLuint index, GLint *param); +typedef void (APIENTRYP PFNGLGETTRANSFORMFEEDBACKI64_VPROC) (GLuint xfb, GLenum pname, GLuint index, GLint64 *param); +typedef void (APIENTRYP PFNGLCREATEBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef void (APIENTRYP PFNGLNAMEDBUFFERSTORAGEPROC) (GLuint buffer, GLsizeiptr size, const void *data, GLbitfield flags); +typedef void (APIENTRYP PFNGLNAMEDBUFFERDATAPROC) (GLuint buffer, GLsizeiptr size, const void *data, GLenum usage); +typedef void (APIENTRYP PFNGLNAMEDBUFFERSUBDATAPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +typedef void (APIENTRYP PFNGLCOPYNAMEDBUFFERSUBDATAPROC) (GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLCLEARNAMEDBUFFERDATAPROC) (GLuint buffer, GLenum internalformat, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLCLEARNAMEDBUFFERSUBDATAPROC) (GLuint buffer, GLenum internalformat, GLintptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +typedef void *(APIENTRYP PFNGLMAPNAMEDBUFFERPROC) (GLuint buffer, GLenum access); +typedef void *(APIENTRYP PFNGLMAPNAMEDBUFFERRANGEPROC) (GLuint buffer, GLintptr offset, GLsizeiptr 
length, GLbitfield access); +typedef GLboolean (APIENTRYP PFNGLUNMAPNAMEDBUFFERPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLFLUSHMAPPEDNAMEDBUFFERRANGEPROC) (GLuint buffer, GLintptr offset, GLsizeiptr length); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPARAMETERIVPROC) (GLuint buffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPARAMETERI64VPROC) (GLuint buffer, GLenum pname, GLint64 *params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPOINTERVPROC) (GLuint buffer, GLenum pname, void **params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERSUBDATAPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, void *data); +typedef void (APIENTRYP PFNGLCREATEFRAMEBUFFERSPROC) (GLsizei n, GLuint *framebuffers); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERRENDERBUFFERPROC) (GLuint framebuffer, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERPARAMETERIPROC) (GLuint framebuffer, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTUREPROC) (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTURELAYERPROC) (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERDRAWBUFFERPROC) (GLuint framebuffer, GLenum buf); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERDRAWBUFFERSPROC) (GLuint framebuffer, GLsizei n, const GLenum *bufs); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERREADBUFFERPROC) (GLuint framebuffer, GLenum src); +typedef void (APIENTRYP PFNGLINVALIDATENAMEDFRAMEBUFFERDATAPROC) (GLuint framebuffer, GLsizei numAttachments, const GLenum *attachments); +typedef void (APIENTRYP PFNGLINVALIDATENAMEDFRAMEBUFFERSUBDATAPROC) (GLuint framebuffer, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLCLEARNAMEDFRAMEBUFFERIVPROC) (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLint *value); +typedef void (APIENTRYP PFNGLCLEARNAMEDFRAMEBUFFERUIVPROC) (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLuint *value); +typedef void (APIENTRYP PFNGLCLEARNAMEDFRAMEBUFFERFVPROC) (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLfloat *value); +typedef void (APIENTRYP PFNGLCLEARNAMEDFRAMEBUFFERFIPROC) (GLuint framebuffer, GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +typedef void (APIENTRYP PFNGLBLITNAMEDFRAMEBUFFERPROC) (GLuint readFramebuffer, GLuint drawFramebuffer, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef GLenum (APIENTRYP PFNGLCHECKNAMEDFRAMEBUFFERSTATUSPROC) (GLuint framebuffer, GLenum target); +typedef void (APIENTRYP PFNGLGETNAMEDFRAMEBUFFERPARAMETERIVPROC) (GLuint framebuffer, GLenum pname, GLint *param); +typedef void (APIENTRYP PFNGLGETNAMEDFRAMEBUFFERATTACHMENTPARAMETERIVPROC) (GLuint framebuffer, GLenum attachment, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLCREATERENDERBUFFERSPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (APIENTRYP PFNGLNAMEDRENDERBUFFERSTORAGEPROC) (GLuint renderbuffer, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLNAMEDRENDERBUFFERSTORAGEMULTISAMPLEPROC) (GLuint renderbuffer, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETNAMEDRENDERBUFFERPARAMETERIVPROC) (GLuint 
renderbuffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLCREATETEXTURESPROC) (GLenum target, GLsizei n, GLuint *textures); +typedef void (APIENTRYP PFNGLTEXTUREBUFFERPROC) (GLuint texture, GLenum internalformat, GLuint buffer); +typedef void (APIENTRYP PFNGLTEXTUREBUFFERRANGEPROC) (GLuint texture, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE1DPROC) (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE2DPROC) (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE3DPROC) (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE2DMULTISAMPLEPROC) (GLuint texture, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE3DMULTISAMPLEPROC) (GLuint texture, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE1DPROC) (GLuint texture, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE2DPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE3DPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE1DPROC) (GLuint texture, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE2DPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE3DPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOPYTEXTURESUBIMAGE1DPROC) (GLuint texture, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLCOPYTEXTURESUBIMAGE2DPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLCOPYTEXTURESUBIMAGE3DPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERFPROC) (GLuint texture, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERFVPROC) (GLuint texture, GLenum pname, const GLfloat *param); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIPROC) (GLuint texture, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIIVPROC) (GLuint texture, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIUIVPROC) (GLuint texture, GLenum pname, const GLuint *params); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIVPROC) (GLuint texture, GLenum pname, const GLint *param); +typedef void (APIENTRYP 
PFNGLGENERATETEXTUREMIPMAPPROC) (GLuint texture); +typedef void (APIENTRYP PFNGLBINDTEXTUREUNITPROC) (GLuint unit, GLuint texture); +typedef void (APIENTRYP PFNGLGETTEXTUREIMAGEPROC) (GLuint texture, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +typedef void (APIENTRYP PFNGLGETCOMPRESSEDTEXTUREIMAGEPROC) (GLuint texture, GLint level, GLsizei bufSize, void *pixels); +typedef void (APIENTRYP PFNGLGETTEXTURELEVELPARAMETERFVPROC) (GLuint texture, GLint level, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETTEXTURELEVELPARAMETERIVPROC) (GLuint texture, GLint level, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERFVPROC) (GLuint texture, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIIVPROC) (GLuint texture, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIUIVPROC) (GLuint texture, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIVPROC) (GLuint texture, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLCREATEVERTEXARRAYSPROC) (GLsizei n, GLuint *arrays); +typedef void (APIENTRYP PFNGLDISABLEVERTEXARRAYATTRIBPROC) (GLuint vaobj, GLuint index); +typedef void (APIENTRYP PFNGLENABLEVERTEXARRAYATTRIBPROC) (GLuint vaobj, GLuint index); +typedef void (APIENTRYP PFNGLVERTEXARRAYELEMENTBUFFERPROC) (GLuint vaobj, GLuint buffer); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXBUFFERPROC) (GLuint vaobj, GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXBUFFERSPROC) (GLuint vaobj, GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizei *strides); +typedef void (APIENTRYP PFNGLVERTEXARRAYATTRIBBINDINGPROC) (GLuint vaobj, GLuint attribindex, GLuint bindingindex); +typedef void (APIENTRYP PFNGLVERTEXARRAYATTRIBFORMATPROC) (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYATTRIBIFORMATPROC) (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYATTRIBLFORMATPROC) (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYBINDINGDIVISORPROC) (GLuint vaobj, GLuint bindingindex, GLuint divisor); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYIVPROC) (GLuint vaobj, GLenum pname, GLint *param); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYINDEXEDIVPROC) (GLuint vaobj, GLuint index, GLenum pname, GLint *param); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYINDEXED64IVPROC) (GLuint vaobj, GLuint index, GLenum pname, GLint64 *param); +typedef void (APIENTRYP PFNGLCREATESAMPLERSPROC) (GLsizei n, GLuint *samplers); +typedef void (APIENTRYP PFNGLCREATEPROGRAMPIPELINESPROC) (GLsizei n, GLuint *pipelines); +typedef void (APIENTRYP PFNGLCREATEQUERIESPROC) (GLenum target, GLsizei n, GLuint *ids); +typedef void (APIENTRYP PFNGLGETQUERYBUFFEROBJECTI64VPROC) (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +typedef void (APIENTRYP PFNGLGETQUERYBUFFEROBJECTIVPROC) (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +typedef void (APIENTRYP PFNGLGETQUERYBUFFEROBJECTUI64VPROC) (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +typedef void (APIENTRYP PFNGLGETQUERYBUFFEROBJECTUIVPROC) (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +typedef void (APIENTRYP PFNGLMEMORYBARRIERBYREGIONPROC) 
(GLbitfield barriers); +typedef void (APIENTRYP PFNGLGETTEXTURESUBIMAGEPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +typedef void (APIENTRYP PFNGLGETCOMPRESSEDTEXTURESUBIMAGEPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLsizei bufSize, void *pixels); +typedef GLenum (APIENTRYP PFNGLGETGRAPHICSRESETSTATUSPROC) (void); +typedef void (APIENTRYP PFNGLGETNCOMPRESSEDTEXIMAGEPROC) (GLenum target, GLint lod, GLsizei bufSize, void *pixels); +typedef void (APIENTRYP PFNGLGETNTEXIMAGEPROC) (GLenum target, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +typedef void (APIENTRYP PFNGLGETNUNIFORMDVPROC) (GLuint program, GLint location, GLsizei bufSize, GLdouble *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMFVPROC) (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMIVPROC) (GLuint program, GLint location, GLsizei bufSize, GLint *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMUIVPROC) (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +typedef void (APIENTRYP PFNGLREADNPIXELSPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (APIENTRYP PFNGLTEXTUREBARRIERPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glClipControl (GLenum origin, GLenum depth); +GLAPI void APIENTRY glCreateTransformFeedbacks (GLsizei n, GLuint *ids); +GLAPI void APIENTRY glTransformFeedbackBufferBase (GLuint xfb, GLuint index, GLuint buffer); +GLAPI void APIENTRY glTransformFeedbackBufferRange (GLuint xfb, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +GLAPI void APIENTRY glGetTransformFeedbackiv (GLuint xfb, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetTransformFeedbacki_v (GLuint xfb, GLenum pname, GLuint index, GLint *param); +GLAPI void APIENTRY glGetTransformFeedbacki64_v (GLuint xfb, GLenum pname, GLuint index, GLint64 *param); +GLAPI void APIENTRY glCreateBuffers (GLsizei n, GLuint *buffers); +GLAPI void APIENTRY glNamedBufferStorage (GLuint buffer, GLsizeiptr size, const void *data, GLbitfield flags); +GLAPI void APIENTRY glNamedBufferData (GLuint buffer, GLsizeiptr size, const void *data, GLenum usage); +GLAPI void APIENTRY glNamedBufferSubData (GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +GLAPI void APIENTRY glCopyNamedBufferSubData (GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GLAPI void APIENTRY glClearNamedBufferData (GLuint buffer, GLenum internalformat, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glClearNamedBufferSubData (GLuint buffer, GLenum internalformat, GLintptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +GLAPI void *APIENTRY glMapNamedBuffer (GLuint buffer, GLenum access); +GLAPI void *APIENTRY glMapNamedBufferRange (GLuint buffer, GLintptr offset, GLsizeiptr length, GLbitfield access); +GLAPI GLboolean APIENTRY glUnmapNamedBuffer (GLuint buffer); +GLAPI void APIENTRY glFlushMappedNamedBufferRange (GLuint buffer, GLintptr offset, GLsizeiptr length); +GLAPI void APIENTRY glGetNamedBufferParameteriv (GLuint buffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetNamedBufferParameteri64v (GLuint buffer, GLenum pname, GLint64 *params); 
+GLAPI void APIENTRY glGetNamedBufferPointerv (GLuint buffer, GLenum pname, void **params); +GLAPI void APIENTRY glGetNamedBufferSubData (GLuint buffer, GLintptr offset, GLsizeiptr size, void *data); +GLAPI void APIENTRY glCreateFramebuffers (GLsizei n, GLuint *framebuffers); +GLAPI void APIENTRY glNamedFramebufferRenderbuffer (GLuint framebuffer, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GLAPI void APIENTRY glNamedFramebufferParameteri (GLuint framebuffer, GLenum pname, GLint param); +GLAPI void APIENTRY glNamedFramebufferTexture (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level); +GLAPI void APIENTRY glNamedFramebufferTextureLayer (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLint layer); +GLAPI void APIENTRY glNamedFramebufferDrawBuffer (GLuint framebuffer, GLenum buf); +GLAPI void APIENTRY glNamedFramebufferDrawBuffers (GLuint framebuffer, GLsizei n, const GLenum *bufs); +GLAPI void APIENTRY glNamedFramebufferReadBuffer (GLuint framebuffer, GLenum src); +GLAPI void APIENTRY glInvalidateNamedFramebufferData (GLuint framebuffer, GLsizei numAttachments, const GLenum *attachments); +GLAPI void APIENTRY glInvalidateNamedFramebufferSubData (GLuint framebuffer, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glClearNamedFramebufferiv (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLint *value); +GLAPI void APIENTRY glClearNamedFramebufferuiv (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLuint *value); +GLAPI void APIENTRY glClearNamedFramebufferfv (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLfloat *value); +GLAPI void APIENTRY glClearNamedFramebufferfi (GLuint framebuffer, GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +GLAPI void APIENTRY glBlitNamedFramebuffer (GLuint readFramebuffer, GLuint drawFramebuffer, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GLAPI GLenum APIENTRY glCheckNamedFramebufferStatus (GLuint framebuffer, GLenum target); +GLAPI void APIENTRY glGetNamedFramebufferParameteriv (GLuint framebuffer, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetNamedFramebufferAttachmentParameteriv (GLuint framebuffer, GLenum attachment, GLenum pname, GLint *params); +GLAPI void APIENTRY glCreateRenderbuffers (GLsizei n, GLuint *renderbuffers); +GLAPI void APIENTRY glNamedRenderbufferStorage (GLuint renderbuffer, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glNamedRenderbufferStorageMultisample (GLuint renderbuffer, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetNamedRenderbufferParameteriv (GLuint renderbuffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glCreateTextures (GLenum target, GLsizei n, GLuint *textures); +GLAPI void APIENTRY glTextureBuffer (GLuint texture, GLenum internalformat, GLuint buffer); +GLAPI void APIENTRY glTextureBufferRange (GLuint texture, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +GLAPI void APIENTRY glTextureStorage1D (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width); +GLAPI void APIENTRY glTextureStorage2D (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glTextureStorage3D (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei 
depth); +GLAPI void APIENTRY glTextureStorage2DMultisample (GLuint texture, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTextureStorage3DMultisample (GLuint texture, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTextureSubImage1D (GLuint texture, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureSubImage2D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureSubImage3D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCompressedTextureSubImage1D (GLuint texture, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTextureSubImage2D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTextureSubImage3D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCopyTextureSubImage1D (GLuint texture, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY glCopyTextureSubImage2D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glCopyTextureSubImage3D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glTextureParameterf (GLuint texture, GLenum pname, GLfloat param); +GLAPI void APIENTRY glTextureParameterfv (GLuint texture, GLenum pname, const GLfloat *param); +GLAPI void APIENTRY glTextureParameteri (GLuint texture, GLenum pname, GLint param); +GLAPI void APIENTRY glTextureParameterIiv (GLuint texture, GLenum pname, const GLint *params); +GLAPI void APIENTRY glTextureParameterIuiv (GLuint texture, GLenum pname, const GLuint *params); +GLAPI void APIENTRY glTextureParameteriv (GLuint texture, GLenum pname, const GLint *param); +GLAPI void APIENTRY glGenerateTextureMipmap (GLuint texture); +GLAPI void APIENTRY glBindTextureUnit (GLuint unit, GLuint texture); +GLAPI void APIENTRY glGetTextureImage (GLuint texture, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +GLAPI void APIENTRY glGetCompressedTextureImage (GLuint texture, GLint level, GLsizei bufSize, void *pixels); +GLAPI void APIENTRY glGetTextureLevelParameterfv (GLuint texture, GLint level, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetTextureLevelParameteriv (GLuint texture, GLint level, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTextureParameterfv (GLuint texture, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetTextureParameterIiv (GLuint texture, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTextureParameterIuiv (GLuint texture, GLenum pname, GLuint *params); +GLAPI void APIENTRY glGetTextureParameteriv (GLuint texture, GLenum pname, GLint *params); +GLAPI void APIENTRY glCreateVertexArrays (GLsizei n, GLuint *arrays); +GLAPI void 
APIENTRY glDisableVertexArrayAttrib (GLuint vaobj, GLuint index); +GLAPI void APIENTRY glEnableVertexArrayAttrib (GLuint vaobj, GLuint index); +GLAPI void APIENTRY glVertexArrayElementBuffer (GLuint vaobj, GLuint buffer); +GLAPI void APIENTRY glVertexArrayVertexBuffer (GLuint vaobj, GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +GLAPI void APIENTRY glVertexArrayVertexBuffers (GLuint vaobj, GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizei *strides); +GLAPI void APIENTRY glVertexArrayAttribBinding (GLuint vaobj, GLuint attribindex, GLuint bindingindex); +GLAPI void APIENTRY glVertexArrayAttribFormat (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayAttribIFormat (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayAttribLFormat (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayBindingDivisor (GLuint vaobj, GLuint bindingindex, GLuint divisor); +GLAPI void APIENTRY glGetVertexArrayiv (GLuint vaobj, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetVertexArrayIndexediv (GLuint vaobj, GLuint index, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetVertexArrayIndexed64iv (GLuint vaobj, GLuint index, GLenum pname, GLint64 *param); +GLAPI void APIENTRY glCreateSamplers (GLsizei n, GLuint *samplers); +GLAPI void APIENTRY glCreateProgramPipelines (GLsizei n, GLuint *pipelines); +GLAPI void APIENTRY glCreateQueries (GLenum target, GLsizei n, GLuint *ids); +GLAPI void APIENTRY glGetQueryBufferObjecti64v (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +GLAPI void APIENTRY glGetQueryBufferObjectiv (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +GLAPI void APIENTRY glGetQueryBufferObjectui64v (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +GLAPI void APIENTRY glGetQueryBufferObjectuiv (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +GLAPI void APIENTRY glMemoryBarrierByRegion (GLbitfield barriers); +GLAPI void APIENTRY glGetTextureSubImage (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +GLAPI void APIENTRY glGetCompressedTextureSubImage (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLsizei bufSize, void *pixels); +GLAPI GLenum APIENTRY glGetGraphicsResetStatus (void); +GLAPI void APIENTRY glGetnCompressedTexImage (GLenum target, GLint lod, GLsizei bufSize, void *pixels); +GLAPI void APIENTRY glGetnTexImage (GLenum target, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +GLAPI void APIENTRY glGetnUniformdv (GLuint program, GLint location, GLsizei bufSize, GLdouble *params); +GLAPI void APIENTRY glGetnUniformfv (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GLAPI void APIENTRY glGetnUniformiv (GLuint program, GLint location, GLsizei bufSize, GLint *params); +GLAPI void APIENTRY glGetnUniformuiv (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +GLAPI void APIENTRY glReadnPixels (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GLAPI void APIENTRY glTextureBarrier (void); +#endif +#endif /* GL_VERSION_4_5 */ + +#ifndef GL_VERSION_4_6 +#define 
GL_VERSION_4_6 1 +#define GL_SHADER_BINARY_FORMAT_SPIR_V 0x9551 +#define GL_SPIR_V_BINARY 0x9552 +#define GL_PARAMETER_BUFFER 0x80EE +#define GL_PARAMETER_BUFFER_BINDING 0x80EF +#define GL_CONTEXT_FLAG_NO_ERROR_BIT 0x00000008 +#define GL_VERTICES_SUBMITTED 0x82EE +#define GL_PRIMITIVES_SUBMITTED 0x82EF +#define GL_VERTEX_SHADER_INVOCATIONS 0x82F0 +#define GL_TESS_CONTROL_SHADER_PATCHES 0x82F1 +#define GL_TESS_EVALUATION_SHADER_INVOCATIONS 0x82F2 +#define GL_GEOMETRY_SHADER_PRIMITIVES_EMITTED 0x82F3 +#define GL_FRAGMENT_SHADER_INVOCATIONS 0x82F4 +#define GL_COMPUTE_SHADER_INVOCATIONS 0x82F5 +#define GL_CLIPPING_INPUT_PRIMITIVES 0x82F6 +#define GL_CLIPPING_OUTPUT_PRIMITIVES 0x82F7 +#define GL_POLYGON_OFFSET_CLAMP 0x8E1B +#define GL_SPIR_V_EXTENSIONS 0x9553 +#define GL_NUM_SPIR_V_EXTENSIONS 0x9554 +#define GL_TEXTURE_MAX_ANISOTROPY 0x84FE +#define GL_MAX_TEXTURE_MAX_ANISOTROPY 0x84FF +#define GL_TRANSFORM_FEEDBACK_OVERFLOW 0x82EC +#define GL_TRANSFORM_FEEDBACK_STREAM_OVERFLOW 0x82ED +typedef void (APIENTRYP PFNGLSPECIALIZESHADERPROC) (GLuint shader, const GLchar *pEntryPoint, GLuint numSpecializationConstants, const GLuint *pConstantIndex, const GLuint *pConstantValue); +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTCOUNTPROC) (GLenum mode, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTCOUNTPROC) (GLenum mode, GLenum type, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +typedef void (APIENTRYP PFNGLPOLYGONOFFSETCLAMPPROC) (GLfloat factor, GLfloat units, GLfloat clamp); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSpecializeShader (GLuint shader, const GLchar *pEntryPoint, GLuint numSpecializationConstants, const GLuint *pConstantIndex, const GLuint *pConstantValue); +GLAPI void APIENTRY glMultiDrawArraysIndirectCount (GLenum mode, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +GLAPI void APIENTRY glMultiDrawElementsIndirectCount (GLenum mode, GLenum type, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +GLAPI void APIENTRY glPolygonOffsetClamp (GLfloat factor, GLfloat units, GLfloat clamp); +#endif +#endif /* GL_VERSION_4_6 */ + +#ifndef GL_ARB_ES2_compatibility +#define GL_ARB_ES2_compatibility 1 +#endif /* GL_ARB_ES2_compatibility */ + +#ifndef GL_ARB_ES3_1_compatibility +#define GL_ARB_ES3_1_compatibility 1 +#endif /* GL_ARB_ES3_1_compatibility */ + +#ifndef GL_ARB_ES3_2_compatibility +#define GL_ARB_ES3_2_compatibility 1 +#define GL_PRIMITIVE_BOUNDING_BOX_ARB 0x92BE +#define GL_MULTISAMPLE_LINE_WIDTH_RANGE_ARB 0x9381 +#define GL_MULTISAMPLE_LINE_WIDTH_GRANULARITY_ARB 0x9382 +typedef void (APIENTRYP PFNGLPRIMITIVEBOUNDINGBOXARBPROC) (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPrimitiveBoundingBoxARB (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#endif +#endif /* GL_ARB_ES3_2_compatibility */ + +#ifndef GL_ARB_ES3_compatibility +#define GL_ARB_ES3_compatibility 1 +#endif /* GL_ARB_ES3_compatibility */ + +#ifndef GL_ARB_arrays_of_arrays +#define GL_ARB_arrays_of_arrays 1 +#endif /* GL_ARB_arrays_of_arrays */ + +#ifndef GL_ARB_base_instance +#define GL_ARB_base_instance 1 +#endif /* GL_ARB_base_instance */ + +#ifndef GL_ARB_bindless_texture +#define GL_ARB_bindless_texture 1 +typedef uint64_t GLuint64EXT; 
+#define GL_UNSIGNED_INT64_ARB 0x140F +typedef GLuint64 (APIENTRYP PFNGLGETTEXTUREHANDLEARBPROC) (GLuint texture); +typedef GLuint64 (APIENTRYP PFNGLGETTEXTURESAMPLERHANDLEARBPROC) (GLuint texture, GLuint sampler); +typedef void (APIENTRYP PFNGLMAKETEXTUREHANDLERESIDENTARBPROC) (GLuint64 handle); +typedef void (APIENTRYP PFNGLMAKETEXTUREHANDLENONRESIDENTARBPROC) (GLuint64 handle); +typedef GLuint64 (APIENTRYP PFNGLGETIMAGEHANDLEARBPROC) (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +typedef void (APIENTRYP PFNGLMAKEIMAGEHANDLERESIDENTARBPROC) (GLuint64 handle, GLenum access); +typedef void (APIENTRYP PFNGLMAKEIMAGEHANDLENONRESIDENTARBPROC) (GLuint64 handle); +typedef void (APIENTRYP PFNGLUNIFORMHANDLEUI64ARBPROC) (GLint location, GLuint64 value); +typedef void (APIENTRYP PFNGLUNIFORMHANDLEUI64VARBPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64ARBPROC) (GLuint program, GLint location, GLuint64 value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +typedef GLboolean (APIENTRYP PFNGLISTEXTUREHANDLERESIDENTARBPROC) (GLuint64 handle); +typedef GLboolean (APIENTRYP PFNGLISIMAGEHANDLERESIDENTARBPROC) (GLuint64 handle); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1UI64ARBPROC) (GLuint index, GLuint64EXT x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1UI64VARBPROC) (GLuint index, const GLuint64EXT *v); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBLUI64VARBPROC) (GLuint index, GLenum pname, GLuint64EXT *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLuint64 APIENTRY glGetTextureHandleARB (GLuint texture); +GLAPI GLuint64 APIENTRY glGetTextureSamplerHandleARB (GLuint texture, GLuint sampler); +GLAPI void APIENTRY glMakeTextureHandleResidentARB (GLuint64 handle); +GLAPI void APIENTRY glMakeTextureHandleNonResidentARB (GLuint64 handle); +GLAPI GLuint64 APIENTRY glGetImageHandleARB (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +GLAPI void APIENTRY glMakeImageHandleResidentARB (GLuint64 handle, GLenum access); +GLAPI void APIENTRY glMakeImageHandleNonResidentARB (GLuint64 handle); +GLAPI void APIENTRY glUniformHandleui64ARB (GLint location, GLuint64 value); +GLAPI void APIENTRY glUniformHandleui64vARB (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glProgramUniformHandleui64ARB (GLuint program, GLint location, GLuint64 value); +GLAPI void APIENTRY glProgramUniformHandleui64vARB (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +GLAPI GLboolean APIENTRY glIsTextureHandleResidentARB (GLuint64 handle); +GLAPI GLboolean APIENTRY glIsImageHandleResidentARB (GLuint64 handle); +GLAPI void APIENTRY glVertexAttribL1ui64ARB (GLuint index, GLuint64EXT x); +GLAPI void APIENTRY glVertexAttribL1ui64vARB (GLuint index, const GLuint64EXT *v); +GLAPI void APIENTRY glGetVertexAttribLui64vARB (GLuint index, GLenum pname, GLuint64EXT *params); +#endif +#endif /* GL_ARB_bindless_texture */ + +#ifndef GL_ARB_blend_func_extended +#define GL_ARB_blend_func_extended 1 +#endif /* GL_ARB_blend_func_extended */ + +#ifndef GL_ARB_buffer_storage +#define GL_ARB_buffer_storage 1 +#endif /* GL_ARB_buffer_storage */ + +#ifndef GL_ARB_cl_event +#define GL_ARB_cl_event 1 +struct _cl_context; +struct _cl_event; +#define GL_SYNC_CL_EVENT_ARB 0x8240 +#define GL_SYNC_CL_EVENT_COMPLETE_ARB 0x8241 +typedef GLsync (APIENTRYP PFNGLCREATESYNCFROMCLEVENTARBPROC) (struct 
_cl_context *context, struct _cl_event *event, GLbitfield flags); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLsync APIENTRY glCreateSyncFromCLeventARB (struct _cl_context *context, struct _cl_event *event, GLbitfield flags); +#endif +#endif /* GL_ARB_cl_event */ + +#ifndef GL_ARB_clear_buffer_object +#define GL_ARB_clear_buffer_object 1 +#endif /* GL_ARB_clear_buffer_object */ + +#ifndef GL_ARB_clear_texture +#define GL_ARB_clear_texture 1 +#endif /* GL_ARB_clear_texture */ + +#ifndef GL_ARB_clip_control +#define GL_ARB_clip_control 1 +#endif /* GL_ARB_clip_control */ + +#ifndef GL_ARB_compressed_texture_pixel_storage +#define GL_ARB_compressed_texture_pixel_storage 1 +#endif /* GL_ARB_compressed_texture_pixel_storage */ + +#ifndef GL_ARB_compute_shader +#define GL_ARB_compute_shader 1 +#endif /* GL_ARB_compute_shader */ + +#ifndef GL_ARB_compute_variable_group_size +#define GL_ARB_compute_variable_group_size 1 +#define GL_MAX_COMPUTE_VARIABLE_GROUP_INVOCATIONS_ARB 0x9344 +#define GL_MAX_COMPUTE_FIXED_GROUP_INVOCATIONS_ARB 0x90EB +#define GL_MAX_COMPUTE_VARIABLE_GROUP_SIZE_ARB 0x9345 +#define GL_MAX_COMPUTE_FIXED_GROUP_SIZE_ARB 0x91BF +typedef void (APIENTRYP PFNGLDISPATCHCOMPUTEGROUPSIZEARBPROC) (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z, GLuint group_size_x, GLuint group_size_y, GLuint group_size_z); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDispatchComputeGroupSizeARB (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z, GLuint group_size_x, GLuint group_size_y, GLuint group_size_z); +#endif +#endif /* GL_ARB_compute_variable_group_size */ + +#ifndef GL_ARB_conditional_render_inverted +#define GL_ARB_conditional_render_inverted 1 +#endif /* GL_ARB_conditional_render_inverted */ + +#ifndef GL_ARB_conservative_depth +#define GL_ARB_conservative_depth 1 +#endif /* GL_ARB_conservative_depth */ + +#ifndef GL_ARB_copy_buffer +#define GL_ARB_copy_buffer 1 +#endif /* GL_ARB_copy_buffer */ + +#ifndef GL_ARB_copy_image +#define GL_ARB_copy_image 1 +#endif /* GL_ARB_copy_image */ + +#ifndef GL_ARB_cull_distance +#define GL_ARB_cull_distance 1 +#endif /* GL_ARB_cull_distance */ + +#ifndef GL_ARB_debug_output +#define GL_ARB_debug_output 1 +typedef void (APIENTRY *GLDEBUGPROCARB)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam); +#define GL_DEBUG_OUTPUT_SYNCHRONOUS_ARB 0x8242 +#define GL_DEBUG_NEXT_LOGGED_MESSAGE_LENGTH_ARB 0x8243 +#define GL_DEBUG_CALLBACK_FUNCTION_ARB 0x8244 +#define GL_DEBUG_CALLBACK_USER_PARAM_ARB 0x8245 +#define GL_DEBUG_SOURCE_API_ARB 0x8246 +#define GL_DEBUG_SOURCE_WINDOW_SYSTEM_ARB 0x8247 +#define GL_DEBUG_SOURCE_SHADER_COMPILER_ARB 0x8248 +#define GL_DEBUG_SOURCE_THIRD_PARTY_ARB 0x8249 +#define GL_DEBUG_SOURCE_APPLICATION_ARB 0x824A +#define GL_DEBUG_SOURCE_OTHER_ARB 0x824B +#define GL_DEBUG_TYPE_ERROR_ARB 0x824C +#define GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_ARB 0x824D +#define GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_ARB 0x824E +#define GL_DEBUG_TYPE_PORTABILITY_ARB 0x824F +#define GL_DEBUG_TYPE_PERFORMANCE_ARB 0x8250 +#define GL_DEBUG_TYPE_OTHER_ARB 0x8251 +#define GL_MAX_DEBUG_MESSAGE_LENGTH_ARB 0x9143 +#define GL_MAX_DEBUG_LOGGED_MESSAGES_ARB 0x9144 +#define GL_DEBUG_LOGGED_MESSAGES_ARB 0x9145 +#define GL_DEBUG_SEVERITY_HIGH_ARB 0x9146 +#define GL_DEBUG_SEVERITY_MEDIUM_ARB 0x9147 +#define GL_DEBUG_SEVERITY_LOW_ARB 0x9148 +typedef void (APIENTRYP PFNGLDEBUGMESSAGECONTROLARBPROC) (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +typedef 
void (APIENTRYP PFNGLDEBUGMESSAGEINSERTARBPROC) (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +typedef void (APIENTRYP PFNGLDEBUGMESSAGECALLBACKARBPROC) (GLDEBUGPROCARB callback, const void *userParam); +typedef GLuint (APIENTRYP PFNGLGETDEBUGMESSAGELOGARBPROC) (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDebugMessageControlARB (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +GLAPI void APIENTRY glDebugMessageInsertARB (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +GLAPI void APIENTRY glDebugMessageCallbackARB (GLDEBUGPROCARB callback, const void *userParam); +GLAPI GLuint APIENTRY glGetDebugMessageLogARB (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +#endif +#endif /* GL_ARB_debug_output */ + +#ifndef GL_ARB_depth_buffer_float +#define GL_ARB_depth_buffer_float 1 +#endif /* GL_ARB_depth_buffer_float */ + +#ifndef GL_ARB_depth_clamp +#define GL_ARB_depth_clamp 1 +#endif /* GL_ARB_depth_clamp */ + +#ifndef GL_ARB_derivative_control +#define GL_ARB_derivative_control 1 +#endif /* GL_ARB_derivative_control */ + +#ifndef GL_ARB_direct_state_access +#define GL_ARB_direct_state_access 1 +#endif /* GL_ARB_direct_state_access */ + +#ifndef GL_ARB_draw_buffers_blend +#define GL_ARB_draw_buffers_blend 1 +typedef void (APIENTRYP PFNGLBLENDEQUATIONIARBPROC) (GLuint buf, GLenum mode); +typedef void (APIENTRYP PFNGLBLENDEQUATIONSEPARATEIARBPROC) (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (APIENTRYP PFNGLBLENDFUNCIARBPROC) (GLuint buf, GLenum src, GLenum dst); +typedef void (APIENTRYP PFNGLBLENDFUNCSEPARATEIARBPROC) (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendEquationiARB (GLuint buf, GLenum mode); +GLAPI void APIENTRY glBlendEquationSeparateiARB (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GLAPI void APIENTRY glBlendFunciARB (GLuint buf, GLenum src, GLenum dst); +GLAPI void APIENTRY glBlendFuncSeparateiARB (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +#endif +#endif /* GL_ARB_draw_buffers_blend */ + +#ifndef GL_ARB_draw_elements_base_vertex +#define GL_ARB_draw_elements_base_vertex 1 +#endif /* GL_ARB_draw_elements_base_vertex */ + +#ifndef GL_ARB_draw_indirect +#define GL_ARB_draw_indirect 1 +#endif /* GL_ARB_draw_indirect */ + +#ifndef GL_ARB_draw_instanced +#define GL_ARB_draw_instanced 1 +typedef void (APIENTRYP PFNGLDRAWARRAYSINSTANCEDARBPROC) (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDARBPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawArraysInstancedARB (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +GLAPI void APIENTRY glDrawElementsInstancedARB (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#endif +#endif /* GL_ARB_draw_instanced */ + +#ifndef GL_ARB_enhanced_layouts +#define GL_ARB_enhanced_layouts 1 +#endif /* GL_ARB_enhanced_layouts */ + +#ifndef GL_ARB_explicit_attrib_location +#define GL_ARB_explicit_attrib_location 1 +#endif /* GL_ARB_explicit_attrib_location */ 
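+/* Illustrative usage sketch (not part of the upstream Khronos header text):
+ * the PFNGL*PROC typedefs in this header let extension entry points be
+ * resolved at run time through the platform loader, e.g. eglGetProcAddress
+ * (wglGetProcAddress / glXGetProcAddress are the Windows / X11 equivalents).
+ * Assuming an application-defined GLDEBUGPROCARB named myDebugCallback:
+ *
+ *   PFNGLDEBUGMESSAGECALLBACKARBPROC pglDebugMessageCallbackARB =
+ *       (PFNGLDEBUGMESSAGECALLBACKARBPROC) eglGetProcAddress("glDebugMessageCallbackARB");
+ *   if (pglDebugMessageCallbackARB != NULL) {
+ *     pglDebugMessageCallbackARB(myDebugCallback, NULL);
+ *   }
+ *
+ * The direct GLAPI prototypes, by contrast, are only declared when
+ * GL_GLEXT_PROTOTYPES is defined before this header is included. */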
+ +#ifndef GL_ARB_explicit_uniform_location +#define GL_ARB_explicit_uniform_location 1 +#endif /* GL_ARB_explicit_uniform_location */ + +#ifndef GL_ARB_fragment_coord_conventions +#define GL_ARB_fragment_coord_conventions 1 +#endif /* GL_ARB_fragment_coord_conventions */ + +#ifndef GL_ARB_fragment_layer_viewport +#define GL_ARB_fragment_layer_viewport 1 +#endif /* GL_ARB_fragment_layer_viewport */ + +#ifndef GL_ARB_fragment_shader_interlock +#define GL_ARB_fragment_shader_interlock 1 +#endif /* GL_ARB_fragment_shader_interlock */ + +#ifndef GL_ARB_framebuffer_no_attachments +#define GL_ARB_framebuffer_no_attachments 1 +#endif /* GL_ARB_framebuffer_no_attachments */ + +#ifndef GL_ARB_framebuffer_object +#define GL_ARB_framebuffer_object 1 +#endif /* GL_ARB_framebuffer_object */ + +#ifndef GL_ARB_framebuffer_sRGB +#define GL_ARB_framebuffer_sRGB 1 +#endif /* GL_ARB_framebuffer_sRGB */ + +#ifndef GL_ARB_geometry_shader4 +#define GL_ARB_geometry_shader4 1 +#define GL_LINES_ADJACENCY_ARB 0x000A +#define GL_LINE_STRIP_ADJACENCY_ARB 0x000B +#define GL_TRIANGLES_ADJACENCY_ARB 0x000C +#define GL_TRIANGLE_STRIP_ADJACENCY_ARB 0x000D +#define GL_PROGRAM_POINT_SIZE_ARB 0x8642 +#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_ARB 0x8C29 +#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED_ARB 0x8DA7 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_ARB 0x8DA8 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_COUNT_ARB 0x8DA9 +#define GL_GEOMETRY_SHADER_ARB 0x8DD9 +#define GL_GEOMETRY_VERTICES_OUT_ARB 0x8DDA +#define GL_GEOMETRY_INPUT_TYPE_ARB 0x8DDB +#define GL_GEOMETRY_OUTPUT_TYPE_ARB 0x8DDC +#define GL_MAX_GEOMETRY_VARYING_COMPONENTS_ARB 0x8DDD +#define GL_MAX_VERTEX_VARYING_COMPONENTS_ARB 0x8DDE +#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS_ARB 0x8DDF +#define GL_MAX_GEOMETRY_OUTPUT_VERTICES_ARB 0x8DE0 +#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS_ARB 0x8DE1 +typedef void (APIENTRYP PFNGLPROGRAMPARAMETERIARBPROC) (GLuint program, GLenum pname, GLint value); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTUREARBPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERARBPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTUREFACEARBPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLenum face); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glProgramParameteriARB (GLuint program, GLenum pname, GLint value); +GLAPI void APIENTRY glFramebufferTextureARB (GLenum target, GLenum attachment, GLuint texture, GLint level); +GLAPI void APIENTRY glFramebufferTextureLayerARB (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +GLAPI void APIENTRY glFramebufferTextureFaceARB (GLenum target, GLenum attachment, GLuint texture, GLint level, GLenum face); +#endif +#endif /* GL_ARB_geometry_shader4 */ + +#ifndef GL_ARB_get_program_binary +#define GL_ARB_get_program_binary 1 +#endif /* GL_ARB_get_program_binary */ + +#ifndef GL_ARB_get_texture_sub_image +#define GL_ARB_get_texture_sub_image 1 +#endif /* GL_ARB_get_texture_sub_image */ + +#ifndef GL_ARB_gl_spirv +#define GL_ARB_gl_spirv 1 +#define GL_SHADER_BINARY_FORMAT_SPIR_V_ARB 0x9551 +#define GL_SPIR_V_BINARY_ARB 0x9552 +typedef void (APIENTRYP PFNGLSPECIALIZESHADERARBPROC) (GLuint shader, const GLchar *pEntryPoint, GLuint numSpecializationConstants, const GLuint *pConstantIndex, const GLuint *pConstantValue); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSpecializeShaderARB (GLuint shader, const 
GLchar *pEntryPoint, GLuint numSpecializationConstants, const GLuint *pConstantIndex, const GLuint *pConstantValue); +#endif +#endif /* GL_ARB_gl_spirv */ + +#ifndef GL_ARB_gpu_shader5 +#define GL_ARB_gpu_shader5 1 +#endif /* GL_ARB_gpu_shader5 */ + +#ifndef GL_ARB_gpu_shader_fp64 +#define GL_ARB_gpu_shader_fp64 1 +#endif /* GL_ARB_gpu_shader_fp64 */ + +#ifndef GL_ARB_gpu_shader_int64 +#define GL_ARB_gpu_shader_int64 1 +#define GL_INT64_ARB 0x140E +#define GL_INT64_VEC2_ARB 0x8FE9 +#define GL_INT64_VEC3_ARB 0x8FEA +#define GL_INT64_VEC4_ARB 0x8FEB +#define GL_UNSIGNED_INT64_VEC2_ARB 0x8FF5 +#define GL_UNSIGNED_INT64_VEC3_ARB 0x8FF6 +#define GL_UNSIGNED_INT64_VEC4_ARB 0x8FF7 +typedef void (APIENTRYP PFNGLUNIFORM1I64ARBPROC) (GLint location, GLint64 x); +typedef void (APIENTRYP PFNGLUNIFORM2I64ARBPROC) (GLint location, GLint64 x, GLint64 y); +typedef void (APIENTRYP PFNGLUNIFORM3I64ARBPROC) (GLint location, GLint64 x, GLint64 y, GLint64 z); +typedef void (APIENTRYP PFNGLUNIFORM4I64ARBPROC) (GLint location, GLint64 x, GLint64 y, GLint64 z, GLint64 w); +typedef void (APIENTRYP PFNGLUNIFORM1I64VARBPROC) (GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM2I64VARBPROC) (GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM3I64VARBPROC) (GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM4I64VARBPROC) (GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM1UI64ARBPROC) (GLint location, GLuint64 x); +typedef void (APIENTRYP PFNGLUNIFORM2UI64ARBPROC) (GLint location, GLuint64 x, GLuint64 y); +typedef void (APIENTRYP PFNGLUNIFORM3UI64ARBPROC) (GLint location, GLuint64 x, GLuint64 y, GLuint64 z); +typedef void (APIENTRYP PFNGLUNIFORM4UI64ARBPROC) (GLint location, GLuint64 x, GLuint64 y, GLuint64 z, GLuint64 w); +typedef void (APIENTRYP PFNGLUNIFORM1UI64VARBPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM2UI64VARBPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM3UI64VARBPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM4UI64VARBPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLGETUNIFORMI64VARBPROC) (GLuint program, GLint location, GLint64 *params); +typedef void (APIENTRYP PFNGLGETUNIFORMUI64VARBPROC) (GLuint program, GLint location, GLuint64 *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMI64VARBPROC) (GLuint program, GLint location, GLsizei bufSize, GLint64 *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMUI64VARBPROC) (GLuint program, GLint location, GLsizei bufSize, GLuint64 *params); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1I64ARBPROC) (GLuint program, GLint location, GLint64 x); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2I64ARBPROC) (GLuint program, GLint location, GLint64 x, GLint64 y); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3I64ARBPROC) (GLuint program, GLint location, GLint64 x, GLint64 y, GLint64 z); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4I64ARBPROC) (GLuint program, GLint location, GLint64 x, GLint64 y, GLint64 z, GLint64 w); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1I64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2I64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP 
PFNGLPROGRAMUNIFORM3I64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4I64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UI64ARBPROC) (GLuint program, GLint location, GLuint64 x); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UI64ARBPROC) (GLuint program, GLint location, GLuint64 x, GLuint64 y); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UI64ARBPROC) (GLuint program, GLint location, GLuint64 x, GLuint64 y, GLuint64 z); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UI64ARBPROC) (GLuint program, GLint location, GLuint64 x, GLuint64 y, GLuint64 z, GLuint64 w); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UI64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UI64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UI64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UI64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glUniform1i64ARB (GLint location, GLint64 x); +GLAPI void APIENTRY glUniform2i64ARB (GLint location, GLint64 x, GLint64 y); +GLAPI void APIENTRY glUniform3i64ARB (GLint location, GLint64 x, GLint64 y, GLint64 z); +GLAPI void APIENTRY glUniform4i64ARB (GLint location, GLint64 x, GLint64 y, GLint64 z, GLint64 w); +GLAPI void APIENTRY glUniform1i64vARB (GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glUniform2i64vARB (GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glUniform3i64vARB (GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glUniform4i64vARB (GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glUniform1ui64ARB (GLint location, GLuint64 x); +GLAPI void APIENTRY glUniform2ui64ARB (GLint location, GLuint64 x, GLuint64 y); +GLAPI void APIENTRY glUniform3ui64ARB (GLint location, GLuint64 x, GLuint64 y, GLuint64 z); +GLAPI void APIENTRY glUniform4ui64ARB (GLint location, GLuint64 x, GLuint64 y, GLuint64 z, GLuint64 w); +GLAPI void APIENTRY glUniform1ui64vARB (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glUniform2ui64vARB (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glUniform3ui64vARB (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glUniform4ui64vARB (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glGetUniformi64vARB (GLuint program, GLint location, GLint64 *params); +GLAPI void APIENTRY glGetUniformui64vARB (GLuint program, GLint location, GLuint64 *params); +GLAPI void APIENTRY glGetnUniformi64vARB (GLuint program, GLint location, GLsizei bufSize, GLint64 *params); +GLAPI void APIENTRY glGetnUniformui64vARB (GLuint program, GLint location, GLsizei bufSize, GLuint64 *params); +GLAPI void APIENTRY glProgramUniform1i64ARB (GLuint program, GLint location, GLint64 x); +GLAPI void APIENTRY glProgramUniform2i64ARB (GLuint program, GLint location, GLint64 x, GLint64 y); +GLAPI void APIENTRY glProgramUniform3i64ARB (GLuint program, GLint location, GLint64 x, GLint64 y, GLint64 z); +GLAPI void APIENTRY glProgramUniform4i64ARB (GLuint program, GLint location, GLint64 x, GLint64 y, GLint64 z, GLint64 w); 
+GLAPI void APIENTRY glProgramUniform1i64vARB (GLuint program, GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glProgramUniform2i64vARB (GLuint program, GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glProgramUniform3i64vARB (GLuint program, GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glProgramUniform4i64vARB (GLuint program, GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glProgramUniform1ui64ARB (GLuint program, GLint location, GLuint64 x); +GLAPI void APIENTRY glProgramUniform2ui64ARB (GLuint program, GLint location, GLuint64 x, GLuint64 y); +GLAPI void APIENTRY glProgramUniform3ui64ARB (GLuint program, GLint location, GLuint64 x, GLuint64 y, GLuint64 z); +GLAPI void APIENTRY glProgramUniform4ui64ARB (GLuint program, GLint location, GLuint64 x, GLuint64 y, GLuint64 z, GLuint64 w); +GLAPI void APIENTRY glProgramUniform1ui64vARB (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glProgramUniform2ui64vARB (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glProgramUniform3ui64vARB (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glProgramUniform4ui64vARB (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +#endif +#endif /* GL_ARB_gpu_shader_int64 */ + +#ifndef GL_ARB_half_float_vertex +#define GL_ARB_half_float_vertex 1 +#endif /* GL_ARB_half_float_vertex */ + +#ifndef GL_ARB_imaging +#define GL_ARB_imaging 1 +#endif /* GL_ARB_imaging */ + +#ifndef GL_ARB_indirect_parameters +#define GL_ARB_indirect_parameters 1 +#define GL_PARAMETER_BUFFER_ARB 0x80EE +#define GL_PARAMETER_BUFFER_BINDING_ARB 0x80EF +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTCOUNTARBPROC) (GLenum mode, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTCOUNTARBPROC) (GLenum mode, GLenum type, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMultiDrawArraysIndirectCountARB (GLenum mode, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +GLAPI void APIENTRY glMultiDrawElementsIndirectCountARB (GLenum mode, GLenum type, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +#endif +#endif /* GL_ARB_indirect_parameters */ + +#ifndef GL_ARB_instanced_arrays +#define GL_ARB_instanced_arrays 1 +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR_ARB 0x88FE +typedef void (APIENTRYP PFNGLVERTEXATTRIBDIVISORARBPROC) (GLuint index, GLuint divisor); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexAttribDivisorARB (GLuint index, GLuint divisor); +#endif +#endif /* GL_ARB_instanced_arrays */ + +#ifndef GL_ARB_internalformat_query +#define GL_ARB_internalformat_query 1 +#endif /* GL_ARB_internalformat_query */ + +#ifndef GL_ARB_internalformat_query2 +#define GL_ARB_internalformat_query2 1 +#define GL_SRGB_DECODE_ARB 0x8299 +#endif /* GL_ARB_internalformat_query2 */ + +#ifndef GL_ARB_invalidate_subdata +#define GL_ARB_invalidate_subdata 1 +#endif /* GL_ARB_invalidate_subdata */ + +#ifndef GL_ARB_map_buffer_alignment +#define GL_ARB_map_buffer_alignment 1 +#endif /* GL_ARB_map_buffer_alignment */ + +#ifndef GL_ARB_map_buffer_range +#define GL_ARB_map_buffer_range 1 +#endif /* GL_ARB_map_buffer_range */ + +#ifndef GL_ARB_multi_bind +#define 
GL_ARB_multi_bind 1 +#endif /* GL_ARB_multi_bind */ + +#ifndef GL_ARB_multi_draw_indirect +#define GL_ARB_multi_draw_indirect 1 +#endif /* GL_ARB_multi_draw_indirect */ + +#ifndef GL_ARB_occlusion_query2 +#define GL_ARB_occlusion_query2 1 +#endif /* GL_ARB_occlusion_query2 */ + +#ifndef GL_ARB_parallel_shader_compile +#define GL_ARB_parallel_shader_compile 1 +#define GL_MAX_SHADER_COMPILER_THREADS_ARB 0x91B0 +#define GL_COMPLETION_STATUS_ARB 0x91B1 +typedef void (APIENTRYP PFNGLMAXSHADERCOMPILERTHREADSARBPROC) (GLuint count); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMaxShaderCompilerThreadsARB (GLuint count); +#endif +#endif /* GL_ARB_parallel_shader_compile */ + +#ifndef GL_ARB_pipeline_statistics_query +#define GL_ARB_pipeline_statistics_query 1 +#define GL_VERTICES_SUBMITTED_ARB 0x82EE +#define GL_PRIMITIVES_SUBMITTED_ARB 0x82EF +#define GL_VERTEX_SHADER_INVOCATIONS_ARB 0x82F0 +#define GL_TESS_CONTROL_SHADER_PATCHES_ARB 0x82F1 +#define GL_TESS_EVALUATION_SHADER_INVOCATIONS_ARB 0x82F2 +#define GL_GEOMETRY_SHADER_PRIMITIVES_EMITTED_ARB 0x82F3 +#define GL_FRAGMENT_SHADER_INVOCATIONS_ARB 0x82F4 +#define GL_COMPUTE_SHADER_INVOCATIONS_ARB 0x82F5 +#define GL_CLIPPING_INPUT_PRIMITIVES_ARB 0x82F6 +#define GL_CLIPPING_OUTPUT_PRIMITIVES_ARB 0x82F7 +#endif /* GL_ARB_pipeline_statistics_query */ + +#ifndef GL_ARB_pixel_buffer_object +#define GL_ARB_pixel_buffer_object 1 +#define GL_PIXEL_PACK_BUFFER_ARB 0x88EB +#define GL_PIXEL_UNPACK_BUFFER_ARB 0x88EC +#define GL_PIXEL_PACK_BUFFER_BINDING_ARB 0x88ED +#define GL_PIXEL_UNPACK_BUFFER_BINDING_ARB 0x88EF +#endif /* GL_ARB_pixel_buffer_object */ + +#ifndef GL_ARB_polygon_offset_clamp +#define GL_ARB_polygon_offset_clamp 1 +#endif /* GL_ARB_polygon_offset_clamp */ + +#ifndef GL_ARB_post_depth_coverage +#define GL_ARB_post_depth_coverage 1 +#endif /* GL_ARB_post_depth_coverage */ + +#ifndef GL_ARB_program_interface_query +#define GL_ARB_program_interface_query 1 +#endif /* GL_ARB_program_interface_query */ + +#ifndef GL_ARB_provoking_vertex +#define GL_ARB_provoking_vertex 1 +#endif /* GL_ARB_provoking_vertex */ + +#ifndef GL_ARB_query_buffer_object +#define GL_ARB_query_buffer_object 1 +#endif /* GL_ARB_query_buffer_object */ + +#ifndef GL_ARB_robust_buffer_access_behavior +#define GL_ARB_robust_buffer_access_behavior 1 +#endif /* GL_ARB_robust_buffer_access_behavior */ + +#ifndef GL_ARB_robustness +#define GL_ARB_robustness 1 +#define GL_CONTEXT_FLAG_ROBUST_ACCESS_BIT_ARB 0x00000004 +#define GL_LOSE_CONTEXT_ON_RESET_ARB 0x8252 +#define GL_GUILTY_CONTEXT_RESET_ARB 0x8253 +#define GL_INNOCENT_CONTEXT_RESET_ARB 0x8254 +#define GL_UNKNOWN_CONTEXT_RESET_ARB 0x8255 +#define GL_RESET_NOTIFICATION_STRATEGY_ARB 0x8256 +#define GL_NO_RESET_NOTIFICATION_ARB 0x8261 +typedef GLenum (APIENTRYP PFNGLGETGRAPHICSRESETSTATUSARBPROC) (void); +typedef void (APIENTRYP PFNGLGETNTEXIMAGEARBPROC) (GLenum target, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *img); +typedef void (APIENTRYP PFNGLREADNPIXELSARBPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (APIENTRYP PFNGLGETNCOMPRESSEDTEXIMAGEARBPROC) (GLenum target, GLint lod, GLsizei bufSize, void *img); +typedef void (APIENTRYP PFNGLGETNUNIFORMFVARBPROC) (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMIVARBPROC) (GLuint program, GLint location, GLsizei bufSize, GLint *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMUIVARBPROC) (GLuint program, GLint location, 
GLsizei bufSize, GLuint *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMDVARBPROC) (GLuint program, GLint location, GLsizei bufSize, GLdouble *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLenum APIENTRY glGetGraphicsResetStatusARB (void); +GLAPI void APIENTRY glGetnTexImageARB (GLenum target, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *img); +GLAPI void APIENTRY glReadnPixelsARB (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GLAPI void APIENTRY glGetnCompressedTexImageARB (GLenum target, GLint lod, GLsizei bufSize, void *img); +GLAPI void APIENTRY glGetnUniformfvARB (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GLAPI void APIENTRY glGetnUniformivARB (GLuint program, GLint location, GLsizei bufSize, GLint *params); +GLAPI void APIENTRY glGetnUniformuivARB (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +GLAPI void APIENTRY glGetnUniformdvARB (GLuint program, GLint location, GLsizei bufSize, GLdouble *params); +#endif +#endif /* GL_ARB_robustness */ + +#ifndef GL_ARB_robustness_isolation +#define GL_ARB_robustness_isolation 1 +#endif /* GL_ARB_robustness_isolation */ + +#ifndef GL_ARB_sample_locations +#define GL_ARB_sample_locations 1 +#define GL_SAMPLE_LOCATION_SUBPIXEL_BITS_ARB 0x933D +#define GL_SAMPLE_LOCATION_PIXEL_GRID_WIDTH_ARB 0x933E +#define GL_SAMPLE_LOCATION_PIXEL_GRID_HEIGHT_ARB 0x933F +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_TABLE_SIZE_ARB 0x9340 +#define GL_SAMPLE_LOCATION_ARB 0x8E50 +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_ARB 0x9341 +#define GL_FRAMEBUFFER_PROGRAMMABLE_SAMPLE_LOCATIONS_ARB 0x9342 +#define GL_FRAMEBUFFER_SAMPLE_LOCATION_PIXEL_GRID_ARB 0x9343 +typedef void (APIENTRYP PFNGLFRAMEBUFFERSAMPLELOCATIONSFVARBPROC) (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERSAMPLELOCATIONSFVARBPROC) (GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLEVALUATEDEPTHVALUESARBPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFramebufferSampleLocationsfvARB (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glNamedFramebufferSampleLocationsfvARB (GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glEvaluateDepthValuesARB (void); +#endif +#endif /* GL_ARB_sample_locations */ + +#ifndef GL_ARB_sample_shading +#define GL_ARB_sample_shading 1 +#define GL_SAMPLE_SHADING_ARB 0x8C36 +#define GL_MIN_SAMPLE_SHADING_VALUE_ARB 0x8C37 +typedef void (APIENTRYP PFNGLMINSAMPLESHADINGARBPROC) (GLfloat value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMinSampleShadingARB (GLfloat value); +#endif +#endif /* GL_ARB_sample_shading */ + +#ifndef GL_ARB_sampler_objects +#define GL_ARB_sampler_objects 1 +#endif /* GL_ARB_sampler_objects */ + +#ifndef GL_ARB_seamless_cube_map +#define GL_ARB_seamless_cube_map 1 +#endif /* GL_ARB_seamless_cube_map */ + +#ifndef GL_ARB_seamless_cubemap_per_texture +#define GL_ARB_seamless_cubemap_per_texture 1 +#endif /* GL_ARB_seamless_cubemap_per_texture */ + +#ifndef GL_ARB_separate_shader_objects +#define GL_ARB_separate_shader_objects 1 +#endif /* GL_ARB_separate_shader_objects */ + +#ifndef GL_ARB_shader_atomic_counter_ops +#define GL_ARB_shader_atomic_counter_ops 1 +#endif /* GL_ARB_shader_atomic_counter_ops */ + +#ifndef GL_ARB_shader_atomic_counters +#define GL_ARB_shader_atomic_counters 1 +#endif /* GL_ARB_shader_atomic_counters */ + 
+#ifndef GL_ARB_shader_ballot +#define GL_ARB_shader_ballot 1 +#endif /* GL_ARB_shader_ballot */ + +#ifndef GL_ARB_shader_bit_encoding +#define GL_ARB_shader_bit_encoding 1 +#endif /* GL_ARB_shader_bit_encoding */ + +#ifndef GL_ARB_shader_clock +#define GL_ARB_shader_clock 1 +#endif /* GL_ARB_shader_clock */ + +#ifndef GL_ARB_shader_draw_parameters +#define GL_ARB_shader_draw_parameters 1 +#endif /* GL_ARB_shader_draw_parameters */ + +#ifndef GL_ARB_shader_group_vote +#define GL_ARB_shader_group_vote 1 +#endif /* GL_ARB_shader_group_vote */ + +#ifndef GL_ARB_shader_image_load_store +#define GL_ARB_shader_image_load_store 1 +#endif /* GL_ARB_shader_image_load_store */ + +#ifndef GL_ARB_shader_image_size +#define GL_ARB_shader_image_size 1 +#endif /* GL_ARB_shader_image_size */ + +#ifndef GL_ARB_shader_precision +#define GL_ARB_shader_precision 1 +#endif /* GL_ARB_shader_precision */ + +#ifndef GL_ARB_shader_stencil_export +#define GL_ARB_shader_stencil_export 1 +#endif /* GL_ARB_shader_stencil_export */ + +#ifndef GL_ARB_shader_storage_buffer_object +#define GL_ARB_shader_storage_buffer_object 1 +#endif /* GL_ARB_shader_storage_buffer_object */ + +#ifndef GL_ARB_shader_subroutine +#define GL_ARB_shader_subroutine 1 +#endif /* GL_ARB_shader_subroutine */ + +#ifndef GL_ARB_shader_texture_image_samples +#define GL_ARB_shader_texture_image_samples 1 +#endif /* GL_ARB_shader_texture_image_samples */ + +#ifndef GL_ARB_shader_viewport_layer_array +#define GL_ARB_shader_viewport_layer_array 1 +#endif /* GL_ARB_shader_viewport_layer_array */ + +#ifndef GL_ARB_shading_language_420pack +#define GL_ARB_shading_language_420pack 1 +#endif /* GL_ARB_shading_language_420pack */ + +#ifndef GL_ARB_shading_language_include +#define GL_ARB_shading_language_include 1 +#define GL_SHADER_INCLUDE_ARB 0x8DAE +#define GL_NAMED_STRING_LENGTH_ARB 0x8DE9 +#define GL_NAMED_STRING_TYPE_ARB 0x8DEA +typedef void (APIENTRYP PFNGLNAMEDSTRINGARBPROC) (GLenum type, GLint namelen, const GLchar *name, GLint stringlen, const GLchar *string); +typedef void (APIENTRYP PFNGLDELETENAMEDSTRINGARBPROC) (GLint namelen, const GLchar *name); +typedef void (APIENTRYP PFNGLCOMPILESHADERINCLUDEARBPROC) (GLuint shader, GLsizei count, const GLchar *const*path, const GLint *length); +typedef GLboolean (APIENTRYP PFNGLISNAMEDSTRINGARBPROC) (GLint namelen, const GLchar *name); +typedef void (APIENTRYP PFNGLGETNAMEDSTRINGARBPROC) (GLint namelen, const GLchar *name, GLsizei bufSize, GLint *stringlen, GLchar *string); +typedef void (APIENTRYP PFNGLGETNAMEDSTRINGIVARBPROC) (GLint namelen, const GLchar *name, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glNamedStringARB (GLenum type, GLint namelen, const GLchar *name, GLint stringlen, const GLchar *string); +GLAPI void APIENTRY glDeleteNamedStringARB (GLint namelen, const GLchar *name); +GLAPI void APIENTRY glCompileShaderIncludeARB (GLuint shader, GLsizei count, const GLchar *const*path, const GLint *length); +GLAPI GLboolean APIENTRY glIsNamedStringARB (GLint namelen, const GLchar *name); +GLAPI void APIENTRY glGetNamedStringARB (GLint namelen, const GLchar *name, GLsizei bufSize, GLint *stringlen, GLchar *string); +GLAPI void APIENTRY glGetNamedStringivARB (GLint namelen, const GLchar *name, GLenum pname, GLint *params); +#endif +#endif /* GL_ARB_shading_language_include */ + +#ifndef GL_ARB_shading_language_packing +#define GL_ARB_shading_language_packing 1 +#endif /* GL_ARB_shading_language_packing */ + +#ifndef GL_ARB_sparse_buffer +#define GL_ARB_sparse_buffer 
1 +#define GL_SPARSE_STORAGE_BIT_ARB 0x0400 +#define GL_SPARSE_BUFFER_PAGE_SIZE_ARB 0x82F8 +typedef void (APIENTRYP PFNGLBUFFERPAGECOMMITMENTARBPROC) (GLenum target, GLintptr offset, GLsizeiptr size, GLboolean commit); +typedef void (APIENTRYP PFNGLNAMEDBUFFERPAGECOMMITMENTEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, GLboolean commit); +typedef void (APIENTRYP PFNGLNAMEDBUFFERPAGECOMMITMENTARBPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, GLboolean commit); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBufferPageCommitmentARB (GLenum target, GLintptr offset, GLsizeiptr size, GLboolean commit); +GLAPI void APIENTRY glNamedBufferPageCommitmentEXT (GLuint buffer, GLintptr offset, GLsizeiptr size, GLboolean commit); +GLAPI void APIENTRY glNamedBufferPageCommitmentARB (GLuint buffer, GLintptr offset, GLsizeiptr size, GLboolean commit); +#endif +#endif /* GL_ARB_sparse_buffer */ + +#ifndef GL_ARB_sparse_texture +#define GL_ARB_sparse_texture 1 +#define GL_TEXTURE_SPARSE_ARB 0x91A6 +#define GL_VIRTUAL_PAGE_SIZE_INDEX_ARB 0x91A7 +#define GL_NUM_SPARSE_LEVELS_ARB 0x91AA +#define GL_NUM_VIRTUAL_PAGE_SIZES_ARB 0x91A8 +#define GL_VIRTUAL_PAGE_SIZE_X_ARB 0x9195 +#define GL_VIRTUAL_PAGE_SIZE_Y_ARB 0x9196 +#define GL_VIRTUAL_PAGE_SIZE_Z_ARB 0x9197 +#define GL_MAX_SPARSE_TEXTURE_SIZE_ARB 0x9198 +#define GL_MAX_SPARSE_3D_TEXTURE_SIZE_ARB 0x9199 +#define GL_MAX_SPARSE_ARRAY_TEXTURE_LAYERS_ARB 0x919A +#define GL_SPARSE_TEXTURE_FULL_ARRAY_CUBE_MIPMAPS_ARB 0x91A9 +typedef void (APIENTRYP PFNGLTEXPAGECOMMITMENTARBPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean commit); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexPageCommitmentARB (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean commit); +#endif +#endif /* GL_ARB_sparse_texture */ + +#ifndef GL_ARB_sparse_texture2 +#define GL_ARB_sparse_texture2 1 +#endif /* GL_ARB_sparse_texture2 */ + +#ifndef GL_ARB_sparse_texture_clamp +#define GL_ARB_sparse_texture_clamp 1 +#endif /* GL_ARB_sparse_texture_clamp */ + +#ifndef GL_ARB_spirv_extensions +#define GL_ARB_spirv_extensions 1 +#endif /* GL_ARB_spirv_extensions */ + +#ifndef GL_ARB_stencil_texturing +#define GL_ARB_stencil_texturing 1 +#endif /* GL_ARB_stencil_texturing */ + +#ifndef GL_ARB_sync +#define GL_ARB_sync 1 +#endif /* GL_ARB_sync */ + +#ifndef GL_ARB_tessellation_shader +#define GL_ARB_tessellation_shader 1 +#endif /* GL_ARB_tessellation_shader */ + +#ifndef GL_ARB_texture_barrier +#define GL_ARB_texture_barrier 1 +#endif /* GL_ARB_texture_barrier */ + +#ifndef GL_ARB_texture_border_clamp +#define GL_ARB_texture_border_clamp 1 +#define GL_CLAMP_TO_BORDER_ARB 0x812D +#endif /* GL_ARB_texture_border_clamp */ + +#ifndef GL_ARB_texture_buffer_object +#define GL_ARB_texture_buffer_object 1 +#define GL_TEXTURE_BUFFER_ARB 0x8C2A +#define GL_MAX_TEXTURE_BUFFER_SIZE_ARB 0x8C2B +#define GL_TEXTURE_BINDING_BUFFER_ARB 0x8C2C +#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING_ARB 0x8C2D +#define GL_TEXTURE_BUFFER_FORMAT_ARB 0x8C2E +typedef void (APIENTRYP PFNGLTEXBUFFERARBPROC) (GLenum target, GLenum internalformat, GLuint buffer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexBufferARB (GLenum target, GLenum internalformat, GLuint buffer); +#endif +#endif /* GL_ARB_texture_buffer_object */ + +#ifndef GL_ARB_texture_buffer_object_rgb32 +#define GL_ARB_texture_buffer_object_rgb32 1 +#endif /* 
GL_ARB_texture_buffer_object_rgb32 */ + +#ifndef GL_ARB_texture_buffer_range +#define GL_ARB_texture_buffer_range 1 +#endif /* GL_ARB_texture_buffer_range */ + +#ifndef GL_ARB_texture_compression_bptc +#define GL_ARB_texture_compression_bptc 1 +#define GL_COMPRESSED_RGBA_BPTC_UNORM_ARB 0x8E8C +#define GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM_ARB 0x8E8D +#define GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT_ARB 0x8E8E +#define GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_ARB 0x8E8F +#endif /* GL_ARB_texture_compression_bptc */ + +#ifndef GL_ARB_texture_compression_rgtc +#define GL_ARB_texture_compression_rgtc 1 +#endif /* GL_ARB_texture_compression_rgtc */ + +#ifndef GL_ARB_texture_cube_map_array +#define GL_ARB_texture_cube_map_array 1 +#define GL_TEXTURE_CUBE_MAP_ARRAY_ARB 0x9009 +#define GL_TEXTURE_BINDING_CUBE_MAP_ARRAY_ARB 0x900A +#define GL_PROXY_TEXTURE_CUBE_MAP_ARRAY_ARB 0x900B +#define GL_SAMPLER_CUBE_MAP_ARRAY_ARB 0x900C +#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW_ARB 0x900D +#define GL_INT_SAMPLER_CUBE_MAP_ARRAY_ARB 0x900E +#define GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY_ARB 0x900F +#endif /* GL_ARB_texture_cube_map_array */ + +#ifndef GL_ARB_texture_filter_anisotropic +#define GL_ARB_texture_filter_anisotropic 1 +#endif /* GL_ARB_texture_filter_anisotropic */ + +#ifndef GL_ARB_texture_filter_minmax +#define GL_ARB_texture_filter_minmax 1 +#define GL_TEXTURE_REDUCTION_MODE_ARB 0x9366 +#define GL_WEIGHTED_AVERAGE_ARB 0x9367 +#endif /* GL_ARB_texture_filter_minmax */ + +#ifndef GL_ARB_texture_gather +#define GL_ARB_texture_gather 1 +#define GL_MIN_PROGRAM_TEXTURE_GATHER_OFFSET_ARB 0x8E5E +#define GL_MAX_PROGRAM_TEXTURE_GATHER_OFFSET_ARB 0x8E5F +#define GL_MAX_PROGRAM_TEXTURE_GATHER_COMPONENTS_ARB 0x8F9F +#endif /* GL_ARB_texture_gather */ + +#ifndef GL_ARB_texture_mirror_clamp_to_edge +#define GL_ARB_texture_mirror_clamp_to_edge 1 +#endif /* GL_ARB_texture_mirror_clamp_to_edge */ + +#ifndef GL_ARB_texture_mirrored_repeat +#define GL_ARB_texture_mirrored_repeat 1 +#define GL_MIRRORED_REPEAT_ARB 0x8370 +#endif /* GL_ARB_texture_mirrored_repeat */ + +#ifndef GL_ARB_texture_multisample +#define GL_ARB_texture_multisample 1 +#endif /* GL_ARB_texture_multisample */ + +#ifndef GL_ARB_texture_non_power_of_two +#define GL_ARB_texture_non_power_of_two 1 +#endif /* GL_ARB_texture_non_power_of_two */ + +#ifndef GL_ARB_texture_query_levels +#define GL_ARB_texture_query_levels 1 +#endif /* GL_ARB_texture_query_levels */ + +#ifndef GL_ARB_texture_query_lod +#define GL_ARB_texture_query_lod 1 +#endif /* GL_ARB_texture_query_lod */ + +#ifndef GL_ARB_texture_rg +#define GL_ARB_texture_rg 1 +#endif /* GL_ARB_texture_rg */ + +#ifndef GL_ARB_texture_rgb10_a2ui +#define GL_ARB_texture_rgb10_a2ui 1 +#endif /* GL_ARB_texture_rgb10_a2ui */ + +#ifndef GL_ARB_texture_stencil8 +#define GL_ARB_texture_stencil8 1 +#endif /* GL_ARB_texture_stencil8 */ + +#ifndef GL_ARB_texture_storage +#define GL_ARB_texture_storage 1 +#endif /* GL_ARB_texture_storage */ + +#ifndef GL_ARB_texture_storage_multisample +#define GL_ARB_texture_storage_multisample 1 +#endif /* GL_ARB_texture_storage_multisample */ + +#ifndef GL_ARB_texture_swizzle +#define GL_ARB_texture_swizzle 1 +#endif /* GL_ARB_texture_swizzle */ + +#ifndef GL_ARB_texture_view +#define GL_ARB_texture_view 1 +#endif /* GL_ARB_texture_view */ + +#ifndef GL_ARB_timer_query +#define GL_ARB_timer_query 1 +#endif /* GL_ARB_timer_query */ + +#ifndef GL_ARB_transform_feedback2 +#define GL_ARB_transform_feedback2 1 +#endif /* GL_ARB_transform_feedback2 */ + +#ifndef GL_ARB_transform_feedback3 
+#define GL_ARB_transform_feedback3 1 +#endif /* GL_ARB_transform_feedback3 */ + +#ifndef GL_ARB_transform_feedback_instanced +#define GL_ARB_transform_feedback_instanced 1 +#endif /* GL_ARB_transform_feedback_instanced */ + +#ifndef GL_ARB_transform_feedback_overflow_query +#define GL_ARB_transform_feedback_overflow_query 1 +#define GL_TRANSFORM_FEEDBACK_OVERFLOW_ARB 0x82EC +#define GL_TRANSFORM_FEEDBACK_STREAM_OVERFLOW_ARB 0x82ED +#endif /* GL_ARB_transform_feedback_overflow_query */ + +#ifndef GL_ARB_uniform_buffer_object +#define GL_ARB_uniform_buffer_object 1 +#endif /* GL_ARB_uniform_buffer_object */ + +#ifndef GL_ARB_vertex_array_bgra +#define GL_ARB_vertex_array_bgra 1 +#endif /* GL_ARB_vertex_array_bgra */ + +#ifndef GL_ARB_vertex_array_object +#define GL_ARB_vertex_array_object 1 +#endif /* GL_ARB_vertex_array_object */ + +#ifndef GL_ARB_vertex_attrib_64bit +#define GL_ARB_vertex_attrib_64bit 1 +#endif /* GL_ARB_vertex_attrib_64bit */ + +#ifndef GL_ARB_vertex_attrib_binding +#define GL_ARB_vertex_attrib_binding 1 +#endif /* GL_ARB_vertex_attrib_binding */ + +#ifndef GL_ARB_vertex_type_10f_11f_11f_rev +#define GL_ARB_vertex_type_10f_11f_11f_rev 1 +#endif /* GL_ARB_vertex_type_10f_11f_11f_rev */ + +#ifndef GL_ARB_vertex_type_2_10_10_10_rev +#define GL_ARB_vertex_type_2_10_10_10_rev 1 +#endif /* GL_ARB_vertex_type_2_10_10_10_rev */ + +#ifndef GL_ARB_viewport_array +#define GL_ARB_viewport_array 1 +#endif /* GL_ARB_viewport_array */ + +#ifndef GL_KHR_blend_equation_advanced +#define GL_KHR_blend_equation_advanced 1 +#define GL_MULTIPLY_KHR 0x9294 +#define GL_SCREEN_KHR 0x9295 +#define GL_OVERLAY_KHR 0x9296 +#define GL_DARKEN_KHR 0x9297 +#define GL_LIGHTEN_KHR 0x9298 +#define GL_COLORDODGE_KHR 0x9299 +#define GL_COLORBURN_KHR 0x929A +#define GL_HARDLIGHT_KHR 0x929B +#define GL_SOFTLIGHT_KHR 0x929C +#define GL_DIFFERENCE_KHR 0x929E +#define GL_EXCLUSION_KHR 0x92A0 +#define GL_HSL_HUE_KHR 0x92AD +#define GL_HSL_SATURATION_KHR 0x92AE +#define GL_HSL_COLOR_KHR 0x92AF +#define GL_HSL_LUMINOSITY_KHR 0x92B0 +typedef void (APIENTRYP PFNGLBLENDBARRIERKHRPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendBarrierKHR (void); +#endif +#endif /* GL_KHR_blend_equation_advanced */ + +#ifndef GL_KHR_blend_equation_advanced_coherent +#define GL_KHR_blend_equation_advanced_coherent 1 +#define GL_BLEND_ADVANCED_COHERENT_KHR 0x9285 +#endif /* GL_KHR_blend_equation_advanced_coherent */ + +#ifndef GL_KHR_context_flush_control +#define GL_KHR_context_flush_control 1 +#endif /* GL_KHR_context_flush_control */ + +#ifndef GL_KHR_debug +#define GL_KHR_debug 1 +#endif /* GL_KHR_debug */ + +#ifndef GL_KHR_no_error +#define GL_KHR_no_error 1 +#define GL_CONTEXT_FLAG_NO_ERROR_BIT_KHR 0x00000008 +#endif /* GL_KHR_no_error */ + +#ifndef GL_KHR_parallel_shader_compile +#define GL_KHR_parallel_shader_compile 1 +#define GL_MAX_SHADER_COMPILER_THREADS_KHR 0x91B0 +#define GL_COMPLETION_STATUS_KHR 0x91B1 +typedef void (APIENTRYP PFNGLMAXSHADERCOMPILERTHREADSKHRPROC) (GLuint count); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMaxShaderCompilerThreadsKHR (GLuint count); +#endif +#endif /* GL_KHR_parallel_shader_compile */ + +#ifndef GL_KHR_robust_buffer_access_behavior +#define GL_KHR_robust_buffer_access_behavior 1 +#endif /* GL_KHR_robust_buffer_access_behavior */ + +#ifndef GL_KHR_robustness +#define GL_KHR_robustness 1 +#define GL_CONTEXT_ROBUST_ACCESS 0x90F3 +#endif /* GL_KHR_robustness */ + +#ifndef GL_KHR_texture_compression_astc_hdr +#define GL_KHR_texture_compression_astc_hdr 1 +#define 
GL_COMPRESSED_RGBA_ASTC_4x4_KHR 0x93B0 +#define GL_COMPRESSED_RGBA_ASTC_5x4_KHR 0x93B1 +#define GL_COMPRESSED_RGBA_ASTC_5x5_KHR 0x93B2 +#define GL_COMPRESSED_RGBA_ASTC_6x5_KHR 0x93B3 +#define GL_COMPRESSED_RGBA_ASTC_6x6_KHR 0x93B4 +#define GL_COMPRESSED_RGBA_ASTC_8x5_KHR 0x93B5 +#define GL_COMPRESSED_RGBA_ASTC_8x6_KHR 0x93B6 +#define GL_COMPRESSED_RGBA_ASTC_8x8_KHR 0x93B7 +#define GL_COMPRESSED_RGBA_ASTC_10x5_KHR 0x93B8 +#define GL_COMPRESSED_RGBA_ASTC_10x6_KHR 0x93B9 +#define GL_COMPRESSED_RGBA_ASTC_10x8_KHR 0x93BA +#define GL_COMPRESSED_RGBA_ASTC_10x10_KHR 0x93BB +#define GL_COMPRESSED_RGBA_ASTC_12x10_KHR 0x93BC +#define GL_COMPRESSED_RGBA_ASTC_12x12_KHR 0x93BD +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR 0x93D0 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR 0x93D1 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR 0x93D2 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR 0x93D3 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR 0x93D4 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR 0x93D5 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR 0x93D6 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR 0x93D7 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR 0x93D8 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR 0x93D9 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR 0x93DA +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR 0x93DB +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR 0x93DC +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR 0x93DD +#endif /* GL_KHR_texture_compression_astc_hdr */ + +#ifndef GL_KHR_texture_compression_astc_ldr +#define GL_KHR_texture_compression_astc_ldr 1 +#endif /* GL_KHR_texture_compression_astc_ldr */ + +#ifndef GL_KHR_texture_compression_astc_sliced_3d +#define GL_KHR_texture_compression_astc_sliced_3d 1 +#endif /* GL_KHR_texture_compression_astc_sliced_3d */ + +#ifndef GL_AMD_performance_monitor +#define GL_AMD_performance_monitor 1 +#define GL_COUNTER_TYPE_AMD 0x8BC0 +#define GL_COUNTER_RANGE_AMD 0x8BC1 +#define GL_UNSIGNED_INT64_AMD 0x8BC2 +#define GL_PERCENTAGE_AMD 0x8BC3 +#define GL_PERFMON_RESULT_AVAILABLE_AMD 0x8BC4 +#define GL_PERFMON_RESULT_SIZE_AMD 0x8BC5 +#define GL_PERFMON_RESULT_AMD 0x8BC6 +typedef void (APIENTRYP PFNGLGETPERFMONITORGROUPSAMDPROC) (GLint *numGroups, GLsizei groupsSize, GLuint *groups); +typedef void (APIENTRYP PFNGLGETPERFMONITORCOUNTERSAMDPROC) (GLuint group, GLint *numCounters, GLint *maxActiveCounters, GLsizei counterSize, GLuint *counters); +typedef void (APIENTRYP PFNGLGETPERFMONITORGROUPSTRINGAMDPROC) (GLuint group, GLsizei bufSize, GLsizei *length, GLchar *groupString); +typedef void (APIENTRYP PFNGLGETPERFMONITORCOUNTERSTRINGAMDPROC) (GLuint group, GLuint counter, GLsizei bufSize, GLsizei *length, GLchar *counterString); +typedef void (APIENTRYP PFNGLGETPERFMONITORCOUNTERINFOAMDPROC) (GLuint group, GLuint counter, GLenum pname, void *data); +typedef void (APIENTRYP PFNGLGENPERFMONITORSAMDPROC) (GLsizei n, GLuint *monitors); +typedef void (APIENTRYP PFNGLDELETEPERFMONITORSAMDPROC) (GLsizei n, GLuint *monitors); +typedef void (APIENTRYP PFNGLSELECTPERFMONITORCOUNTERSAMDPROC) (GLuint monitor, GLboolean enable, GLuint group, GLint numCounters, GLuint *counterList); +typedef void (APIENTRYP PFNGLBEGINPERFMONITORAMDPROC) (GLuint monitor); +typedef void (APIENTRYP PFNGLENDPERFMONITORAMDPROC) (GLuint monitor); +typedef void (APIENTRYP PFNGLGETPERFMONITORCOUNTERDATAAMDPROC) (GLuint monitor, GLenum pname, GLsizei dataSize, GLuint *data, GLint *bytesWritten); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGetPerfMonitorGroupsAMD 
(GLint *numGroups, GLsizei groupsSize, GLuint *groups); +GLAPI void APIENTRY glGetPerfMonitorCountersAMD (GLuint group, GLint *numCounters, GLint *maxActiveCounters, GLsizei counterSize, GLuint *counters); +GLAPI void APIENTRY glGetPerfMonitorGroupStringAMD (GLuint group, GLsizei bufSize, GLsizei *length, GLchar *groupString); +GLAPI void APIENTRY glGetPerfMonitorCounterStringAMD (GLuint group, GLuint counter, GLsizei bufSize, GLsizei *length, GLchar *counterString); +GLAPI void APIENTRY glGetPerfMonitorCounterInfoAMD (GLuint group, GLuint counter, GLenum pname, void *data); +GLAPI void APIENTRY glGenPerfMonitorsAMD (GLsizei n, GLuint *monitors); +GLAPI void APIENTRY glDeletePerfMonitorsAMD (GLsizei n, GLuint *monitors); +GLAPI void APIENTRY glSelectPerfMonitorCountersAMD (GLuint monitor, GLboolean enable, GLuint group, GLint numCounters, GLuint *counterList); +GLAPI void APIENTRY glBeginPerfMonitorAMD (GLuint monitor); +GLAPI void APIENTRY glEndPerfMonitorAMD (GLuint monitor); +GLAPI void APIENTRY glGetPerfMonitorCounterDataAMD (GLuint monitor, GLenum pname, GLsizei dataSize, GLuint *data, GLint *bytesWritten); +#endif +#endif /* GL_AMD_performance_monitor */ + +#ifndef GL_APPLE_rgb_422 +#define GL_APPLE_rgb_422 1 +#define GL_RGB_422_APPLE 0x8A1F +#define GL_UNSIGNED_SHORT_8_8_APPLE 0x85BA +#define GL_UNSIGNED_SHORT_8_8_REV_APPLE 0x85BB +#define GL_RGB_RAW_422_APPLE 0x8A51 +#endif /* GL_APPLE_rgb_422 */ + +#ifndef GL_EXT_EGL_image_storage +#define GL_EXT_EGL_image_storage 1 +typedef void *GLeglImageOES; +typedef void (APIENTRYP PFNGLEGLIMAGETARGETTEXSTORAGEEXTPROC) (GLenum target, GLeglImageOES image, const GLint* attrib_list); +typedef void (APIENTRYP PFNGLEGLIMAGETARGETTEXTURESTORAGEEXTPROC) (GLuint texture, GLeglImageOES image, const GLint* attrib_list); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glEGLImageTargetTexStorageEXT (GLenum target, GLeglImageOES image, const GLint* attrib_list); +GLAPI void APIENTRY glEGLImageTargetTextureStorageEXT (GLuint texture, GLeglImageOES image, const GLint* attrib_list); +#endif +#endif /* GL_EXT_EGL_image_storage */ + +#ifndef GL_EXT_debug_label +#define GL_EXT_debug_label 1 +#define GL_PROGRAM_PIPELINE_OBJECT_EXT 0x8A4F +#define GL_PROGRAM_OBJECT_EXT 0x8B40 +#define GL_SHADER_OBJECT_EXT 0x8B48 +#define GL_BUFFER_OBJECT_EXT 0x9151 +#define GL_QUERY_OBJECT_EXT 0x9153 +#define GL_VERTEX_ARRAY_OBJECT_EXT 0x9154 +typedef void (APIENTRYP PFNGLLABELOBJECTEXTPROC) (GLenum type, GLuint object, GLsizei length, const GLchar *label); +typedef void (APIENTRYP PFNGLGETOBJECTLABELEXTPROC) (GLenum type, GLuint object, GLsizei bufSize, GLsizei *length, GLchar *label); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glLabelObjectEXT (GLenum type, GLuint object, GLsizei length, const GLchar *label); +GLAPI void APIENTRY glGetObjectLabelEXT (GLenum type, GLuint object, GLsizei bufSize, GLsizei *length, GLchar *label); +#endif +#endif /* GL_EXT_debug_label */ + +#ifndef GL_EXT_debug_marker +#define GL_EXT_debug_marker 1 +typedef void (APIENTRYP PFNGLINSERTEVENTMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (APIENTRYP PFNGLPUSHGROUPMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (APIENTRYP PFNGLPOPGROUPMARKEREXTPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glInsertEventMarkerEXT (GLsizei length, const GLchar *marker); +GLAPI void APIENTRY glPushGroupMarkerEXT (GLsizei length, const GLchar *marker); +GLAPI void APIENTRY glPopGroupMarkerEXT (void); +#endif +#endif /* GL_EXT_debug_marker */ + +#ifndef 
GL_EXT_direct_state_access +#define GL_EXT_direct_state_access 1 +#define GL_PROGRAM_MATRIX_EXT 0x8E2D +#define GL_TRANSPOSE_PROGRAM_MATRIX_EXT 0x8E2E +#define GL_PROGRAM_MATRIX_STACK_DEPTH_EXT 0x8E2F +typedef void (APIENTRYP PFNGLMATRIXLOADFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXLOADDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (APIENTRYP PFNGLMATRIXMULTFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXMULTDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (APIENTRYP PFNGLMATRIXLOADIDENTITYEXTPROC) (GLenum mode); +typedef void (APIENTRYP PFNGLMATRIXROTATEFEXTPROC) (GLenum mode, GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLMATRIXROTATEDEXTPROC) (GLenum mode, GLdouble angle, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLMATRIXSCALEFEXTPROC) (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLMATRIXSCALEDEXTPROC) (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLMATRIXTRANSLATEFEXTPROC) (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLMATRIXTRANSLATEDEXTPROC) (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLMATRIXFRUSTUMEXTPROC) (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +typedef void (APIENTRYP PFNGLMATRIXORTHOEXTPROC) (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +typedef void (APIENTRYP PFNGLMATRIXPOPEXTPROC) (GLenum mode); +typedef void (APIENTRYP PFNGLMATRIXPUSHEXTPROC) (GLenum mode); +typedef void (APIENTRYP PFNGLCLIENTATTRIBDEFAULTEXTPROC) (GLbitfield mask); +typedef void (APIENTRYP PFNGLPUSHCLIENTATTRIBDEFAULTEXTPROC) (GLbitfield mask); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERFEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERFVEXTPROC) (GLuint texture, GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLTEXTUREIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTUREIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOPYTEXTUREIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +typedef void (APIENTRYP PFNGLCOPYTEXTUREIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (APIENTRYP 
PFNGLCOPYTEXTURESUBIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLCOPYTEXTURESUBIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETTEXTUREIMAGEEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERFVEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXTURELEVELPARAMETERFVEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETTEXTURELEVELPARAMETERIVEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLTEXTUREIMAGE3DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE3DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOPYTEXTURESUBIMAGE3DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLBINDMULTITEXTUREEXTPROC) (GLenum texunit, GLenum target, GLuint texture); +typedef void (APIENTRYP PFNGLMULTITEXCOORDPOINTEREXTPROC) (GLenum texunit, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLMULTITEXENVFEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLMULTITEXENVFVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLMULTITEXENVIEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLMULTITEXENVIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLMULTITEXGENDEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLdouble param); +typedef void (APIENTRYP PFNGLMULTITEXGENDVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, const GLdouble *params); +typedef void (APIENTRYP PFNGLMULTITEXGENFEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLMULTITEXGENFVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLMULTITEXGENIEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLMULTITEXGENIVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLGETMULTITEXENVFVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETMULTITEXENVIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETMULTITEXGENDVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLdouble *params); +typedef void (APIENTRYP PFNGLGETMULTITEXGENFVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLfloat *params); +typedef void 
(APIENTRYP PFNGLGETMULTITEXGENIVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERIEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERFEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERFVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLMULTITEXIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLMULTITEXIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLMULTITEXSUBIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLMULTITEXSUBIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOPYMULTITEXIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +typedef void (APIENTRYP PFNGLCOPYMULTITEXIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (APIENTRYP PFNGLCOPYMULTITEXSUBIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLCOPYMULTITEXSUBIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETMULTITEXIMAGEEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +typedef void (APIENTRYP PFNGLGETMULTITEXPARAMETERFVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETMULTITEXPARAMETERIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETMULTITEXLEVELPARAMETERFVEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETMULTITEXLEVELPARAMETERIVEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLMULTITEXIMAGE3DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLMULTITEXSUBIMAGE3DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOPYMULTITEXSUBIMAGE3DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLENABLECLIENTSTATEINDEXEDEXTPROC) (GLenum array, GLuint index); +typedef 
void (APIENTRYP PFNGLDISABLECLIENTSTATEINDEXEDEXTPROC) (GLenum array, GLuint index); +typedef void (APIENTRYP PFNGLGETFLOATINDEXEDVEXTPROC) (GLenum target, GLuint index, GLfloat *data); +typedef void (APIENTRYP PFNGLGETDOUBLEINDEXEDVEXTPROC) (GLenum target, GLuint index, GLdouble *data); +typedef void (APIENTRYP PFNGLGETPOINTERINDEXEDVEXTPROC) (GLenum target, GLuint index, void **data); +typedef void (APIENTRYP PFNGLENABLEINDEXEDEXTPROC) (GLenum target, GLuint index); +typedef void (APIENTRYP PFNGLDISABLEINDEXEDEXTPROC) (GLenum target, GLuint index); +typedef GLboolean (APIENTRYP PFNGLISENABLEDINDEXEDEXTPROC) (GLenum target, GLuint index); +typedef void (APIENTRYP PFNGLGETINTEGERINDEXEDVEXTPROC) (GLenum target, GLuint index, GLint *data); +typedef void (APIENTRYP PFNGLGETBOOLEANINDEXEDVEXTPROC) (GLenum target, GLuint index, GLboolean *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTUREIMAGE3DEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTUREIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTUREIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE3DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLGETCOMPRESSEDTEXTUREIMAGEEXTPROC) (GLuint texture, GLenum target, GLint lod, void *img); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXIMAGE3DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXSUBIMAGE3DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXSUBIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXSUBIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLsizei 
width, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLGETCOMPRESSEDMULTITEXIMAGEEXTPROC) (GLenum texunit, GLenum target, GLint lod, void *img); +typedef void (APIENTRYP PFNGLMATRIXLOADTRANSPOSEFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXLOADTRANSPOSEDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (APIENTRYP PFNGLMATRIXMULTTRANSPOSEFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXMULTTRANSPOSEDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (APIENTRYP PFNGLNAMEDBUFFERDATAEXTPROC) (GLuint buffer, GLsizeiptr size, const void *data, GLenum usage); +typedef void (APIENTRYP PFNGLNAMEDBUFFERSUBDATAEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +typedef void *(APIENTRYP PFNGLMAPNAMEDBUFFEREXTPROC) (GLuint buffer, GLenum access); +typedef GLboolean (APIENTRYP PFNGLUNMAPNAMEDBUFFEREXTPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPARAMETERIVEXTPROC) (GLuint buffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPOINTERVEXTPROC) (GLuint buffer, GLenum pname, void **params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERSUBDATAEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, void *data); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1FEXTPROC) (GLuint program, GLint location, GLfloat v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1IEXTPROC) (GLuint program, GLint location, GLint v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); 
+typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLTEXTUREBUFFEREXTPROC) (GLuint texture, GLenum target, GLenum internalformat, GLuint buffer); +typedef void (APIENTRYP PFNGLMULTITEXBUFFEREXTPROC) (GLenum texunit, GLenum target, GLenum internalformat, GLuint buffer); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIUIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, const GLuint *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIUIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERIIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERIUIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLuint *params); +typedef void (APIENTRYP PFNGLGETMULTITEXPARAMETERIIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETMULTITEXPARAMETERIUIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UIEXTPROC) (GLuint program, GLint location, GLuint v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERS4FVEXTPROC) (GLuint program, GLenum target, GLuint index, GLsizei count, const GLfloat *params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERI4IEXTPROC) (GLuint 
program, GLenum target, GLuint index, GLint x, GLint y, GLint z, GLint w); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERI4IVEXTPROC) (GLuint program, GLenum target, GLuint index, const GLint *params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERSI4IVEXTPROC) (GLuint program, GLenum target, GLuint index, GLsizei count, const GLint *params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERI4UIEXTPROC) (GLuint program, GLenum target, GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERI4UIVEXTPROC) (GLuint program, GLenum target, GLuint index, const GLuint *params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERSI4UIVEXTPROC) (GLuint program, GLenum target, GLuint index, GLsizei count, const GLuint *params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMLOCALPARAMETERIIVEXTPROC) (GLuint program, GLenum target, GLuint index, GLint *params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMLOCALPARAMETERIUIVEXTPROC) (GLuint program, GLenum target, GLuint index, GLuint *params); +typedef void (APIENTRYP PFNGLENABLECLIENTSTATEIEXTPROC) (GLenum array, GLuint index); +typedef void (APIENTRYP PFNGLDISABLECLIENTSTATEIEXTPROC) (GLenum array, GLuint index); +typedef void (APIENTRYP PFNGLGETFLOATI_VEXTPROC) (GLenum pname, GLuint index, GLfloat *params); +typedef void (APIENTRYP PFNGLGETDOUBLEI_VEXTPROC) (GLenum pname, GLuint index, GLdouble *params); +typedef void (APIENTRYP PFNGLGETPOINTERI_VEXTPROC) (GLenum pname, GLuint index, void **params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMSTRINGEXTPROC) (GLuint program, GLenum target, GLenum format, GLsizei len, const void *string); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETER4DEXTPROC) (GLuint program, GLenum target, GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETER4DVEXTPROC) (GLuint program, GLenum target, GLuint index, const GLdouble *params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETER4FEXTPROC) (GLuint program, GLenum target, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETER4FVEXTPROC) (GLuint program, GLenum target, GLuint index, const GLfloat *params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMLOCALPARAMETERDVEXTPROC) (GLuint program, GLenum target, GLuint index, GLdouble *params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMLOCALPARAMETERFVEXTPROC) (GLuint program, GLenum target, GLuint index, GLfloat *params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMIVEXTPROC) (GLuint program, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMSTRINGEXTPROC) (GLuint program, GLenum target, GLenum pname, void *string); +typedef void (APIENTRYP PFNGLNAMEDRENDERBUFFERSTORAGEEXTPROC) (GLuint renderbuffer, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETNAMEDRENDERBUFFERPARAMETERIVEXTPROC) (GLuint renderbuffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLNAMEDRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC) (GLuint renderbuffer, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLNAMEDRENDERBUFFERSTORAGEMULTISAMPLECOVERAGEEXTPROC) (GLuint renderbuffer, GLsizei coverageSamples, GLsizei colorSamples, GLenum internalformat, GLsizei width, GLsizei height); +typedef GLenum (APIENTRYP PFNGLCHECKNAMEDFRAMEBUFFERSTATUSEXTPROC) (GLuint framebuffer, GLenum target); +typedef void 
(APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTURE1DEXTPROC) (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTURE2DEXTPROC) (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTURE3DEXTPROC) (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERRENDERBUFFEREXTPROC) (GLuint framebuffer, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLGETNAMEDFRAMEBUFFERATTACHMENTPARAMETERIVEXTPROC) (GLuint framebuffer, GLenum attachment, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGENERATETEXTUREMIPMAPEXTPROC) (GLuint texture, GLenum target); +typedef void (APIENTRYP PFNGLGENERATEMULTITEXMIPMAPEXTPROC) (GLenum texunit, GLenum target); +typedef void (APIENTRYP PFNGLFRAMEBUFFERDRAWBUFFEREXTPROC) (GLuint framebuffer, GLenum mode); +typedef void (APIENTRYP PFNGLFRAMEBUFFERDRAWBUFFERSEXTPROC) (GLuint framebuffer, GLsizei n, const GLenum *bufs); +typedef void (APIENTRYP PFNGLFRAMEBUFFERREADBUFFEREXTPROC) (GLuint framebuffer, GLenum mode); +typedef void (APIENTRYP PFNGLGETFRAMEBUFFERPARAMETERIVEXTPROC) (GLuint framebuffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLNAMEDCOPYBUFFERSUBDATAEXTPROC) (GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTUREEXTPROC) (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTURELAYEREXTPROC) (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTUREFACEEXTPROC) (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLenum face); +typedef void (APIENTRYP PFNGLTEXTURERENDERBUFFEREXTPROC) (GLuint texture, GLenum target, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLMULTITEXRENDERBUFFEREXTPROC) (GLenum texunit, GLenum target, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYCOLOROFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYEDGEFLAGOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYINDEXOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYNORMALOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYTEXCOORDOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYMULTITEXCOORDOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLenum texunit, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYFOGCOORDOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYSECONDARYCOLOROFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP 
PFNGLVERTEXARRAYVERTEXATTRIBOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBIOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLENABLEVERTEXARRAYEXTPROC) (GLuint vaobj, GLenum array); +typedef void (APIENTRYP PFNGLDISABLEVERTEXARRAYEXTPROC) (GLuint vaobj, GLenum array); +typedef void (APIENTRYP PFNGLENABLEVERTEXARRAYATTRIBEXTPROC) (GLuint vaobj, GLuint index); +typedef void (APIENTRYP PFNGLDISABLEVERTEXARRAYATTRIBEXTPROC) (GLuint vaobj, GLuint index); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYINTEGERVEXTPROC) (GLuint vaobj, GLenum pname, GLint *param); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYPOINTERVEXTPROC) (GLuint vaobj, GLenum pname, void **param); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYINTEGERI_VEXTPROC) (GLuint vaobj, GLuint index, GLenum pname, GLint *param); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYPOINTERI_VEXTPROC) (GLuint vaobj, GLuint index, GLenum pname, void **param); +typedef void *(APIENTRYP PFNGLMAPNAMEDBUFFERRANGEEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (APIENTRYP PFNGLFLUSHMAPPEDNAMEDBUFFERRANGEEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr length); +typedef void (APIENTRYP PFNGLNAMEDBUFFERSTORAGEEXTPROC) (GLuint buffer, GLsizeiptr size, const void *data, GLbitfield flags); +typedef void (APIENTRYP PFNGLCLEARNAMEDBUFFERDATAEXTPROC) (GLuint buffer, GLenum internalformat, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLCLEARNAMEDBUFFERSUBDATAEXTPROC) (GLuint buffer, GLenum internalformat, GLsizeiptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERPARAMETERIEXTPROC) (GLuint framebuffer, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLGETNAMEDFRAMEBUFFERPARAMETERIVEXTPROC) (GLuint framebuffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1DEXTPROC) (GLuint program, GLint location, GLdouble x); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2DEXTPROC) (GLuint program, GLint location, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3DEXTPROC) (GLuint program, GLint location, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4DEXTPROC) (GLuint program, GLint location, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1DVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2DVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3DVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4DVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef 
void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLTEXTUREBUFFERRANGEEXTPROC) (GLuint texture, GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE1DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE2DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE3DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE2DMULTISAMPLEEXTPROC) (GLuint texture, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE3DMULTISAMPLEEXTPROC) (GLuint texture, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLVERTEXARRAYBINDVERTEXBUFFEREXTPROC) (GLuint vaobj, GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBFORMATEXTPROC) (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBIFORMATEXTPROC) (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBLFORMATEXTPROC) (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBBINDINGEXTPROC) (GLuint vaobj, GLuint attribindex, GLuint bindingindex); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXBINDINGDIVISOREXTPROC) (GLuint vaobj, GLuint bindingindex, GLuint divisor); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBLOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLTEXTUREPAGECOMMITMENTEXTPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean commit); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBDIVISOREXTPROC) (GLuint vaobj, GLuint index, GLuint divisor); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMatrixLoadfEXT (GLenum mode, const GLfloat *m); +GLAPI void APIENTRY glMatrixLoaddEXT (GLenum mode, const GLdouble *m); +GLAPI void APIENTRY 
glMatrixMultfEXT (GLenum mode, const GLfloat *m); +GLAPI void APIENTRY glMatrixMultdEXT (GLenum mode, const GLdouble *m); +GLAPI void APIENTRY glMatrixLoadIdentityEXT (GLenum mode); +GLAPI void APIENTRY glMatrixRotatefEXT (GLenum mode, GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glMatrixRotatedEXT (GLenum mode, GLdouble angle, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glMatrixScalefEXT (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glMatrixScaledEXT (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glMatrixTranslatefEXT (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glMatrixTranslatedEXT (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glMatrixFrustumEXT (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +GLAPI void APIENTRY glMatrixOrthoEXT (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +GLAPI void APIENTRY glMatrixPopEXT (GLenum mode); +GLAPI void APIENTRY glMatrixPushEXT (GLenum mode); +GLAPI void APIENTRY glClientAttribDefaultEXT (GLbitfield mask); +GLAPI void APIENTRY glPushClientAttribDefaultEXT (GLbitfield mask); +GLAPI void APIENTRY glTextureParameterfEXT (GLuint texture, GLenum target, GLenum pname, GLfloat param); +GLAPI void APIENTRY glTextureParameterfvEXT (GLuint texture, GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glTextureParameteriEXT (GLuint texture, GLenum target, GLenum pname, GLint param); +GLAPI void APIENTRY glTextureParameterivEXT (GLuint texture, GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glTextureImage1DEXT (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureImage2DEXT (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureSubImage1DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureSubImage2DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCopyTextureImage1DEXT (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +GLAPI void APIENTRY glCopyTextureImage2DEXT (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GLAPI void APIENTRY glCopyTextureSubImage1DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY glCopyTextureSubImage2DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetTextureImageEXT (GLuint texture, GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +GLAPI void APIENTRY glGetTextureParameterfvEXT (GLuint texture, GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetTextureParameterivEXT (GLuint texture, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTextureLevelParameterfvEXT (GLuint texture, 
GLenum target, GLint level, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetTextureLevelParameterivEXT (GLuint texture, GLenum target, GLint level, GLenum pname, GLint *params); +GLAPI void APIENTRY glTextureImage3DEXT (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureSubImage3DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCopyTextureSubImage3DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glBindMultiTextureEXT (GLenum texunit, GLenum target, GLuint texture); +GLAPI void APIENTRY glMultiTexCoordPointerEXT (GLenum texunit, GLint size, GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glMultiTexEnvfEXT (GLenum texunit, GLenum target, GLenum pname, GLfloat param); +GLAPI void APIENTRY glMultiTexEnvfvEXT (GLenum texunit, GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glMultiTexEnviEXT (GLenum texunit, GLenum target, GLenum pname, GLint param); +GLAPI void APIENTRY glMultiTexEnvivEXT (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glMultiTexGendEXT (GLenum texunit, GLenum coord, GLenum pname, GLdouble param); +GLAPI void APIENTRY glMultiTexGendvEXT (GLenum texunit, GLenum coord, GLenum pname, const GLdouble *params); +GLAPI void APIENTRY glMultiTexGenfEXT (GLenum texunit, GLenum coord, GLenum pname, GLfloat param); +GLAPI void APIENTRY glMultiTexGenfvEXT (GLenum texunit, GLenum coord, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glMultiTexGeniEXT (GLenum texunit, GLenum coord, GLenum pname, GLint param); +GLAPI void APIENTRY glMultiTexGenivEXT (GLenum texunit, GLenum coord, GLenum pname, const GLint *params); +GLAPI void APIENTRY glGetMultiTexEnvfvEXT (GLenum texunit, GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetMultiTexEnvivEXT (GLenum texunit, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetMultiTexGendvEXT (GLenum texunit, GLenum coord, GLenum pname, GLdouble *params); +GLAPI void APIENTRY glGetMultiTexGenfvEXT (GLenum texunit, GLenum coord, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetMultiTexGenivEXT (GLenum texunit, GLenum coord, GLenum pname, GLint *params); +GLAPI void APIENTRY glMultiTexParameteriEXT (GLenum texunit, GLenum target, GLenum pname, GLint param); +GLAPI void APIENTRY glMultiTexParameterivEXT (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glMultiTexParameterfEXT (GLenum texunit, GLenum target, GLenum pname, GLfloat param); +GLAPI void APIENTRY glMultiTexParameterfvEXT (GLenum texunit, GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glMultiTexImage1DEXT (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glMultiTexImage2DEXT (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glMultiTexSubImage1DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, 
GLsizei width, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glMultiTexSubImage2DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCopyMultiTexImage1DEXT (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +GLAPI void APIENTRY glCopyMultiTexImage2DEXT (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GLAPI void APIENTRY glCopyMultiTexSubImage1DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY glCopyMultiTexSubImage2DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetMultiTexImageEXT (GLenum texunit, GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +GLAPI void APIENTRY glGetMultiTexParameterfvEXT (GLenum texunit, GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetMultiTexParameterivEXT (GLenum texunit, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetMultiTexLevelParameterfvEXT (GLenum texunit, GLenum target, GLint level, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetMultiTexLevelParameterivEXT (GLenum texunit, GLenum target, GLint level, GLenum pname, GLint *params); +GLAPI void APIENTRY glMultiTexImage3DEXT (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glMultiTexSubImage3DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCopyMultiTexSubImage3DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glEnableClientStateIndexedEXT (GLenum array, GLuint index); +GLAPI void APIENTRY glDisableClientStateIndexedEXT (GLenum array, GLuint index); +GLAPI void APIENTRY glGetFloatIndexedvEXT (GLenum target, GLuint index, GLfloat *data); +GLAPI void APIENTRY glGetDoubleIndexedvEXT (GLenum target, GLuint index, GLdouble *data); +GLAPI void APIENTRY glGetPointerIndexedvEXT (GLenum target, GLuint index, void **data); +GLAPI void APIENTRY glEnableIndexedEXT (GLenum target, GLuint index); +GLAPI void APIENTRY glDisableIndexedEXT (GLenum target, GLuint index); +GLAPI GLboolean APIENTRY glIsEnabledIndexedEXT (GLenum target, GLuint index); +GLAPI void APIENTRY glGetIntegerIndexedvEXT (GLenum target, GLuint index, GLint *data); +GLAPI void APIENTRY glGetBooleanIndexedvEXT (GLenum target, GLuint index, GLboolean *data); +GLAPI void APIENTRY glCompressedTextureImage3DEXT (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedTextureImage2DEXT (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedTextureImage1DEXT (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei 
width, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedTextureSubImage3DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedTextureSubImage2DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedTextureSubImage1DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glGetCompressedTextureImageEXT (GLuint texture, GLenum target, GLint lod, void *img); +GLAPI void APIENTRY glCompressedMultiTexImage3DEXT (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedMultiTexImage2DEXT (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedMultiTexImage1DEXT (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedMultiTexSubImage3DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedMultiTexSubImage2DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedMultiTexSubImage1DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glGetCompressedMultiTexImageEXT (GLenum texunit, GLenum target, GLint lod, void *img); +GLAPI void APIENTRY glMatrixLoadTransposefEXT (GLenum mode, const GLfloat *m); +GLAPI void APIENTRY glMatrixLoadTransposedEXT (GLenum mode, const GLdouble *m); +GLAPI void APIENTRY glMatrixMultTransposefEXT (GLenum mode, const GLfloat *m); +GLAPI void APIENTRY glMatrixMultTransposedEXT (GLenum mode, const GLdouble *m); +GLAPI void APIENTRY glNamedBufferDataEXT (GLuint buffer, GLsizeiptr size, const void *data, GLenum usage); +GLAPI void APIENTRY glNamedBufferSubDataEXT (GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +GLAPI void *APIENTRY glMapNamedBufferEXT (GLuint buffer, GLenum access); +GLAPI GLboolean APIENTRY glUnmapNamedBufferEXT (GLuint buffer); +GLAPI void APIENTRY glGetNamedBufferParameterivEXT (GLuint buffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetNamedBufferPointervEXT (GLuint buffer, GLenum pname, void **params); +GLAPI void APIENTRY glGetNamedBufferSubDataEXT (GLuint buffer, GLintptr offset, GLsizeiptr size, void *data); +GLAPI void APIENTRY glProgramUniform1fEXT (GLuint program, GLint location, GLfloat v0); +GLAPI void APIENTRY glProgramUniform2fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1); +GLAPI void APIENTRY glProgramUniform3fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GLAPI void APIENTRY glProgramUniform4fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1, 
GLfloat v2, GLfloat v3); +GLAPI void APIENTRY glProgramUniform1iEXT (GLuint program, GLint location, GLint v0); +GLAPI void APIENTRY glProgramUniform2iEXT (GLuint program, GLint location, GLint v0, GLint v1); +GLAPI void APIENTRY glProgramUniform3iEXT (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +GLAPI void APIENTRY glProgramUniform4iEXT (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GLAPI void APIENTRY glProgramUniform1fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform2fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform3fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform4fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform1ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform2ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform3ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform4ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniformMatrix2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix2x3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3x2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix2x4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4x2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3x4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4x3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glTextureBufferEXT (GLuint texture, GLenum target, GLenum internalformat, GLuint buffer); +GLAPI void APIENTRY glMultiTexBufferEXT (GLenum texunit, GLenum target, GLenum internalformat, GLuint buffer); +GLAPI void APIENTRY glTextureParameterIivEXT (GLuint texture, GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glTextureParameterIuivEXT (GLuint texture, GLenum target, GLenum pname, const GLuint *params); +GLAPI void APIENTRY glGetTextureParameterIivEXT (GLuint texture, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTextureParameterIuivEXT (GLuint texture, GLenum target, GLenum pname, GLuint *params); +GLAPI void APIENTRY glMultiTexParameterIivEXT (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glMultiTexParameterIuivEXT (GLenum texunit, GLenum target, GLenum pname, const GLuint *params); +GLAPI 
void APIENTRY glGetMultiTexParameterIivEXT (GLenum texunit, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetMultiTexParameterIuivEXT (GLenum texunit, GLenum target, GLenum pname, GLuint *params); +GLAPI void APIENTRY glProgramUniform1uiEXT (GLuint program, GLint location, GLuint v0); +GLAPI void APIENTRY glProgramUniform2uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1); +GLAPI void APIENTRY glProgramUniform3uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +GLAPI void APIENTRY glProgramUniform4uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GLAPI void APIENTRY glProgramUniform1uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniform2uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniform3uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniform4uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glNamedProgramLocalParameters4fvEXT (GLuint program, GLenum target, GLuint index, GLsizei count, const GLfloat *params); +GLAPI void APIENTRY glNamedProgramLocalParameterI4iEXT (GLuint program, GLenum target, GLuint index, GLint x, GLint y, GLint z, GLint w); +GLAPI void APIENTRY glNamedProgramLocalParameterI4ivEXT (GLuint program, GLenum target, GLuint index, const GLint *params); +GLAPI void APIENTRY glNamedProgramLocalParametersI4ivEXT (GLuint program, GLenum target, GLuint index, GLsizei count, const GLint *params); +GLAPI void APIENTRY glNamedProgramLocalParameterI4uiEXT (GLuint program, GLenum target, GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GLAPI void APIENTRY glNamedProgramLocalParameterI4uivEXT (GLuint program, GLenum target, GLuint index, const GLuint *params); +GLAPI void APIENTRY glNamedProgramLocalParametersI4uivEXT (GLuint program, GLenum target, GLuint index, GLsizei count, const GLuint *params); +GLAPI void APIENTRY glGetNamedProgramLocalParameterIivEXT (GLuint program, GLenum target, GLuint index, GLint *params); +GLAPI void APIENTRY glGetNamedProgramLocalParameterIuivEXT (GLuint program, GLenum target, GLuint index, GLuint *params); +GLAPI void APIENTRY glEnableClientStateiEXT (GLenum array, GLuint index); +GLAPI void APIENTRY glDisableClientStateiEXT (GLenum array, GLuint index); +GLAPI void APIENTRY glGetFloati_vEXT (GLenum pname, GLuint index, GLfloat *params); +GLAPI void APIENTRY glGetDoublei_vEXT (GLenum pname, GLuint index, GLdouble *params); +GLAPI void APIENTRY glGetPointeri_vEXT (GLenum pname, GLuint index, void **params); +GLAPI void APIENTRY glNamedProgramStringEXT (GLuint program, GLenum target, GLenum format, GLsizei len, const void *string); +GLAPI void APIENTRY glNamedProgramLocalParameter4dEXT (GLuint program, GLenum target, GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glNamedProgramLocalParameter4dvEXT (GLuint program, GLenum target, GLuint index, const GLdouble *params); +GLAPI void APIENTRY glNamedProgramLocalParameter4fEXT (GLuint program, GLenum target, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glNamedProgramLocalParameter4fvEXT (GLuint program, GLenum target, GLuint index, const GLfloat *params); +GLAPI void APIENTRY glGetNamedProgramLocalParameterdvEXT (GLuint program, GLenum target, GLuint index, GLdouble *params); +GLAPI void APIENTRY 
glGetNamedProgramLocalParameterfvEXT (GLuint program, GLenum target, GLuint index, GLfloat *params); +GLAPI void APIENTRY glGetNamedProgramivEXT (GLuint program, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetNamedProgramStringEXT (GLuint program, GLenum target, GLenum pname, void *string); +GLAPI void APIENTRY glNamedRenderbufferStorageEXT (GLuint renderbuffer, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetNamedRenderbufferParameterivEXT (GLuint renderbuffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glNamedRenderbufferStorageMultisampleEXT (GLuint renderbuffer, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glNamedRenderbufferStorageMultisampleCoverageEXT (GLuint renderbuffer, GLsizei coverageSamples, GLsizei colorSamples, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI GLenum APIENTRY glCheckNamedFramebufferStatusEXT (GLuint framebuffer, GLenum target); +GLAPI void APIENTRY glNamedFramebufferTexture1DEXT (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GLAPI void APIENTRY glNamedFramebufferTexture2DEXT (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GLAPI void APIENTRY glNamedFramebufferTexture3DEXT (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +GLAPI void APIENTRY glNamedFramebufferRenderbufferEXT (GLuint framebuffer, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GLAPI void APIENTRY glGetNamedFramebufferAttachmentParameterivEXT (GLuint framebuffer, GLenum attachment, GLenum pname, GLint *params); +GLAPI void APIENTRY glGenerateTextureMipmapEXT (GLuint texture, GLenum target); +GLAPI void APIENTRY glGenerateMultiTexMipmapEXT (GLenum texunit, GLenum target); +GLAPI void APIENTRY glFramebufferDrawBufferEXT (GLuint framebuffer, GLenum mode); +GLAPI void APIENTRY glFramebufferDrawBuffersEXT (GLuint framebuffer, GLsizei n, const GLenum *bufs); +GLAPI void APIENTRY glFramebufferReadBufferEXT (GLuint framebuffer, GLenum mode); +GLAPI void APIENTRY glGetFramebufferParameterivEXT (GLuint framebuffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glNamedCopyBufferSubDataEXT (GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GLAPI void APIENTRY glNamedFramebufferTextureEXT (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level); +GLAPI void APIENTRY glNamedFramebufferTextureLayerEXT (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLint layer); +GLAPI void APIENTRY glNamedFramebufferTextureFaceEXT (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLenum face); +GLAPI void APIENTRY glTextureRenderbufferEXT (GLuint texture, GLenum target, GLuint renderbuffer); +GLAPI void APIENTRY glMultiTexRenderbufferEXT (GLenum texunit, GLenum target, GLuint renderbuffer); +GLAPI void APIENTRY glVertexArrayVertexOffsetEXT (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayColorOffsetEXT (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayEdgeFlagOffsetEXT (GLuint vaobj, GLuint buffer, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayIndexOffsetEXT (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY 
glVertexArrayNormalOffsetEXT (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayTexCoordOffsetEXT (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayMultiTexCoordOffsetEXT (GLuint vaobj, GLuint buffer, GLenum texunit, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayFogCoordOffsetEXT (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArraySecondaryColorOffsetEXT (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayVertexAttribOffsetEXT (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayVertexAttribIOffsetEXT (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glEnableVertexArrayEXT (GLuint vaobj, GLenum array); +GLAPI void APIENTRY glDisableVertexArrayEXT (GLuint vaobj, GLenum array); +GLAPI void APIENTRY glEnableVertexArrayAttribEXT (GLuint vaobj, GLuint index); +GLAPI void APIENTRY glDisableVertexArrayAttribEXT (GLuint vaobj, GLuint index); +GLAPI void APIENTRY glGetVertexArrayIntegervEXT (GLuint vaobj, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetVertexArrayPointervEXT (GLuint vaobj, GLenum pname, void **param); +GLAPI void APIENTRY glGetVertexArrayIntegeri_vEXT (GLuint vaobj, GLuint index, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetVertexArrayPointeri_vEXT (GLuint vaobj, GLuint index, GLenum pname, void **param); +GLAPI void *APIENTRY glMapNamedBufferRangeEXT (GLuint buffer, GLintptr offset, GLsizeiptr length, GLbitfield access); +GLAPI void APIENTRY glFlushMappedNamedBufferRangeEXT (GLuint buffer, GLintptr offset, GLsizeiptr length); +GLAPI void APIENTRY glNamedBufferStorageEXT (GLuint buffer, GLsizeiptr size, const void *data, GLbitfield flags); +GLAPI void APIENTRY glClearNamedBufferDataEXT (GLuint buffer, GLenum internalformat, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glClearNamedBufferSubDataEXT (GLuint buffer, GLenum internalformat, GLsizeiptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glNamedFramebufferParameteriEXT (GLuint framebuffer, GLenum pname, GLint param); +GLAPI void APIENTRY glGetNamedFramebufferParameterivEXT (GLuint framebuffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glProgramUniform1dEXT (GLuint program, GLint location, GLdouble x); +GLAPI void APIENTRY glProgramUniform2dEXT (GLuint program, GLint location, GLdouble x, GLdouble y); +GLAPI void APIENTRY glProgramUniform3dEXT (GLuint program, GLint location, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glProgramUniform4dEXT (GLuint program, GLint location, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glProgramUniform1dvEXT (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform2dvEXT (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform3dvEXT (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform4dvEXT (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix2dvEXT (GLuint program, 
GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix3dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix2x3dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix2x4dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix3x2dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix3x4dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4x2dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4x3dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glTextureBufferRangeEXT (GLuint texture, GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +GLAPI void APIENTRY glTextureStorage1DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GLAPI void APIENTRY glTextureStorage2DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glTextureStorage3DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GLAPI void APIENTRY glTextureStorage2DMultisampleEXT (GLuint texture, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTextureStorage3DMultisampleEXT (GLuint texture, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glVertexArrayBindVertexBufferEXT (GLuint vaobj, GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +GLAPI void APIENTRY glVertexArrayVertexAttribFormatEXT (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayVertexAttribIFormatEXT (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayVertexAttribLFormatEXT (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayVertexAttribBindingEXT (GLuint vaobj, GLuint attribindex, GLuint bindingindex); +GLAPI void APIENTRY glVertexArrayVertexBindingDivisorEXT (GLuint vaobj, GLuint bindingindex, GLuint divisor); +GLAPI void APIENTRY glVertexArrayVertexAttribLOffsetEXT (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glTexturePageCommitmentEXT (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean commit); +GLAPI void APIENTRY glVertexArrayVertexAttribDivisorEXT (GLuint vaobj, GLuint index, GLuint divisor); +#endif +#endif /* GL_EXT_direct_state_access */ + +#ifndef GL_EXT_draw_instanced 
+#define GL_EXT_draw_instanced 1 +typedef void (APIENTRYP PFNGLDRAWARRAYSINSTANCEDEXTPROC) (GLenum mode, GLint start, GLsizei count, GLsizei primcount); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawArraysInstancedEXT (GLenum mode, GLint start, GLsizei count, GLsizei primcount); +GLAPI void APIENTRY glDrawElementsInstancedEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#endif +#endif /* GL_EXT_draw_instanced */ + +#ifndef GL_EXT_polygon_offset_clamp +#define GL_EXT_polygon_offset_clamp 1 +#define GL_POLYGON_OFFSET_CLAMP_EXT 0x8E1B +typedef void (APIENTRYP PFNGLPOLYGONOFFSETCLAMPEXTPROC) (GLfloat factor, GLfloat units, GLfloat clamp); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPolygonOffsetClampEXT (GLfloat factor, GLfloat units, GLfloat clamp); +#endif +#endif /* GL_EXT_polygon_offset_clamp */ + +#ifndef GL_EXT_post_depth_coverage +#define GL_EXT_post_depth_coverage 1 +#endif /* GL_EXT_post_depth_coverage */ + +#ifndef GL_EXT_raster_multisample +#define GL_EXT_raster_multisample 1 +#define GL_RASTER_MULTISAMPLE_EXT 0x9327 +#define GL_RASTER_SAMPLES_EXT 0x9328 +#define GL_MAX_RASTER_SAMPLES_EXT 0x9329 +#define GL_RASTER_FIXED_SAMPLE_LOCATIONS_EXT 0x932A +#define GL_MULTISAMPLE_RASTERIZATION_ALLOWED_EXT 0x932B +#define GL_EFFECTIVE_RASTER_SAMPLES_EXT 0x932C +typedef void (APIENTRYP PFNGLRASTERSAMPLESEXTPROC) (GLuint samples, GLboolean fixedsamplelocations); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glRasterSamplesEXT (GLuint samples, GLboolean fixedsamplelocations); +#endif +#endif /* GL_EXT_raster_multisample */ + +#ifndef GL_EXT_separate_shader_objects +#define GL_EXT_separate_shader_objects 1 +#define GL_ACTIVE_PROGRAM_EXT 0x8B8D +typedef void (APIENTRYP PFNGLUSESHADERPROGRAMEXTPROC) (GLenum type, GLuint program); +typedef void (APIENTRYP PFNGLACTIVEPROGRAMEXTPROC) (GLuint program); +typedef GLuint (APIENTRYP PFNGLCREATESHADERPROGRAMEXTPROC) (GLenum type, const GLchar *string); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glUseShaderProgramEXT (GLenum type, GLuint program); +GLAPI void APIENTRY glActiveProgramEXT (GLuint program); +GLAPI GLuint APIENTRY glCreateShaderProgramEXT (GLenum type, const GLchar *string); +#endif +#endif /* GL_EXT_separate_shader_objects */ + +#ifndef GL_EXT_shader_framebuffer_fetch +#define GL_EXT_shader_framebuffer_fetch 1 +#define GL_FRAGMENT_SHADER_DISCARDS_SAMPLES_EXT 0x8A52 +#endif /* GL_EXT_shader_framebuffer_fetch */ + +#ifndef GL_EXT_shader_framebuffer_fetch_non_coherent +#define GL_EXT_shader_framebuffer_fetch_non_coherent 1 +typedef void (APIENTRYP PFNGLFRAMEBUFFERFETCHBARRIEREXTPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFramebufferFetchBarrierEXT (void); +#endif +#endif /* GL_EXT_shader_framebuffer_fetch_non_coherent */ + +#ifndef GL_EXT_shader_integer_mix +#define GL_EXT_shader_integer_mix 1 +#endif /* GL_EXT_shader_integer_mix */ + +#ifndef GL_EXT_texture_compression_s3tc +#define GL_EXT_texture_compression_s3tc 1 +#define GL_COMPRESSED_RGB_S3TC_DXT1_EXT 0x83F0 +#define GL_COMPRESSED_RGBA_S3TC_DXT1_EXT 0x83F1 +#define GL_COMPRESSED_RGBA_S3TC_DXT3_EXT 0x83F2 +#define GL_COMPRESSED_RGBA_S3TC_DXT5_EXT 0x83F3 +#endif /* GL_EXT_texture_compression_s3tc */ + +#ifndef GL_EXT_texture_filter_minmax +#define GL_EXT_texture_filter_minmax 1 +#define GL_TEXTURE_REDUCTION_MODE_EXT 0x9366 +#define GL_WEIGHTED_AVERAGE_EXT 0x9367 +#endif 
/* GL_EXT_texture_filter_minmax */ + +#ifndef GL_EXT_texture_sRGB_decode +#define GL_EXT_texture_sRGB_decode 1 +#define GL_TEXTURE_SRGB_DECODE_EXT 0x8A48 +#define GL_DECODE_EXT 0x8A49 +#define GL_SKIP_DECODE_EXT 0x8A4A +#endif /* GL_EXT_texture_sRGB_decode */ + +#ifndef GL_EXT_window_rectangles +#define GL_EXT_window_rectangles 1 +#define GL_INCLUSIVE_EXT 0x8F10 +#define GL_EXCLUSIVE_EXT 0x8F11 +#define GL_WINDOW_RECTANGLE_EXT 0x8F12 +#define GL_WINDOW_RECTANGLE_MODE_EXT 0x8F13 +#define GL_MAX_WINDOW_RECTANGLES_EXT 0x8F14 +#define GL_NUM_WINDOW_RECTANGLES_EXT 0x8F15 +typedef void (APIENTRYP PFNGLWINDOWRECTANGLESEXTPROC) (GLenum mode, GLsizei count, const GLint *box); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glWindowRectanglesEXT (GLenum mode, GLsizei count, const GLint *box); +#endif +#endif /* GL_EXT_window_rectangles */ + +#ifndef GL_INTEL_blackhole_render +#define GL_INTEL_blackhole_render 1 +#define GL_BLACKHOLE_RENDER_INTEL 0x83FC +#endif /* GL_INTEL_blackhole_render */ + +#ifndef GL_INTEL_conservative_rasterization +#define GL_INTEL_conservative_rasterization 1 +#define GL_CONSERVATIVE_RASTERIZATION_INTEL 0x83FE +#endif /* GL_INTEL_conservative_rasterization */ + +#ifndef GL_INTEL_framebuffer_CMAA +#define GL_INTEL_framebuffer_CMAA 1 +typedef void (APIENTRYP PFNGLAPPLYFRAMEBUFFERATTACHMENTCMAAINTELPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glApplyFramebufferAttachmentCMAAINTEL (void); +#endif +#endif /* GL_INTEL_framebuffer_CMAA */ + +#ifndef GL_INTEL_performance_query +#define GL_INTEL_performance_query 1 +#define GL_PERFQUERY_SINGLE_CONTEXT_INTEL 0x00000000 +#define GL_PERFQUERY_GLOBAL_CONTEXT_INTEL 0x00000001 +#define GL_PERFQUERY_WAIT_INTEL 0x83FB +#define GL_PERFQUERY_FLUSH_INTEL 0x83FA +#define GL_PERFQUERY_DONOT_FLUSH_INTEL 0x83F9 +#define GL_PERFQUERY_COUNTER_EVENT_INTEL 0x94F0 +#define GL_PERFQUERY_COUNTER_DURATION_NORM_INTEL 0x94F1 +#define GL_PERFQUERY_COUNTER_DURATION_RAW_INTEL 0x94F2 +#define GL_PERFQUERY_COUNTER_THROUGHPUT_INTEL 0x94F3 +#define GL_PERFQUERY_COUNTER_RAW_INTEL 0x94F4 +#define GL_PERFQUERY_COUNTER_TIMESTAMP_INTEL 0x94F5 +#define GL_PERFQUERY_COUNTER_DATA_UINT32_INTEL 0x94F8 +#define GL_PERFQUERY_COUNTER_DATA_UINT64_INTEL 0x94F9 +#define GL_PERFQUERY_COUNTER_DATA_FLOAT_INTEL 0x94FA +#define GL_PERFQUERY_COUNTER_DATA_DOUBLE_INTEL 0x94FB +#define GL_PERFQUERY_COUNTER_DATA_BOOL32_INTEL 0x94FC +#define GL_PERFQUERY_QUERY_NAME_LENGTH_MAX_INTEL 0x94FD +#define GL_PERFQUERY_COUNTER_NAME_LENGTH_MAX_INTEL 0x94FE +#define GL_PERFQUERY_COUNTER_DESC_LENGTH_MAX_INTEL 0x94FF +#define GL_PERFQUERY_GPA_EXTENDED_COUNTERS_INTEL 0x9500 +typedef void (APIENTRYP PFNGLBEGINPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (APIENTRYP PFNGLCREATEPERFQUERYINTELPROC) (GLuint queryId, GLuint *queryHandle); +typedef void (APIENTRYP PFNGLDELETEPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (APIENTRYP PFNGLENDPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (APIENTRYP PFNGLGETFIRSTPERFQUERYIDINTELPROC) (GLuint *queryId); +typedef void (APIENTRYP PFNGLGETNEXTPERFQUERYIDINTELPROC) (GLuint queryId, GLuint *nextQueryId); +typedef void (APIENTRYP PFNGLGETPERFCOUNTERINFOINTELPROC) (GLuint queryId, GLuint counterId, GLuint counterNameLength, GLchar *counterName, GLuint counterDescLength, GLchar *counterDesc, GLuint *counterOffset, GLuint *counterDataSize, GLuint *counterTypeEnum, GLuint *counterDataTypeEnum, GLuint64 *rawCounterMaxValue); +typedef void (APIENTRYP PFNGLGETPERFQUERYDATAINTELPROC) (GLuint queryHandle, GLuint flags, GLsizei 
dataSize, void *data, GLuint *bytesWritten); +typedef void (APIENTRYP PFNGLGETPERFQUERYIDBYNAMEINTELPROC) (GLchar *queryName, GLuint *queryId); +typedef void (APIENTRYP PFNGLGETPERFQUERYINFOINTELPROC) (GLuint queryId, GLuint queryNameLength, GLchar *queryName, GLuint *dataSize, GLuint *noCounters, GLuint *noInstances, GLuint *capsMask); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBeginPerfQueryINTEL (GLuint queryHandle); +GLAPI void APIENTRY glCreatePerfQueryINTEL (GLuint queryId, GLuint *queryHandle); +GLAPI void APIENTRY glDeletePerfQueryINTEL (GLuint queryHandle); +GLAPI void APIENTRY glEndPerfQueryINTEL (GLuint queryHandle); +GLAPI void APIENTRY glGetFirstPerfQueryIdINTEL (GLuint *queryId); +GLAPI void APIENTRY glGetNextPerfQueryIdINTEL (GLuint queryId, GLuint *nextQueryId); +GLAPI void APIENTRY glGetPerfCounterInfoINTEL (GLuint queryId, GLuint counterId, GLuint counterNameLength, GLchar *counterName, GLuint counterDescLength, GLchar *counterDesc, GLuint *counterOffset, GLuint *counterDataSize, GLuint *counterTypeEnum, GLuint *counterDataTypeEnum, GLuint64 *rawCounterMaxValue); +GLAPI void APIENTRY glGetPerfQueryDataINTEL (GLuint queryHandle, GLuint flags, GLsizei dataSize, void *data, GLuint *bytesWritten); +GLAPI void APIENTRY glGetPerfQueryIdByNameINTEL (GLchar *queryName, GLuint *queryId); +GLAPI void APIENTRY glGetPerfQueryInfoINTEL (GLuint queryId, GLuint queryNameLength, GLchar *queryName, GLuint *dataSize, GLuint *noCounters, GLuint *noInstances, GLuint *capsMask); +#endif +#endif /* GL_INTEL_performance_query */ + +#ifndef GL_NV_bindless_multi_draw_indirect +#define GL_NV_bindless_multi_draw_indirect 1 +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTBINDLESSNVPROC) (GLenum mode, const void *indirect, GLsizei drawCount, GLsizei stride, GLint vertexBufferCount); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTBINDLESSNVPROC) (GLenum mode, GLenum type, const void *indirect, GLsizei drawCount, GLsizei stride, GLint vertexBufferCount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMultiDrawArraysIndirectBindlessNV (GLenum mode, const void *indirect, GLsizei drawCount, GLsizei stride, GLint vertexBufferCount); +GLAPI void APIENTRY glMultiDrawElementsIndirectBindlessNV (GLenum mode, GLenum type, const void *indirect, GLsizei drawCount, GLsizei stride, GLint vertexBufferCount); +#endif +#endif /* GL_NV_bindless_multi_draw_indirect */ + +#ifndef GL_NV_bindless_multi_draw_indirect_count +#define GL_NV_bindless_multi_draw_indirect_count 1 +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTBINDLESSCOUNTNVPROC) (GLenum mode, const void *indirect, GLsizei drawCount, GLsizei maxDrawCount, GLsizei stride, GLint vertexBufferCount); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTBINDLESSCOUNTNVPROC) (GLenum mode, GLenum type, const void *indirect, GLsizei drawCount, GLsizei maxDrawCount, GLsizei stride, GLint vertexBufferCount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMultiDrawArraysIndirectBindlessCountNV (GLenum mode, const void *indirect, GLsizei drawCount, GLsizei maxDrawCount, GLsizei stride, GLint vertexBufferCount); +GLAPI void APIENTRY glMultiDrawElementsIndirectBindlessCountNV (GLenum mode, GLenum type, const void *indirect, GLsizei drawCount, GLsizei maxDrawCount, GLsizei stride, GLint vertexBufferCount); +#endif +#endif /* GL_NV_bindless_multi_draw_indirect_count */ + +#ifndef GL_NV_bindless_texture +#define GL_NV_bindless_texture 1 +typedef GLuint64 (APIENTRYP PFNGLGETTEXTUREHANDLENVPROC) (GLuint texture); +typedef GLuint64 (APIENTRYP 
PFNGLGETTEXTURESAMPLERHANDLENVPROC) (GLuint texture, GLuint sampler); +typedef void (APIENTRYP PFNGLMAKETEXTUREHANDLERESIDENTNVPROC) (GLuint64 handle); +typedef void (APIENTRYP PFNGLMAKETEXTUREHANDLENONRESIDENTNVPROC) (GLuint64 handle); +typedef GLuint64 (APIENTRYP PFNGLGETIMAGEHANDLENVPROC) (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +typedef void (APIENTRYP PFNGLMAKEIMAGEHANDLERESIDENTNVPROC) (GLuint64 handle, GLenum access); +typedef void (APIENTRYP PFNGLMAKEIMAGEHANDLENONRESIDENTNVPROC) (GLuint64 handle); +typedef void (APIENTRYP PFNGLUNIFORMHANDLEUI64NVPROC) (GLint location, GLuint64 value); +typedef void (APIENTRYP PFNGLUNIFORMHANDLEUI64VNVPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64NVPROC) (GLuint program, GLint location, GLuint64 value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +typedef GLboolean (APIENTRYP PFNGLISTEXTUREHANDLERESIDENTNVPROC) (GLuint64 handle); +typedef GLboolean (APIENTRYP PFNGLISIMAGEHANDLERESIDENTNVPROC) (GLuint64 handle); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLuint64 APIENTRY glGetTextureHandleNV (GLuint texture); +GLAPI GLuint64 APIENTRY glGetTextureSamplerHandleNV (GLuint texture, GLuint sampler); +GLAPI void APIENTRY glMakeTextureHandleResidentNV (GLuint64 handle); +GLAPI void APIENTRY glMakeTextureHandleNonResidentNV (GLuint64 handle); +GLAPI GLuint64 APIENTRY glGetImageHandleNV (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +GLAPI void APIENTRY glMakeImageHandleResidentNV (GLuint64 handle, GLenum access); +GLAPI void APIENTRY glMakeImageHandleNonResidentNV (GLuint64 handle); +GLAPI void APIENTRY glUniformHandleui64NV (GLint location, GLuint64 value); +GLAPI void APIENTRY glUniformHandleui64vNV (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glProgramUniformHandleui64NV (GLuint program, GLint location, GLuint64 value); +GLAPI void APIENTRY glProgramUniformHandleui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +GLAPI GLboolean APIENTRY glIsTextureHandleResidentNV (GLuint64 handle); +GLAPI GLboolean APIENTRY glIsImageHandleResidentNV (GLuint64 handle); +#endif +#endif /* GL_NV_bindless_texture */ + +#ifndef GL_NV_blend_equation_advanced +#define GL_NV_blend_equation_advanced 1 +#define GL_BLEND_OVERLAP_NV 0x9281 +#define GL_BLEND_PREMULTIPLIED_SRC_NV 0x9280 +#define GL_BLUE_NV 0x1905 +#define GL_COLORBURN_NV 0x929A +#define GL_COLORDODGE_NV 0x9299 +#define GL_CONJOINT_NV 0x9284 +#define GL_CONTRAST_NV 0x92A1 +#define GL_DARKEN_NV 0x9297 +#define GL_DIFFERENCE_NV 0x929E +#define GL_DISJOINT_NV 0x9283 +#define GL_DST_ATOP_NV 0x928F +#define GL_DST_IN_NV 0x928B +#define GL_DST_NV 0x9287 +#define GL_DST_OUT_NV 0x928D +#define GL_DST_OVER_NV 0x9289 +#define GL_EXCLUSION_NV 0x92A0 +#define GL_GREEN_NV 0x1904 +#define GL_HARDLIGHT_NV 0x929B +#define GL_HARDMIX_NV 0x92A9 +#define GL_HSL_COLOR_NV 0x92AF +#define GL_HSL_HUE_NV 0x92AD +#define GL_HSL_LUMINOSITY_NV 0x92B0 +#define GL_HSL_SATURATION_NV 0x92AE +#define GL_INVERT_OVG_NV 0x92B4 +#define GL_INVERT_RGB_NV 0x92A3 +#define GL_LIGHTEN_NV 0x9298 +#define GL_LINEARBURN_NV 0x92A5 +#define GL_LINEARDODGE_NV 0x92A4 +#define GL_LINEARLIGHT_NV 0x92A7 +#define GL_MINUS_CLAMPED_NV 0x92B3 +#define GL_MINUS_NV 0x929F +#define GL_MULTIPLY_NV 0x9294 +#define GL_OVERLAY_NV 0x9296 +#define GL_PINLIGHT_NV 0x92A8 +#define 
GL_PLUS_CLAMPED_ALPHA_NV 0x92B2 +#define GL_PLUS_CLAMPED_NV 0x92B1 +#define GL_PLUS_DARKER_NV 0x9292 +#define GL_PLUS_NV 0x9291 +#define GL_RED_NV 0x1903 +#define GL_SCREEN_NV 0x9295 +#define GL_SOFTLIGHT_NV 0x929C +#define GL_SRC_ATOP_NV 0x928E +#define GL_SRC_IN_NV 0x928A +#define GL_SRC_NV 0x9286 +#define GL_SRC_OUT_NV 0x928C +#define GL_SRC_OVER_NV 0x9288 +#define GL_UNCORRELATED_NV 0x9282 +#define GL_VIVIDLIGHT_NV 0x92A6 +#define GL_XOR_NV 0x1506 +typedef void (APIENTRYP PFNGLBLENDPARAMETERINVPROC) (GLenum pname, GLint value); +typedef void (APIENTRYP PFNGLBLENDBARRIERNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendParameteriNV (GLenum pname, GLint value); +GLAPI void APIENTRY glBlendBarrierNV (void); +#endif +#endif /* GL_NV_blend_equation_advanced */ + +#ifndef GL_NV_blend_equation_advanced_coherent +#define GL_NV_blend_equation_advanced_coherent 1 +#define GL_BLEND_ADVANCED_COHERENT_NV 0x9285 +#endif /* GL_NV_blend_equation_advanced_coherent */ + +#ifndef GL_NV_blend_minmax_factor +#define GL_NV_blend_minmax_factor 1 +#define GL_FACTOR_MIN_AMD 0x901C +#define GL_FACTOR_MAX_AMD 0x901D +#endif /* GL_NV_blend_minmax_factor */ + +#ifndef GL_NV_clip_space_w_scaling +#define GL_NV_clip_space_w_scaling 1 +#define GL_VIEWPORT_POSITION_W_SCALE_NV 0x937C +#define GL_VIEWPORT_POSITION_W_SCALE_X_COEFF_NV 0x937D +#define GL_VIEWPORT_POSITION_W_SCALE_Y_COEFF_NV 0x937E +typedef void (APIENTRYP PFNGLVIEWPORTPOSITIONWSCALENVPROC) (GLuint index, GLfloat xcoeff, GLfloat ycoeff); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glViewportPositionWScaleNV (GLuint index, GLfloat xcoeff, GLfloat ycoeff); +#endif +#endif /* GL_NV_clip_space_w_scaling */ + +#ifndef GL_NV_command_list +#define GL_NV_command_list 1 +#define GL_TERMINATE_SEQUENCE_COMMAND_NV 0x0000 +#define GL_NOP_COMMAND_NV 0x0001 +#define GL_DRAW_ELEMENTS_COMMAND_NV 0x0002 +#define GL_DRAW_ARRAYS_COMMAND_NV 0x0003 +#define GL_DRAW_ELEMENTS_STRIP_COMMAND_NV 0x0004 +#define GL_DRAW_ARRAYS_STRIP_COMMAND_NV 0x0005 +#define GL_DRAW_ELEMENTS_INSTANCED_COMMAND_NV 0x0006 +#define GL_DRAW_ARRAYS_INSTANCED_COMMAND_NV 0x0007 +#define GL_ELEMENT_ADDRESS_COMMAND_NV 0x0008 +#define GL_ATTRIBUTE_ADDRESS_COMMAND_NV 0x0009 +#define GL_UNIFORM_ADDRESS_COMMAND_NV 0x000A +#define GL_BLEND_COLOR_COMMAND_NV 0x000B +#define GL_STENCIL_REF_COMMAND_NV 0x000C +#define GL_LINE_WIDTH_COMMAND_NV 0x000D +#define GL_POLYGON_OFFSET_COMMAND_NV 0x000E +#define GL_ALPHA_REF_COMMAND_NV 0x000F +#define GL_VIEWPORT_COMMAND_NV 0x0010 +#define GL_SCISSOR_COMMAND_NV 0x0011 +#define GL_FRONT_FACE_COMMAND_NV 0x0012 +typedef void (APIENTRYP PFNGLCREATESTATESNVPROC) (GLsizei n, GLuint *states); +typedef void (APIENTRYP PFNGLDELETESTATESNVPROC) (GLsizei n, const GLuint *states); +typedef GLboolean (APIENTRYP PFNGLISSTATENVPROC) (GLuint state); +typedef void (APIENTRYP PFNGLSTATECAPTURENVPROC) (GLuint state, GLenum mode); +typedef GLuint (APIENTRYP PFNGLGETCOMMANDHEADERNVPROC) (GLenum tokenID, GLuint size); +typedef GLushort (APIENTRYP PFNGLGETSTAGEINDEXNVPROC) (GLenum shadertype); +typedef void (APIENTRYP PFNGLDRAWCOMMANDSNVPROC) (GLenum primitiveMode, GLuint buffer, const GLintptr *indirects, const GLsizei *sizes, GLuint count); +typedef void (APIENTRYP PFNGLDRAWCOMMANDSADDRESSNVPROC) (GLenum primitiveMode, const GLuint64 *indirects, const GLsizei *sizes, GLuint count); +typedef void (APIENTRYP PFNGLDRAWCOMMANDSSTATESNVPROC) (GLuint buffer, const GLintptr *indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +typedef void 
(APIENTRYP PFNGLDRAWCOMMANDSSTATESADDRESSNVPROC) (const GLuint64 *indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +typedef void (APIENTRYP PFNGLCREATECOMMANDLISTSNVPROC) (GLsizei n, GLuint *lists); +typedef void (APIENTRYP PFNGLDELETECOMMANDLISTSNVPROC) (GLsizei n, const GLuint *lists); +typedef GLboolean (APIENTRYP PFNGLISCOMMANDLISTNVPROC) (GLuint list); +typedef void (APIENTRYP PFNGLLISTDRAWCOMMANDSSTATESCLIENTNVPROC) (GLuint list, GLuint segment, const void **indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +typedef void (APIENTRYP PFNGLCOMMANDLISTSEGMENTSNVPROC) (GLuint list, GLuint segments); +typedef void (APIENTRYP PFNGLCOMPILECOMMANDLISTNVPROC) (GLuint list); +typedef void (APIENTRYP PFNGLCALLCOMMANDLISTNVPROC) (GLuint list); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCreateStatesNV (GLsizei n, GLuint *states); +GLAPI void APIENTRY glDeleteStatesNV (GLsizei n, const GLuint *states); +GLAPI GLboolean APIENTRY glIsStateNV (GLuint state); +GLAPI void APIENTRY glStateCaptureNV (GLuint state, GLenum mode); +GLAPI GLuint APIENTRY glGetCommandHeaderNV (GLenum tokenID, GLuint size); +GLAPI GLushort APIENTRY glGetStageIndexNV (GLenum shadertype); +GLAPI void APIENTRY glDrawCommandsNV (GLenum primitiveMode, GLuint buffer, const GLintptr *indirects, const GLsizei *sizes, GLuint count); +GLAPI void APIENTRY glDrawCommandsAddressNV (GLenum primitiveMode, const GLuint64 *indirects, const GLsizei *sizes, GLuint count); +GLAPI void APIENTRY glDrawCommandsStatesNV (GLuint buffer, const GLintptr *indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +GLAPI void APIENTRY glDrawCommandsStatesAddressNV (const GLuint64 *indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +GLAPI void APIENTRY glCreateCommandListsNV (GLsizei n, GLuint *lists); +GLAPI void APIENTRY glDeleteCommandListsNV (GLsizei n, const GLuint *lists); +GLAPI GLboolean APIENTRY glIsCommandListNV (GLuint list); +GLAPI void APIENTRY glListDrawCommandsStatesClientNV (GLuint list, GLuint segment, const void **indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +GLAPI void APIENTRY glCommandListSegmentsNV (GLuint list, GLuint segments); +GLAPI void APIENTRY glCompileCommandListNV (GLuint list); +GLAPI void APIENTRY glCallCommandListNV (GLuint list); +#endif +#endif /* GL_NV_command_list */ + +#ifndef GL_NV_conditional_render +#define GL_NV_conditional_render 1 +#define GL_QUERY_WAIT_NV 0x8E13 +#define GL_QUERY_NO_WAIT_NV 0x8E14 +#define GL_QUERY_BY_REGION_WAIT_NV 0x8E15 +#define GL_QUERY_BY_REGION_NO_WAIT_NV 0x8E16 +typedef void (APIENTRYP PFNGLBEGINCONDITIONALRENDERNVPROC) (GLuint id, GLenum mode); +typedef void (APIENTRYP PFNGLENDCONDITIONALRENDERNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBeginConditionalRenderNV (GLuint id, GLenum mode); +GLAPI void APIENTRY glEndConditionalRenderNV (void); +#endif +#endif /* GL_NV_conditional_render */ + +#ifndef GL_NV_conservative_raster +#define GL_NV_conservative_raster 1 +#define GL_CONSERVATIVE_RASTERIZATION_NV 0x9346 +#define GL_SUBPIXEL_PRECISION_BIAS_X_BITS_NV 0x9347 +#define GL_SUBPIXEL_PRECISION_BIAS_Y_BITS_NV 0x9348 +#define GL_MAX_SUBPIXEL_PRECISION_BIAS_BITS_NV 0x9349 +typedef void (APIENTRYP PFNGLSUBPIXELPRECISIONBIASNVPROC) (GLuint xbits, GLuint ybits); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSubpixelPrecisionBiasNV (GLuint xbits, GLuint ybits); +#endif 
+#endif /* GL_NV_conservative_raster */ + +#ifndef GL_NV_conservative_raster_dilate +#define GL_NV_conservative_raster_dilate 1 +#define GL_CONSERVATIVE_RASTER_DILATE_NV 0x9379 +#define GL_CONSERVATIVE_RASTER_DILATE_RANGE_NV 0x937A +#define GL_CONSERVATIVE_RASTER_DILATE_GRANULARITY_NV 0x937B +typedef void (APIENTRYP PFNGLCONSERVATIVERASTERPARAMETERFNVPROC) (GLenum pname, GLfloat value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glConservativeRasterParameterfNV (GLenum pname, GLfloat value); +#endif +#endif /* GL_NV_conservative_raster_dilate */ + +#ifndef GL_NV_conservative_raster_pre_snap +#define GL_NV_conservative_raster_pre_snap 1 +#define GL_CONSERVATIVE_RASTER_MODE_PRE_SNAP_NV 0x9550 +#endif /* GL_NV_conservative_raster_pre_snap */ + +#ifndef GL_NV_conservative_raster_pre_snap_triangles +#define GL_NV_conservative_raster_pre_snap_triangles 1 +#define GL_CONSERVATIVE_RASTER_MODE_NV 0x954D +#define GL_CONSERVATIVE_RASTER_MODE_POST_SNAP_NV 0x954E +#define GL_CONSERVATIVE_RASTER_MODE_PRE_SNAP_TRIANGLES_NV 0x954F +typedef void (APIENTRYP PFNGLCONSERVATIVERASTERPARAMETERINVPROC) (GLenum pname, GLint param); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glConservativeRasterParameteriNV (GLenum pname, GLint param); +#endif +#endif /* GL_NV_conservative_raster_pre_snap_triangles */ + +#ifndef GL_NV_conservative_raster_underestimation +#define GL_NV_conservative_raster_underestimation 1 +#endif /* GL_NV_conservative_raster_underestimation */ + +#ifndef GL_NV_draw_vulkan_image +#define GL_NV_draw_vulkan_image 1 +typedef void (APIENTRY *GLVULKANPROCNV)(void); +typedef void (APIENTRYP PFNGLDRAWVKIMAGENVPROC) (GLuint64 vkImage, GLuint sampler, GLfloat x0, GLfloat y0, GLfloat x1, GLfloat y1, GLfloat z, GLfloat s0, GLfloat t0, GLfloat s1, GLfloat t1); +typedef GLVULKANPROCNV (APIENTRYP PFNGLGETVKPROCADDRNVPROC) (const GLchar *name); +typedef void (APIENTRYP PFNGLWAITVKSEMAPHORENVPROC) (GLuint64 vkSemaphore); +typedef void (APIENTRYP PFNGLSIGNALVKSEMAPHORENVPROC) (GLuint64 vkSemaphore); +typedef void (APIENTRYP PFNGLSIGNALVKFENCENVPROC) (GLuint64 vkFence); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawVkImageNV (GLuint64 vkImage, GLuint sampler, GLfloat x0, GLfloat y0, GLfloat x1, GLfloat y1, GLfloat z, GLfloat s0, GLfloat t0, GLfloat s1, GLfloat t1); +GLAPI GLVULKANPROCNV APIENTRY glGetVkProcAddrNV (const GLchar *name); +GLAPI void APIENTRY glWaitVkSemaphoreNV (GLuint64 vkSemaphore); +GLAPI void APIENTRY glSignalVkSemaphoreNV (GLuint64 vkSemaphore); +GLAPI void APIENTRY glSignalVkFenceNV (GLuint64 vkFence); +#endif +#endif /* GL_NV_draw_vulkan_image */ + +#ifndef GL_NV_fill_rectangle +#define GL_NV_fill_rectangle 1 +#define GL_FILL_RECTANGLE_NV 0x933C +#endif /* GL_NV_fill_rectangle */ + +#ifndef GL_NV_fragment_coverage_to_color +#define GL_NV_fragment_coverage_to_color 1 +#define GL_FRAGMENT_COVERAGE_TO_COLOR_NV 0x92DD +#define GL_FRAGMENT_COVERAGE_COLOR_NV 0x92DE +typedef void (APIENTRYP PFNGLFRAGMENTCOVERAGECOLORNVPROC) (GLuint color); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFragmentCoverageColorNV (GLuint color); +#endif +#endif /* GL_NV_fragment_coverage_to_color */ + +#ifndef GL_NV_fragment_shader_interlock +#define GL_NV_fragment_shader_interlock 1 +#endif /* GL_NV_fragment_shader_interlock */ + +#ifndef GL_NV_framebuffer_mixed_samples +#define GL_NV_framebuffer_mixed_samples 1 +#define GL_COVERAGE_MODULATION_TABLE_NV 0x9331 +#define GL_COLOR_SAMPLES_NV 0x8E20 +#define GL_DEPTH_SAMPLES_NV 0x932D +#define GL_STENCIL_SAMPLES_NV 0x932E +#define 
GL_MIXED_DEPTH_SAMPLES_SUPPORTED_NV 0x932F +#define GL_MIXED_STENCIL_SAMPLES_SUPPORTED_NV 0x9330 +#define GL_COVERAGE_MODULATION_NV 0x9332 +#define GL_COVERAGE_MODULATION_TABLE_SIZE_NV 0x9333 +typedef void (APIENTRYP PFNGLCOVERAGEMODULATIONTABLENVPROC) (GLsizei n, const GLfloat *v); +typedef void (APIENTRYP PFNGLGETCOVERAGEMODULATIONTABLENVPROC) (GLsizei bufsize, GLfloat *v); +typedef void (APIENTRYP PFNGLCOVERAGEMODULATIONNVPROC) (GLenum components); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCoverageModulationTableNV (GLsizei n, const GLfloat *v); +GLAPI void APIENTRY glGetCoverageModulationTableNV (GLsizei bufsize, GLfloat *v); +GLAPI void APIENTRY glCoverageModulationNV (GLenum components); +#endif +#endif /* GL_NV_framebuffer_mixed_samples */ + +#ifndef GL_NV_framebuffer_multisample_coverage +#define GL_NV_framebuffer_multisample_coverage 1 +#define GL_RENDERBUFFER_COVERAGE_SAMPLES_NV 0x8CAB +#define GL_RENDERBUFFER_COLOR_SAMPLES_NV 0x8E10 +#define GL_MAX_MULTISAMPLE_COVERAGE_MODES_NV 0x8E11 +#define GL_MULTISAMPLE_COVERAGE_MODES_NV 0x8E12 +typedef void (APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLECOVERAGENVPROC) (GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLenum internalformat, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glRenderbufferStorageMultisampleCoverageNV (GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLenum internalformat, GLsizei width, GLsizei height); +#endif +#endif /* GL_NV_framebuffer_multisample_coverage */ + +#ifndef GL_NV_geometry_shader_passthrough +#define GL_NV_geometry_shader_passthrough 1 +#endif /* GL_NV_geometry_shader_passthrough */ + +#ifndef GL_NV_gpu_shader5 +#define GL_NV_gpu_shader5 1 +typedef int64_t GLint64EXT; +#define GL_INT64_NV 0x140E +#define GL_UNSIGNED_INT64_NV 0x140F +#define GL_INT8_NV 0x8FE0 +#define GL_INT8_VEC2_NV 0x8FE1 +#define GL_INT8_VEC3_NV 0x8FE2 +#define GL_INT8_VEC4_NV 0x8FE3 +#define GL_INT16_NV 0x8FE4 +#define GL_INT16_VEC2_NV 0x8FE5 +#define GL_INT16_VEC3_NV 0x8FE6 +#define GL_INT16_VEC4_NV 0x8FE7 +#define GL_INT64_VEC2_NV 0x8FE9 +#define GL_INT64_VEC3_NV 0x8FEA +#define GL_INT64_VEC4_NV 0x8FEB +#define GL_UNSIGNED_INT8_NV 0x8FEC +#define GL_UNSIGNED_INT8_VEC2_NV 0x8FED +#define GL_UNSIGNED_INT8_VEC3_NV 0x8FEE +#define GL_UNSIGNED_INT8_VEC4_NV 0x8FEF +#define GL_UNSIGNED_INT16_NV 0x8FF0 +#define GL_UNSIGNED_INT16_VEC2_NV 0x8FF1 +#define GL_UNSIGNED_INT16_VEC3_NV 0x8FF2 +#define GL_UNSIGNED_INT16_VEC4_NV 0x8FF3 +#define GL_UNSIGNED_INT64_VEC2_NV 0x8FF5 +#define GL_UNSIGNED_INT64_VEC3_NV 0x8FF6 +#define GL_UNSIGNED_INT64_VEC4_NV 0x8FF7 +#define GL_FLOAT16_NV 0x8FF8 +#define GL_FLOAT16_VEC2_NV 0x8FF9 +#define GL_FLOAT16_VEC3_NV 0x8FFA +#define GL_FLOAT16_VEC4_NV 0x8FFB +typedef void (APIENTRYP PFNGLUNIFORM1I64NVPROC) (GLint location, GLint64EXT x); +typedef void (APIENTRYP PFNGLUNIFORM2I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y); +typedef void (APIENTRYP PFNGLUNIFORM3I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +typedef void (APIENTRYP PFNGLUNIFORM4I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +typedef void (APIENTRYP PFNGLUNIFORM1I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM2I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM3I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM4I64VNVPROC) (GLint location, 
GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM1UI64NVPROC) (GLint location, GLuint64EXT x); +typedef void (APIENTRYP PFNGLUNIFORM2UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y); +typedef void (APIENTRYP PFNGLUNIFORM3UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +typedef void (APIENTRYP PFNGLUNIFORM4UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +typedef void (APIENTRYP PFNGLUNIFORM1UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM2UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM3UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM4UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLGETUNIFORMI64VNVPROC) (GLuint program, GLint location, GLint64EXT *params); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1I64NVPROC) (GLuint program, GLint location, GLint64EXT x); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glUniform1i64NV (GLint location, GLint64EXT x); +GLAPI void APIENTRY glUniform2i64NV (GLint location, GLint64EXT x, GLint64EXT y); +GLAPI void APIENTRY glUniform3i64NV (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +GLAPI void APIENTRY glUniform4i64NV (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +GLAPI void APIENTRY glUniform1i64vNV (GLint location, GLsizei count, const GLint64EXT 
*value); +GLAPI void APIENTRY glUniform2i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glUniform3i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glUniform4i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glUniform1ui64NV (GLint location, GLuint64EXT x); +GLAPI void APIENTRY glUniform2ui64NV (GLint location, GLuint64EXT x, GLuint64EXT y); +GLAPI void APIENTRY glUniform3ui64NV (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +GLAPI void APIENTRY glUniform4ui64NV (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +GLAPI void APIENTRY glUniform1ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glUniform2ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glUniform3ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glUniform4ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glGetUniformi64vNV (GLuint program, GLint location, GLint64EXT *params); +GLAPI void APIENTRY glProgramUniform1i64NV (GLuint program, GLint location, GLint64EXT x); +GLAPI void APIENTRY glProgramUniform2i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y); +GLAPI void APIENTRY glProgramUniform3i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +GLAPI void APIENTRY glProgramUniform4i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +GLAPI void APIENTRY glProgramUniform1i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glProgramUniform2i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glProgramUniform3i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glProgramUniform4i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glProgramUniform1ui64NV (GLuint program, GLint location, GLuint64EXT x); +GLAPI void APIENTRY glProgramUniform2ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y); +GLAPI void APIENTRY glProgramUniform3ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +GLAPI void APIENTRY glProgramUniform4ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +GLAPI void APIENTRY glProgramUniform1ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glProgramUniform2ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glProgramUniform3ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glProgramUniform4ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#endif +#endif /* GL_NV_gpu_shader5 */ + +#ifndef GL_NV_internalformat_sample_query +#define GL_NV_internalformat_sample_query 1 +#define GL_MULTISAMPLES_NV 0x9371 +#define GL_SUPERSAMPLE_SCALE_X_NV 0x9372 +#define GL_SUPERSAMPLE_SCALE_Y_NV 0x9373 +#define GL_CONFORMANT_NV 0x9374 +typedef void (APIENTRYP PFNGLGETINTERNALFORMATSAMPLEIVNVPROC) (GLenum target, GLenum internalformat, GLsizei samples, GLenum pname, GLsizei bufSize, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY 
glGetInternalformatSampleivNV (GLenum target, GLenum internalformat, GLsizei samples, GLenum pname, GLsizei bufSize, GLint *params); +#endif +#endif /* GL_NV_internalformat_sample_query */ + +#ifndef GL_NV_path_rendering +#define GL_NV_path_rendering 1 +#define GL_PATH_FORMAT_SVG_NV 0x9070 +#define GL_PATH_FORMAT_PS_NV 0x9071 +#define GL_STANDARD_FONT_NAME_NV 0x9072 +#define GL_SYSTEM_FONT_NAME_NV 0x9073 +#define GL_FILE_NAME_NV 0x9074 +#define GL_PATH_STROKE_WIDTH_NV 0x9075 +#define GL_PATH_END_CAPS_NV 0x9076 +#define GL_PATH_INITIAL_END_CAP_NV 0x9077 +#define GL_PATH_TERMINAL_END_CAP_NV 0x9078 +#define GL_PATH_JOIN_STYLE_NV 0x9079 +#define GL_PATH_MITER_LIMIT_NV 0x907A +#define GL_PATH_DASH_CAPS_NV 0x907B +#define GL_PATH_INITIAL_DASH_CAP_NV 0x907C +#define GL_PATH_TERMINAL_DASH_CAP_NV 0x907D +#define GL_PATH_DASH_OFFSET_NV 0x907E +#define GL_PATH_CLIENT_LENGTH_NV 0x907F +#define GL_PATH_FILL_MODE_NV 0x9080 +#define GL_PATH_FILL_MASK_NV 0x9081 +#define GL_PATH_FILL_COVER_MODE_NV 0x9082 +#define GL_PATH_STROKE_COVER_MODE_NV 0x9083 +#define GL_PATH_STROKE_MASK_NV 0x9084 +#define GL_COUNT_UP_NV 0x9088 +#define GL_COUNT_DOWN_NV 0x9089 +#define GL_PATH_OBJECT_BOUNDING_BOX_NV 0x908A +#define GL_CONVEX_HULL_NV 0x908B +#define GL_BOUNDING_BOX_NV 0x908D +#define GL_TRANSLATE_X_NV 0x908E +#define GL_TRANSLATE_Y_NV 0x908F +#define GL_TRANSLATE_2D_NV 0x9090 +#define GL_TRANSLATE_3D_NV 0x9091 +#define GL_AFFINE_2D_NV 0x9092 +#define GL_AFFINE_3D_NV 0x9094 +#define GL_TRANSPOSE_AFFINE_2D_NV 0x9096 +#define GL_TRANSPOSE_AFFINE_3D_NV 0x9098 +#define GL_UTF8_NV 0x909A +#define GL_UTF16_NV 0x909B +#define GL_BOUNDING_BOX_OF_BOUNDING_BOXES_NV 0x909C +#define GL_PATH_COMMAND_COUNT_NV 0x909D +#define GL_PATH_COORD_COUNT_NV 0x909E +#define GL_PATH_DASH_ARRAY_COUNT_NV 0x909F +#define GL_PATH_COMPUTED_LENGTH_NV 0x90A0 +#define GL_PATH_FILL_BOUNDING_BOX_NV 0x90A1 +#define GL_PATH_STROKE_BOUNDING_BOX_NV 0x90A2 +#define GL_SQUARE_NV 0x90A3 +#define GL_ROUND_NV 0x90A4 +#define GL_TRIANGULAR_NV 0x90A5 +#define GL_BEVEL_NV 0x90A6 +#define GL_MITER_REVERT_NV 0x90A7 +#define GL_MITER_TRUNCATE_NV 0x90A8 +#define GL_SKIP_MISSING_GLYPH_NV 0x90A9 +#define GL_USE_MISSING_GLYPH_NV 0x90AA +#define GL_PATH_ERROR_POSITION_NV 0x90AB +#define GL_ACCUM_ADJACENT_PAIRS_NV 0x90AD +#define GL_ADJACENT_PAIRS_NV 0x90AE +#define GL_FIRST_TO_REST_NV 0x90AF +#define GL_PATH_GEN_MODE_NV 0x90B0 +#define GL_PATH_GEN_COEFF_NV 0x90B1 +#define GL_PATH_GEN_COMPONENTS_NV 0x90B3 +#define GL_PATH_STENCIL_FUNC_NV 0x90B7 +#define GL_PATH_STENCIL_REF_NV 0x90B8 +#define GL_PATH_STENCIL_VALUE_MASK_NV 0x90B9 +#define GL_PATH_STENCIL_DEPTH_OFFSET_FACTOR_NV 0x90BD +#define GL_PATH_STENCIL_DEPTH_OFFSET_UNITS_NV 0x90BE +#define GL_PATH_COVER_DEPTH_FUNC_NV 0x90BF +#define GL_PATH_DASH_OFFSET_RESET_NV 0x90B4 +#define GL_MOVE_TO_RESETS_NV 0x90B5 +#define GL_MOVE_TO_CONTINUES_NV 0x90B6 +#define GL_CLOSE_PATH_NV 0x00 +#define GL_MOVE_TO_NV 0x02 +#define GL_RELATIVE_MOVE_TO_NV 0x03 +#define GL_LINE_TO_NV 0x04 +#define GL_RELATIVE_LINE_TO_NV 0x05 +#define GL_HORIZONTAL_LINE_TO_NV 0x06 +#define GL_RELATIVE_HORIZONTAL_LINE_TO_NV 0x07 +#define GL_VERTICAL_LINE_TO_NV 0x08 +#define GL_RELATIVE_VERTICAL_LINE_TO_NV 0x09 +#define GL_QUADRATIC_CURVE_TO_NV 0x0A +#define GL_RELATIVE_QUADRATIC_CURVE_TO_NV 0x0B +#define GL_CUBIC_CURVE_TO_NV 0x0C +#define GL_RELATIVE_CUBIC_CURVE_TO_NV 0x0D +#define GL_SMOOTH_QUADRATIC_CURVE_TO_NV 0x0E +#define GL_RELATIVE_SMOOTH_QUADRATIC_CURVE_TO_NV 0x0F +#define GL_SMOOTH_CUBIC_CURVE_TO_NV 0x10 +#define GL_RELATIVE_SMOOTH_CUBIC_CURVE_TO_NV 0x11 
+#define GL_SMALL_CCW_ARC_TO_NV 0x12 +#define GL_RELATIVE_SMALL_CCW_ARC_TO_NV 0x13 +#define GL_SMALL_CW_ARC_TO_NV 0x14 +#define GL_RELATIVE_SMALL_CW_ARC_TO_NV 0x15 +#define GL_LARGE_CCW_ARC_TO_NV 0x16 +#define GL_RELATIVE_LARGE_CCW_ARC_TO_NV 0x17 +#define GL_LARGE_CW_ARC_TO_NV 0x18 +#define GL_RELATIVE_LARGE_CW_ARC_TO_NV 0x19 +#define GL_RESTART_PATH_NV 0xF0 +#define GL_DUP_FIRST_CUBIC_CURVE_TO_NV 0xF2 +#define GL_DUP_LAST_CUBIC_CURVE_TO_NV 0xF4 +#define GL_RECT_NV 0xF6 +#define GL_CIRCULAR_CCW_ARC_TO_NV 0xF8 +#define GL_CIRCULAR_CW_ARC_TO_NV 0xFA +#define GL_CIRCULAR_TANGENT_ARC_TO_NV 0xFC +#define GL_ARC_TO_NV 0xFE +#define GL_RELATIVE_ARC_TO_NV 0xFF +#define GL_BOLD_BIT_NV 0x01 +#define GL_ITALIC_BIT_NV 0x02 +#define GL_GLYPH_WIDTH_BIT_NV 0x01 +#define GL_GLYPH_HEIGHT_BIT_NV 0x02 +#define GL_GLYPH_HORIZONTAL_BEARING_X_BIT_NV 0x04 +#define GL_GLYPH_HORIZONTAL_BEARING_Y_BIT_NV 0x08 +#define GL_GLYPH_HORIZONTAL_BEARING_ADVANCE_BIT_NV 0x10 +#define GL_GLYPH_VERTICAL_BEARING_X_BIT_NV 0x20 +#define GL_GLYPH_VERTICAL_BEARING_Y_BIT_NV 0x40 +#define GL_GLYPH_VERTICAL_BEARING_ADVANCE_BIT_NV 0x80 +#define GL_GLYPH_HAS_KERNING_BIT_NV 0x100 +#define GL_FONT_X_MIN_BOUNDS_BIT_NV 0x00010000 +#define GL_FONT_Y_MIN_BOUNDS_BIT_NV 0x00020000 +#define GL_FONT_X_MAX_BOUNDS_BIT_NV 0x00040000 +#define GL_FONT_Y_MAX_BOUNDS_BIT_NV 0x00080000 +#define GL_FONT_UNITS_PER_EM_BIT_NV 0x00100000 +#define GL_FONT_ASCENDER_BIT_NV 0x00200000 +#define GL_FONT_DESCENDER_BIT_NV 0x00400000 +#define GL_FONT_HEIGHT_BIT_NV 0x00800000 +#define GL_FONT_MAX_ADVANCE_WIDTH_BIT_NV 0x01000000 +#define GL_FONT_MAX_ADVANCE_HEIGHT_BIT_NV 0x02000000 +#define GL_FONT_UNDERLINE_POSITION_BIT_NV 0x04000000 +#define GL_FONT_UNDERLINE_THICKNESS_BIT_NV 0x08000000 +#define GL_FONT_HAS_KERNING_BIT_NV 0x10000000 +#define GL_ROUNDED_RECT_NV 0xE8 +#define GL_RELATIVE_ROUNDED_RECT_NV 0xE9 +#define GL_ROUNDED_RECT2_NV 0xEA +#define GL_RELATIVE_ROUNDED_RECT2_NV 0xEB +#define GL_ROUNDED_RECT4_NV 0xEC +#define GL_RELATIVE_ROUNDED_RECT4_NV 0xED +#define GL_ROUNDED_RECT8_NV 0xEE +#define GL_RELATIVE_ROUNDED_RECT8_NV 0xEF +#define GL_RELATIVE_RECT_NV 0xF7 +#define GL_FONT_GLYPHS_AVAILABLE_NV 0x9368 +#define GL_FONT_TARGET_UNAVAILABLE_NV 0x9369 +#define GL_FONT_UNAVAILABLE_NV 0x936A +#define GL_FONT_UNINTELLIGIBLE_NV 0x936B +#define GL_CONIC_CURVE_TO_NV 0x1A +#define GL_RELATIVE_CONIC_CURVE_TO_NV 0x1B +#define GL_FONT_NUM_GLYPH_INDICES_BIT_NV 0x20000000 +#define GL_STANDARD_FONT_FORMAT_NV 0x936C +#define GL_PATH_PROJECTION_NV 0x1701 +#define GL_PATH_MODELVIEW_NV 0x1700 +#define GL_PATH_MODELVIEW_STACK_DEPTH_NV 0x0BA3 +#define GL_PATH_MODELVIEW_MATRIX_NV 0x0BA6 +#define GL_PATH_MAX_MODELVIEW_STACK_DEPTH_NV 0x0D36 +#define GL_PATH_TRANSPOSE_MODELVIEW_MATRIX_NV 0x84E3 +#define GL_PATH_PROJECTION_STACK_DEPTH_NV 0x0BA4 +#define GL_PATH_PROJECTION_MATRIX_NV 0x0BA7 +#define GL_PATH_MAX_PROJECTION_STACK_DEPTH_NV 0x0D38 +#define GL_PATH_TRANSPOSE_PROJECTION_MATRIX_NV 0x84E4 +#define GL_FRAGMENT_INPUT_NV 0x936D +typedef GLuint (APIENTRYP PFNGLGENPATHSNVPROC) (GLsizei range); +typedef void (APIENTRYP PFNGLDELETEPATHSNVPROC) (GLuint path, GLsizei range); +typedef GLboolean (APIENTRYP PFNGLISPATHNVPROC) (GLuint path); +typedef void (APIENTRYP PFNGLPATHCOMMANDSNVPROC) (GLuint path, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (APIENTRYP PFNGLPATHCOORDSNVPROC) (GLuint path, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (APIENTRYP PFNGLPATHSUBCOMMANDSNVPROC) (GLuint path, 
GLsizei commandStart, GLsizei commandsToDelete, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (APIENTRYP PFNGLPATHSUBCOORDSNVPROC) (GLuint path, GLsizei coordStart, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (APIENTRYP PFNGLPATHSTRINGNVPROC) (GLuint path, GLenum format, GLsizei length, const void *pathString); +typedef void (APIENTRYP PFNGLPATHGLYPHSNVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLsizei numGlyphs, GLenum type, const void *charcodes, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (APIENTRYP PFNGLPATHGLYPHRANGENVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyph, GLsizei numGlyphs, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (APIENTRYP PFNGLWEIGHTPATHSNVPROC) (GLuint resultPath, GLsizei numPaths, const GLuint *paths, const GLfloat *weights); +typedef void (APIENTRYP PFNGLCOPYPATHNVPROC) (GLuint resultPath, GLuint srcPath); +typedef void (APIENTRYP PFNGLINTERPOLATEPATHSNVPROC) (GLuint resultPath, GLuint pathA, GLuint pathB, GLfloat weight); +typedef void (APIENTRYP PFNGLTRANSFORMPATHNVPROC) (GLuint resultPath, GLuint srcPath, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP PFNGLPATHPARAMETERIVNVPROC) (GLuint path, GLenum pname, const GLint *value); +typedef void (APIENTRYP PFNGLPATHPARAMETERINVPROC) (GLuint path, GLenum pname, GLint value); +typedef void (APIENTRYP PFNGLPATHPARAMETERFVNVPROC) (GLuint path, GLenum pname, const GLfloat *value); +typedef void (APIENTRYP PFNGLPATHPARAMETERFNVPROC) (GLuint path, GLenum pname, GLfloat value); +typedef void (APIENTRYP PFNGLPATHDASHARRAYNVPROC) (GLuint path, GLsizei dashCount, const GLfloat *dashArray); +typedef void (APIENTRYP PFNGLPATHSTENCILFUNCNVPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (APIENTRYP PFNGLPATHSTENCILDEPTHOFFSETNVPROC) (GLfloat factor, GLfloat units); +typedef void (APIENTRYP PFNGLSTENCILFILLPATHNVPROC) (GLuint path, GLenum fillMode, GLuint mask); +typedef void (APIENTRYP PFNGLSTENCILSTROKEPATHNVPROC) (GLuint path, GLint reference, GLuint mask); +typedef void (APIENTRYP PFNGLSTENCILFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP PFNGLSTENCILSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP PFNGLPATHCOVERDEPTHFUNCNVPROC) (GLenum func); +typedef void (APIENTRYP PFNGLCOVERFILLPATHNVPROC) (GLuint path, GLenum coverMode); +typedef void (APIENTRYP PFNGLCOVERSTROKEPATHNVPROC) (GLuint path, GLenum coverMode); +typedef void (APIENTRYP PFNGLCOVERFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP PFNGLCOVERSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP PFNGLGETPATHPARAMETERIVNVPROC) (GLuint path, GLenum pname, GLint *value); +typedef void (APIENTRYP 
PFNGLGETPATHPARAMETERFVNVPROC) (GLuint path, GLenum pname, GLfloat *value); +typedef void (APIENTRYP PFNGLGETPATHCOMMANDSNVPROC) (GLuint path, GLubyte *commands); +typedef void (APIENTRYP PFNGLGETPATHCOORDSNVPROC) (GLuint path, GLfloat *coords); +typedef void (APIENTRYP PFNGLGETPATHDASHARRAYNVPROC) (GLuint path, GLfloat *dashArray); +typedef void (APIENTRYP PFNGLGETPATHMETRICSNVPROC) (GLbitfield metricQueryMask, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLsizei stride, GLfloat *metrics); +typedef void (APIENTRYP PFNGLGETPATHMETRICRANGENVPROC) (GLbitfield metricQueryMask, GLuint firstPathName, GLsizei numPaths, GLsizei stride, GLfloat *metrics); +typedef void (APIENTRYP PFNGLGETPATHSPACINGNVPROC) (GLenum pathListMode, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLfloat advanceScale, GLfloat kerningScale, GLenum transformType, GLfloat *returnedSpacing); +typedef GLboolean (APIENTRYP PFNGLISPOINTINFILLPATHNVPROC) (GLuint path, GLuint mask, GLfloat x, GLfloat y); +typedef GLboolean (APIENTRYP PFNGLISPOINTINSTROKEPATHNVPROC) (GLuint path, GLfloat x, GLfloat y); +typedef GLfloat (APIENTRYP PFNGLGETPATHLENGTHNVPROC) (GLuint path, GLsizei startSegment, GLsizei numSegments); +typedef GLboolean (APIENTRYP PFNGLPOINTALONGPATHNVPROC) (GLuint path, GLsizei startSegment, GLsizei numSegments, GLfloat distance, GLfloat *x, GLfloat *y, GLfloat *tangentX, GLfloat *tangentY); +typedef void (APIENTRYP PFNGLMATRIXLOAD3X2FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXLOAD3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXLOADTRANSPOSE3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXMULT3X2FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXMULT3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXMULTTRANSPOSE3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLSTENCILTHENCOVERFILLPATHNVPROC) (GLuint path, GLenum fillMode, GLuint mask, GLenum coverMode); +typedef void (APIENTRYP PFNGLSTENCILTHENCOVERSTROKEPATHNVPROC) (GLuint path, GLint reference, GLuint mask, GLenum coverMode); +typedef void (APIENTRYP PFNGLSTENCILTHENCOVERFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP PFNGLSTENCILTHENCOVERSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef GLenum (APIENTRYP PFNGLPATHGLYPHINDEXRANGENVPROC) (GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint pathParameterTemplate, GLfloat emScale, GLuint baseAndCount[2]); +typedef GLenum (APIENTRYP PFNGLPATHGLYPHINDEXARRAYNVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef GLenum (APIENTRYP PFNGLPATHMEMORYGLYPHINDEXARRAYNVPROC) (GLuint firstPathName, GLenum fontTarget, GLsizeiptr fontSize, const void *fontData, GLsizei faceIndex, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (APIENTRYP PFNGLPROGRAMPATHFRAGMENTINPUTGENNVPROC) (GLuint program, GLint location, 
GLenum genMode, GLint components, const GLfloat *coeffs); +typedef void (APIENTRYP PFNGLGETPROGRAMRESOURCEFVNVPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLfloat *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLuint APIENTRY glGenPathsNV (GLsizei range); +GLAPI void APIENTRY glDeletePathsNV (GLuint path, GLsizei range); +GLAPI GLboolean APIENTRY glIsPathNV (GLuint path); +GLAPI void APIENTRY glPathCommandsNV (GLuint path, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +GLAPI void APIENTRY glPathCoordsNV (GLuint path, GLsizei numCoords, GLenum coordType, const void *coords); +GLAPI void APIENTRY glPathSubCommandsNV (GLuint path, GLsizei commandStart, GLsizei commandsToDelete, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +GLAPI void APIENTRY glPathSubCoordsNV (GLuint path, GLsizei coordStart, GLsizei numCoords, GLenum coordType, const void *coords); +GLAPI void APIENTRY glPathStringNV (GLuint path, GLenum format, GLsizei length, const void *pathString); +GLAPI void APIENTRY glPathGlyphsNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLsizei numGlyphs, GLenum type, const void *charcodes, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GLAPI void APIENTRY glPathGlyphRangeNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyph, GLsizei numGlyphs, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GLAPI void APIENTRY glWeightPathsNV (GLuint resultPath, GLsizei numPaths, const GLuint *paths, const GLfloat *weights); +GLAPI void APIENTRY glCopyPathNV (GLuint resultPath, GLuint srcPath); +GLAPI void APIENTRY glInterpolatePathsNV (GLuint resultPath, GLuint pathA, GLuint pathB, GLfloat weight); +GLAPI void APIENTRY glTransformPathNV (GLuint resultPath, GLuint srcPath, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glPathParameterivNV (GLuint path, GLenum pname, const GLint *value); +GLAPI void APIENTRY glPathParameteriNV (GLuint path, GLenum pname, GLint value); +GLAPI void APIENTRY glPathParameterfvNV (GLuint path, GLenum pname, const GLfloat *value); +GLAPI void APIENTRY glPathParameterfNV (GLuint path, GLenum pname, GLfloat value); +GLAPI void APIENTRY glPathDashArrayNV (GLuint path, GLsizei dashCount, const GLfloat *dashArray); +GLAPI void APIENTRY glPathStencilFuncNV (GLenum func, GLint ref, GLuint mask); +GLAPI void APIENTRY glPathStencilDepthOffsetNV (GLfloat factor, GLfloat units); +GLAPI void APIENTRY glStencilFillPathNV (GLuint path, GLenum fillMode, GLuint mask); +GLAPI void APIENTRY glStencilStrokePathNV (GLuint path, GLint reference, GLuint mask); +GLAPI void APIENTRY glStencilFillPathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glStencilStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glPathCoverDepthFuncNV (GLenum func); +GLAPI void APIENTRY glCoverFillPathNV (GLuint path, GLenum coverMode); +GLAPI void APIENTRY glCoverStrokePathNV (GLuint path, GLenum coverMode); +GLAPI void APIENTRY glCoverFillPathInstancedNV 
(GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glCoverStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glGetPathParameterivNV (GLuint path, GLenum pname, GLint *value); +GLAPI void APIENTRY glGetPathParameterfvNV (GLuint path, GLenum pname, GLfloat *value); +GLAPI void APIENTRY glGetPathCommandsNV (GLuint path, GLubyte *commands); +GLAPI void APIENTRY glGetPathCoordsNV (GLuint path, GLfloat *coords); +GLAPI void APIENTRY glGetPathDashArrayNV (GLuint path, GLfloat *dashArray); +GLAPI void APIENTRY glGetPathMetricsNV (GLbitfield metricQueryMask, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLsizei stride, GLfloat *metrics); +GLAPI void APIENTRY glGetPathMetricRangeNV (GLbitfield metricQueryMask, GLuint firstPathName, GLsizei numPaths, GLsizei stride, GLfloat *metrics); +GLAPI void APIENTRY glGetPathSpacingNV (GLenum pathListMode, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLfloat advanceScale, GLfloat kerningScale, GLenum transformType, GLfloat *returnedSpacing); +GLAPI GLboolean APIENTRY glIsPointInFillPathNV (GLuint path, GLuint mask, GLfloat x, GLfloat y); +GLAPI GLboolean APIENTRY glIsPointInStrokePathNV (GLuint path, GLfloat x, GLfloat y); +GLAPI GLfloat APIENTRY glGetPathLengthNV (GLuint path, GLsizei startSegment, GLsizei numSegments); +GLAPI GLboolean APIENTRY glPointAlongPathNV (GLuint path, GLsizei startSegment, GLsizei numSegments, GLfloat distance, GLfloat *x, GLfloat *y, GLfloat *tangentX, GLfloat *tangentY); +GLAPI void APIENTRY glMatrixLoad3x2fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glMatrixLoad3x3fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glMatrixLoadTranspose3x3fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glMatrixMult3x2fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glMatrixMult3x3fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glMatrixMultTranspose3x3fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glStencilThenCoverFillPathNV (GLuint path, GLenum fillMode, GLuint mask, GLenum coverMode); +GLAPI void APIENTRY glStencilThenCoverStrokePathNV (GLuint path, GLint reference, GLuint mask, GLenum coverMode); +GLAPI void APIENTRY glStencilThenCoverFillPathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glStencilThenCoverStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GLAPI GLenum APIENTRY glPathGlyphIndexRangeNV (GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint pathParameterTemplate, GLfloat emScale, GLuint baseAndCount[2]); +GLAPI GLenum APIENTRY glPathGlyphIndexArrayNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GLAPI GLenum APIENTRY glPathMemoryGlyphIndexArrayNV (GLuint firstPathName, GLenum fontTarget, GLsizeiptr fontSize, const void *fontData, GLsizei faceIndex, GLuint 
firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GLAPI void APIENTRY glProgramPathFragmentInputGenNV (GLuint program, GLint location, GLenum genMode, GLint components, const GLfloat *coeffs); +GLAPI void APIENTRY glGetProgramResourcefvNV (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLfloat *params); +#endif +#endif /* GL_NV_path_rendering */ + +#ifndef GL_NV_path_rendering_shared_edge +#define GL_NV_path_rendering_shared_edge 1 +#define GL_SHARED_EDGE_NV 0xC0 +#endif /* GL_NV_path_rendering_shared_edge */ + +#ifndef GL_NV_sample_locations +#define GL_NV_sample_locations 1 +#define GL_SAMPLE_LOCATION_SUBPIXEL_BITS_NV 0x933D +#define GL_SAMPLE_LOCATION_PIXEL_GRID_WIDTH_NV 0x933E +#define GL_SAMPLE_LOCATION_PIXEL_GRID_HEIGHT_NV 0x933F +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_TABLE_SIZE_NV 0x9340 +#define GL_SAMPLE_LOCATION_NV 0x8E50 +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_NV 0x9341 +#define GL_FRAMEBUFFER_PROGRAMMABLE_SAMPLE_LOCATIONS_NV 0x9342 +#define GL_FRAMEBUFFER_SAMPLE_LOCATION_PIXEL_GRID_NV 0x9343 +typedef void (APIENTRYP PFNGLFRAMEBUFFERSAMPLELOCATIONSFVNVPROC) (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERSAMPLELOCATIONSFVNVPROC) (GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLRESOLVEDEPTHVALUESNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFramebufferSampleLocationsfvNV (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glNamedFramebufferSampleLocationsfvNV (GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glResolveDepthValuesNV (void); +#endif +#endif /* GL_NV_sample_locations */ + +#ifndef GL_NV_sample_mask_override_coverage +#define GL_NV_sample_mask_override_coverage 1 +#endif /* GL_NV_sample_mask_override_coverage */ + +#ifndef GL_NV_shader_atomic_counters +#define GL_NV_shader_atomic_counters 1 +#endif /* GL_NV_shader_atomic_counters */ + +#ifndef GL_NV_shader_atomic_float +#define GL_NV_shader_atomic_float 1 +#endif /* GL_NV_shader_atomic_float */ + +#ifndef GL_NV_shader_atomic_float64 +#define GL_NV_shader_atomic_float64 1 +#endif /* GL_NV_shader_atomic_float64 */ + +#ifndef GL_NV_shader_atomic_fp16_vector +#define GL_NV_shader_atomic_fp16_vector 1 +#endif /* GL_NV_shader_atomic_fp16_vector */ + +#ifndef GL_NV_shader_atomic_int64 +#define GL_NV_shader_atomic_int64 1 +#endif /* GL_NV_shader_atomic_int64 */ + +#ifndef GL_NV_shader_buffer_load +#define GL_NV_shader_buffer_load 1 +#define GL_BUFFER_GPU_ADDRESS_NV 0x8F1D +#define GL_GPU_ADDRESS_NV 0x8F34 +#define GL_MAX_SHADER_BUFFER_ADDRESS_NV 0x8F35 +typedef void (APIENTRYP PFNGLMAKEBUFFERRESIDENTNVPROC) (GLenum target, GLenum access); +typedef void (APIENTRYP PFNGLMAKEBUFFERNONRESIDENTNVPROC) (GLenum target); +typedef GLboolean (APIENTRYP PFNGLISBUFFERRESIDENTNVPROC) (GLenum target); +typedef void (APIENTRYP PFNGLMAKENAMEDBUFFERRESIDENTNVPROC) (GLuint buffer, GLenum access); +typedef void (APIENTRYP PFNGLMAKENAMEDBUFFERNONRESIDENTNVPROC) (GLuint buffer); +typedef GLboolean (APIENTRYP PFNGLISNAMEDBUFFERRESIDENTNVPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLGETBUFFERPARAMETERUI64VNVPROC) (GLenum target, GLenum pname, GLuint64EXT *params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPARAMETERUI64VNVPROC) (GLuint buffer, GLenum pname, GLuint64EXT *params); +typedef void (APIENTRYP 
PFNGLGETINTEGERUI64VNVPROC) (GLenum value, GLuint64EXT *result); +typedef void (APIENTRYP PFNGLUNIFORMUI64NVPROC) (GLint location, GLuint64EXT value); +typedef void (APIENTRYP PFNGLUNIFORMUI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLGETUNIFORMUI64VNVPROC) (GLuint program, GLint location, GLuint64EXT *params); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMUI64NVPROC) (GLuint program, GLint location, GLuint64EXT value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMUI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMakeBufferResidentNV (GLenum target, GLenum access); +GLAPI void APIENTRY glMakeBufferNonResidentNV (GLenum target); +GLAPI GLboolean APIENTRY glIsBufferResidentNV (GLenum target); +GLAPI void APIENTRY glMakeNamedBufferResidentNV (GLuint buffer, GLenum access); +GLAPI void APIENTRY glMakeNamedBufferNonResidentNV (GLuint buffer); +GLAPI GLboolean APIENTRY glIsNamedBufferResidentNV (GLuint buffer); +GLAPI void APIENTRY glGetBufferParameterui64vNV (GLenum target, GLenum pname, GLuint64EXT *params); +GLAPI void APIENTRY glGetNamedBufferParameterui64vNV (GLuint buffer, GLenum pname, GLuint64EXT *params); +GLAPI void APIENTRY glGetIntegerui64vNV (GLenum value, GLuint64EXT *result); +GLAPI void APIENTRY glUniformui64NV (GLint location, GLuint64EXT value); +GLAPI void APIENTRY glUniformui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glGetUniformui64vNV (GLuint program, GLint location, GLuint64EXT *params); +GLAPI void APIENTRY glProgramUniformui64NV (GLuint program, GLint location, GLuint64EXT value); +GLAPI void APIENTRY glProgramUniformui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#endif +#endif /* GL_NV_shader_buffer_load */ + +#ifndef GL_NV_shader_buffer_store +#define GL_NV_shader_buffer_store 1 +#define GL_SHADER_GLOBAL_ACCESS_BARRIER_BIT_NV 0x00000010 +#endif /* GL_NV_shader_buffer_store */ + +#ifndef GL_NV_shader_thread_group +#define GL_NV_shader_thread_group 1 +#define GL_WARP_SIZE_NV 0x9339 +#define GL_WARPS_PER_SM_NV 0x933A +#define GL_SM_COUNT_NV 0x933B +#endif /* GL_NV_shader_thread_group */ + +#ifndef GL_NV_shader_thread_shuffle +#define GL_NV_shader_thread_shuffle 1 +#endif /* GL_NV_shader_thread_shuffle */ + +#ifndef GL_NV_stereo_view_rendering +#define GL_NV_stereo_view_rendering 1 +#endif /* GL_NV_stereo_view_rendering */ + +#ifndef GL_NV_texture_barrier +#define GL_NV_texture_barrier 1 +typedef void (APIENTRYP PFNGLTEXTUREBARRIERNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTextureBarrierNV (void); +#endif +#endif /* GL_NV_texture_barrier */ + +#ifndef GL_NV_texture_rectangle_compressed +#define GL_NV_texture_rectangle_compressed 1 +#endif /* GL_NV_texture_rectangle_compressed */ + +#ifndef GL_NV_uniform_buffer_unified_memory +#define GL_NV_uniform_buffer_unified_memory 1 +#define GL_UNIFORM_BUFFER_UNIFIED_NV 0x936E +#define GL_UNIFORM_BUFFER_ADDRESS_NV 0x936F +#define GL_UNIFORM_BUFFER_LENGTH_NV 0x9370 +#endif /* GL_NV_uniform_buffer_unified_memory */ + +#ifndef GL_NV_vertex_attrib_integer_64bit +#define GL_NV_vertex_attrib_integer_64bit 1 +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1I64NVPROC) (GLuint index, GLint64EXT x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2I64NVPROC) (GLuint index, GLint64EXT x, GLint64EXT y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3I64NVPROC) (GLuint index, GLint64EXT x, GLint64EXT y, GLint64EXT z); 
+typedef void (APIENTRYP PFNGLVERTEXATTRIBL4I64NVPROC) (GLuint index, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1I64VNVPROC) (GLuint index, const GLint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2I64VNVPROC) (GLuint index, const GLint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3I64VNVPROC) (GLuint index, const GLint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4I64VNVPROC) (GLuint index, const GLint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1UI64NVPROC) (GLuint index, GLuint64EXT x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2UI64NVPROC) (GLuint index, GLuint64EXT x, GLuint64EXT y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3UI64NVPROC) (GLuint index, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4UI64NVPROC) (GLuint index, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1UI64VNVPROC) (GLuint index, const GLuint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2UI64VNVPROC) (GLuint index, const GLuint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3UI64VNVPROC) (GLuint index, const GLuint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4UI64VNVPROC) (GLuint index, const GLuint64EXT *v); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBLI64VNVPROC) (GLuint index, GLenum pname, GLint64EXT *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBLUI64VNVPROC) (GLuint index, GLenum pname, GLuint64EXT *params); +typedef void (APIENTRYP PFNGLVERTEXATTRIBLFORMATNVPROC) (GLuint index, GLint size, GLenum type, GLsizei stride); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexAttribL1i64NV (GLuint index, GLint64EXT x); +GLAPI void APIENTRY glVertexAttribL2i64NV (GLuint index, GLint64EXT x, GLint64EXT y); +GLAPI void APIENTRY glVertexAttribL3i64NV (GLuint index, GLint64EXT x, GLint64EXT y, GLint64EXT z); +GLAPI void APIENTRY glVertexAttribL4i64NV (GLuint index, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +GLAPI void APIENTRY glVertexAttribL1i64vNV (GLuint index, const GLint64EXT *v); +GLAPI void APIENTRY glVertexAttribL2i64vNV (GLuint index, const GLint64EXT *v); +GLAPI void APIENTRY glVertexAttribL3i64vNV (GLuint index, const GLint64EXT *v); +GLAPI void APIENTRY glVertexAttribL4i64vNV (GLuint index, const GLint64EXT *v); +GLAPI void APIENTRY glVertexAttribL1ui64NV (GLuint index, GLuint64EXT x); +GLAPI void APIENTRY glVertexAttribL2ui64NV (GLuint index, GLuint64EXT x, GLuint64EXT y); +GLAPI void APIENTRY glVertexAttribL3ui64NV (GLuint index, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +GLAPI void APIENTRY glVertexAttribL4ui64NV (GLuint index, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +GLAPI void APIENTRY glVertexAttribL1ui64vNV (GLuint index, const GLuint64EXT *v); +GLAPI void APIENTRY glVertexAttribL2ui64vNV (GLuint index, const GLuint64EXT *v); +GLAPI void APIENTRY glVertexAttribL3ui64vNV (GLuint index, const GLuint64EXT *v); +GLAPI void APIENTRY glVertexAttribL4ui64vNV (GLuint index, const GLuint64EXT *v); +GLAPI void APIENTRY glGetVertexAttribLi64vNV (GLuint index, GLenum pname, GLint64EXT *params); +GLAPI void APIENTRY glGetVertexAttribLui64vNV (GLuint index, GLenum pname, GLuint64EXT *params); +GLAPI void APIENTRY glVertexAttribLFormatNV (GLuint index, GLint size, GLenum type, GLsizei stride); +#endif +#endif /* GL_NV_vertex_attrib_integer_64bit */ + +#ifndef GL_NV_vertex_buffer_unified_memory +#define GL_NV_vertex_buffer_unified_memory 1 +#define 
GL_VERTEX_ATTRIB_ARRAY_UNIFIED_NV 0x8F1E +#define GL_ELEMENT_ARRAY_UNIFIED_NV 0x8F1F +#define GL_VERTEX_ATTRIB_ARRAY_ADDRESS_NV 0x8F20 +#define GL_VERTEX_ARRAY_ADDRESS_NV 0x8F21 +#define GL_NORMAL_ARRAY_ADDRESS_NV 0x8F22 +#define GL_COLOR_ARRAY_ADDRESS_NV 0x8F23 +#define GL_INDEX_ARRAY_ADDRESS_NV 0x8F24 +#define GL_TEXTURE_COORD_ARRAY_ADDRESS_NV 0x8F25 +#define GL_EDGE_FLAG_ARRAY_ADDRESS_NV 0x8F26 +#define GL_SECONDARY_COLOR_ARRAY_ADDRESS_NV 0x8F27 +#define GL_FOG_COORD_ARRAY_ADDRESS_NV 0x8F28 +#define GL_ELEMENT_ARRAY_ADDRESS_NV 0x8F29 +#define GL_VERTEX_ATTRIB_ARRAY_LENGTH_NV 0x8F2A +#define GL_VERTEX_ARRAY_LENGTH_NV 0x8F2B +#define GL_NORMAL_ARRAY_LENGTH_NV 0x8F2C +#define GL_COLOR_ARRAY_LENGTH_NV 0x8F2D +#define GL_INDEX_ARRAY_LENGTH_NV 0x8F2E +#define GL_TEXTURE_COORD_ARRAY_LENGTH_NV 0x8F2F +#define GL_EDGE_FLAG_ARRAY_LENGTH_NV 0x8F30 +#define GL_SECONDARY_COLOR_ARRAY_LENGTH_NV 0x8F31 +#define GL_FOG_COORD_ARRAY_LENGTH_NV 0x8F32 +#define GL_ELEMENT_ARRAY_LENGTH_NV 0x8F33 +#define GL_DRAW_INDIRECT_UNIFIED_NV 0x8F40 +#define GL_DRAW_INDIRECT_ADDRESS_NV 0x8F41 +#define GL_DRAW_INDIRECT_LENGTH_NV 0x8F42 +typedef void (APIENTRYP PFNGLBUFFERADDRESSRANGENVPROC) (GLenum pname, GLuint index, GLuint64EXT address, GLsizeiptr length); +typedef void (APIENTRYP PFNGLVERTEXFORMATNVPROC) (GLint size, GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLNORMALFORMATNVPROC) (GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLCOLORFORMATNVPROC) (GLint size, GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLINDEXFORMATNVPROC) (GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLTEXCOORDFORMATNVPROC) (GLint size, GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLEDGEFLAGFORMATNVPROC) (GLsizei stride); +typedef void (APIENTRYP PFNGLSECONDARYCOLORFORMATNVPROC) (GLint size, GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLFOGCOORDFORMATNVPROC) (GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLVERTEXATTRIBFORMATNVPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride); +typedef void (APIENTRYP PFNGLVERTEXATTRIBIFORMATNVPROC) (GLuint index, GLint size, GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLGETINTEGERUI64I_VNVPROC) (GLenum value, GLuint index, GLuint64EXT *result); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBufferAddressRangeNV (GLenum pname, GLuint index, GLuint64EXT address, GLsizeiptr length); +GLAPI void APIENTRY glVertexFormatNV (GLint size, GLenum type, GLsizei stride); +GLAPI void APIENTRY glNormalFormatNV (GLenum type, GLsizei stride); +GLAPI void APIENTRY glColorFormatNV (GLint size, GLenum type, GLsizei stride); +GLAPI void APIENTRY glIndexFormatNV (GLenum type, GLsizei stride); +GLAPI void APIENTRY glTexCoordFormatNV (GLint size, GLenum type, GLsizei stride); +GLAPI void APIENTRY glEdgeFlagFormatNV (GLsizei stride); +GLAPI void APIENTRY glSecondaryColorFormatNV (GLint size, GLenum type, GLsizei stride); +GLAPI void APIENTRY glFogCoordFormatNV (GLenum type, GLsizei stride); +GLAPI void APIENTRY glVertexAttribFormatNV (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride); +GLAPI void APIENTRY glVertexAttribIFormatNV (GLuint index, GLint size, GLenum type, GLsizei stride); +GLAPI void APIENTRY glGetIntegerui64i_vNV (GLenum value, GLuint index, GLuint64EXT *result); +#endif +#endif /* GL_NV_vertex_buffer_unified_memory */ + +#ifndef GL_NV_viewport_array2 +#define GL_NV_viewport_array2 1 +#endif /* GL_NV_viewport_array2 */ + +#ifndef GL_NV_viewport_swizzle 
+#define GL_NV_viewport_swizzle 1 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_X_NV 0x9350 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_X_NV 0x9351 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_Y_NV 0x9352 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_Y_NV 0x9353 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_Z_NV 0x9354 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_Z_NV 0x9355 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_W_NV 0x9356 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_W_NV 0x9357 +#define GL_VIEWPORT_SWIZZLE_X_NV 0x9358 +#define GL_VIEWPORT_SWIZZLE_Y_NV 0x9359 +#define GL_VIEWPORT_SWIZZLE_Z_NV 0x935A +#define GL_VIEWPORT_SWIZZLE_W_NV 0x935B +typedef void (APIENTRYP PFNGLVIEWPORTSWIZZLENVPROC) (GLuint index, GLenum swizzlex, GLenum swizzley, GLenum swizzlez, GLenum swizzlew); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glViewportSwizzleNV (GLuint index, GLenum swizzlex, GLenum swizzley, GLenum swizzlez, GLenum swizzlew); +#endif +#endif /* GL_NV_viewport_swizzle */ + +#ifndef GL_OVR_multiview +#define GL_OVR_multiview 1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_NUM_VIEWS_OVR 0x9630 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_BASE_VIEW_INDEX_OVR 0x9632 +#define GL_MAX_VIEWS_OVR 0x9631 +#define GL_FRAMEBUFFER_INCOMPLETE_VIEW_TARGETS_OVR 0x9633 +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint baseViewIndex, GLsizei numViews); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFramebufferTextureMultiviewOVR (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint baseViewIndex, GLsizei numViews); +#endif +#endif /* GL_OVR_multiview */ + +#ifndef GL_OVR_multiview2 +#define GL_OVR_multiview2 1 +#endif /* GL_OVR_multiview2 */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/GL/glext.h b/vendor/swiftshader/include/GL/glext.h new file mode 100644 index 00000000..acf881bf --- /dev/null +++ b/vendor/swiftshader/include/GL/glext.h @@ -0,0 +1,12571 @@ +#ifndef __gl_glext_h_ +#define __gl_glext_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2018 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. 
The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#if defined(_WIN32) && !defined(APIENTRY) && !defined(__CYGWIN__) && !defined(__SCITECH_SNAP__) +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN 1 +#endif +#include <windows.h> +#endif + +#ifndef APIENTRY +#define APIENTRY +#endif +#ifndef APIENTRYP +#define APIENTRYP APIENTRY * +#endif +#ifndef GLAPI +#define GLAPI extern +#endif + +#define GL_GLEXT_VERSION 20180525 + +/* Generated C header for: + * API: gl + * Profile: compatibility + * Versions considered: .* + * Versions emitted: 1\.[2-9]|[234]\.[0-9] + * Default extensions included: gl + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_VERSION_1_2 +#define GL_VERSION_1_2 1 +#define GL_UNSIGNED_BYTE_3_3_2 0x8032 +#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034 +#define GL_UNSIGNED_INT_8_8_8_8 0x8035 +#define GL_UNSIGNED_INT_10_10_10_2 0x8036 +#define GL_TEXTURE_BINDING_3D 0x806A +#define GL_PACK_SKIP_IMAGES 0x806B +#define GL_PACK_IMAGE_HEIGHT 0x806C +#define GL_UNPACK_SKIP_IMAGES 0x806D +#define GL_UNPACK_IMAGE_HEIGHT 0x806E +#define GL_TEXTURE_3D 0x806F +#define GL_PROXY_TEXTURE_3D 0x8070 +#define GL_TEXTURE_DEPTH 0x8071 +#define GL_TEXTURE_WRAP_R 0x8072 +#define GL_MAX_3D_TEXTURE_SIZE 0x8073 +#define GL_UNSIGNED_BYTE_2_3_3_REV 0x8362 +#define GL_UNSIGNED_SHORT_5_6_5 0x8363 +#define GL_UNSIGNED_SHORT_5_6_5_REV 0x8364 +#define GL_UNSIGNED_SHORT_4_4_4_4_REV 0x8365 +#define GL_UNSIGNED_SHORT_1_5_5_5_REV 0x8366 +#define GL_UNSIGNED_INT_8_8_8_8_REV 0x8367 +#define GL_UNSIGNED_INT_2_10_10_10_REV 0x8368 +#define GL_BGR 0x80E0 +#define GL_BGRA 0x80E1 +#define GL_MAX_ELEMENTS_VERTICES 0x80E8 +#define GL_MAX_ELEMENTS_INDICES 0x80E9 +#define GL_CLAMP_TO_EDGE 0x812F +#define GL_TEXTURE_MIN_LOD 0x813A +#define GL_TEXTURE_MAX_LOD 0x813B +#define GL_TEXTURE_BASE_LEVEL 0x813C +#define GL_TEXTURE_MAX_LEVEL 0x813D +#define GL_SMOOTH_POINT_SIZE_RANGE 0x0B12 +#define GL_SMOOTH_POINT_SIZE_GRANULARITY 0x0B13 +#define GL_SMOOTH_LINE_WIDTH_RANGE 0x0B22 +#define GL_SMOOTH_LINE_WIDTH_GRANULARITY 0x0B23 +#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E +#define GL_RESCALE_NORMAL 0x803A +#define GL_LIGHT_MODEL_COLOR_CONTROL 0x81F8 +#define GL_SINGLE_COLOR 0x81F9 +#define GL_SEPARATE_SPECULAR_COLOR 0x81FA +#define GL_ALIASED_POINT_SIZE_RANGE 0x846D +typedef void (APIENTRYP PFNGLDRAWRANGEELEMENTSPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +typedef void (APIENTRYP PFNGLTEXIMAGE3DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOPYTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawRangeElements (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +GLAPI void APIENTRY glTexImage3D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void
*pixels); +GLAPI void APIENTRY glTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCopyTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +#endif +#endif /* GL_VERSION_1_2 */ + +#ifndef GL_VERSION_1_3 +#define GL_VERSION_1_3 1 +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 +#define GL_TEXTURE3 0x84C3 +#define GL_TEXTURE4 0x84C4 +#define GL_TEXTURE5 0x84C5 +#define GL_TEXTURE6 0x84C6 +#define GL_TEXTURE7 0x84C7 +#define GL_TEXTURE8 0x84C8 +#define GL_TEXTURE9 0x84C9 +#define GL_TEXTURE10 0x84CA +#define GL_TEXTURE11 0x84CB +#define GL_TEXTURE12 0x84CC +#define GL_TEXTURE13 0x84CD +#define GL_TEXTURE14 0x84CE +#define GL_TEXTURE15 0x84CF +#define GL_TEXTURE16 0x84D0 +#define GL_TEXTURE17 0x84D1 +#define GL_TEXTURE18 0x84D2 +#define GL_TEXTURE19 0x84D3 +#define GL_TEXTURE20 0x84D4 +#define GL_TEXTURE21 0x84D5 +#define GL_TEXTURE22 0x84D6 +#define GL_TEXTURE23 0x84D7 +#define GL_TEXTURE24 0x84D8 +#define GL_TEXTURE25 0x84D9 +#define GL_TEXTURE26 0x84DA +#define GL_TEXTURE27 0x84DB +#define GL_TEXTURE28 0x84DC +#define GL_TEXTURE29 0x84DD +#define GL_TEXTURE30 0x84DE +#define GL_TEXTURE31 0x84DF +#define GL_ACTIVE_TEXTURE 0x84E0 +#define GL_MULTISAMPLE 0x809D +#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E +#define GL_SAMPLE_ALPHA_TO_ONE 0x809F +#define GL_SAMPLE_COVERAGE 0x80A0 +#define GL_SAMPLE_BUFFERS 0x80A8 +#define GL_SAMPLES 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT 0x80AB +#define GL_TEXTURE_CUBE_MAP 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A +#define GL_PROXY_TEXTURE_CUBE_MAP 0x851B +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C +#define GL_COMPRESSED_RGB 0x84ED +#define GL_COMPRESSED_RGBA 0x84EE +#define GL_TEXTURE_COMPRESSION_HINT 0x84EF +#define GL_TEXTURE_COMPRESSED_IMAGE_SIZE 0x86A0 +#define GL_TEXTURE_COMPRESSED 0x86A1 +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3 +#define GL_CLAMP_TO_BORDER 0x812D +#define GL_CLIENT_ACTIVE_TEXTURE 0x84E1 +#define GL_MAX_TEXTURE_UNITS 0x84E2 +#define GL_TRANSPOSE_MODELVIEW_MATRIX 0x84E3 +#define GL_TRANSPOSE_PROJECTION_MATRIX 0x84E4 +#define GL_TRANSPOSE_TEXTURE_MATRIX 0x84E5 +#define GL_TRANSPOSE_COLOR_MATRIX 0x84E6 +#define GL_MULTISAMPLE_BIT 0x20000000 +#define GL_NORMAL_MAP 0x8511 +#define GL_REFLECTION_MAP 0x8512 +#define GL_COMPRESSED_ALPHA 0x84E9 +#define GL_COMPRESSED_LUMINANCE 0x84EA +#define GL_COMPRESSED_LUMINANCE_ALPHA 0x84EB +#define GL_COMPRESSED_INTENSITY 0x84EC +#define GL_COMBINE 0x8570 +#define GL_COMBINE_RGB 0x8571 +#define GL_COMBINE_ALPHA 0x8572 +#define GL_SOURCE0_RGB 0x8580 +#define GL_SOURCE1_RGB 0x8581 +#define GL_SOURCE2_RGB 0x8582 +#define GL_SOURCE0_ALPHA 0x8588 +#define GL_SOURCE1_ALPHA 0x8589 +#define GL_SOURCE2_ALPHA 0x858A +#define GL_OPERAND0_RGB 0x8590 +#define GL_OPERAND1_RGB 0x8591 +#define GL_OPERAND2_RGB 0x8592 +#define GL_OPERAND0_ALPHA 0x8598 +#define GL_OPERAND1_ALPHA 0x8599 +#define GL_OPERAND2_ALPHA 0x859A +#define GL_RGB_SCALE 0x8573 +#define GL_ADD_SIGNED 
0x8574 +#define GL_INTERPOLATE 0x8575 +#define GL_SUBTRACT 0x84E7 +#define GL_CONSTANT 0x8576 +#define GL_PRIMARY_COLOR 0x8577 +#define GL_PREVIOUS 0x8578 +#define GL_DOT3_RGB 0x86AE +#define GL_DOT3_RGBA 0x86AF +typedef void (APIENTRYP PFNGLACTIVETEXTUREPROC) (GLenum texture); +typedef void (APIENTRYP PFNGLSAMPLECOVERAGEPROC) (GLfloat value, GLboolean invert); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE1DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC) (GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLGETCOMPRESSEDTEXIMAGEPROC) (GLenum target, GLint level, void *img); +typedef void (APIENTRYP PFNGLCLIENTACTIVETEXTUREPROC) (GLenum texture); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1DPROC) (GLenum target, GLdouble s); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1DVPROC) (GLenum target, const GLdouble *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1FPROC) (GLenum target, GLfloat s); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1FVPROC) (GLenum target, const GLfloat *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1IPROC) (GLenum target, GLint s); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1IVPROC) (GLenum target, const GLint *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1SPROC) (GLenum target, GLshort s); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1SVPROC) (GLenum target, const GLshort *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2DPROC) (GLenum target, GLdouble s, GLdouble t); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2DVPROC) (GLenum target, const GLdouble *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2FPROC) (GLenum target, GLfloat s, GLfloat t); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2FVPROC) (GLenum target, const GLfloat *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2IPROC) (GLenum target, GLint s, GLint t); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2IVPROC) (GLenum target, const GLint *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2SPROC) (GLenum target, GLshort s, GLshort t); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2SVPROC) (GLenum target, const GLshort *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3DPROC) (GLenum target, GLdouble s, GLdouble t, GLdouble r); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3DVPROC) (GLenum target, const GLdouble *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3FPROC) (GLenum target, GLfloat s, GLfloat t, GLfloat r); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3FVPROC) (GLenum target, const GLfloat *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3IPROC) (GLenum target, GLint s, GLint t, GLint r); 
+typedef void (APIENTRYP PFNGLMULTITEXCOORD3IVPROC) (GLenum target, const GLint *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3SPROC) (GLenum target, GLshort s, GLshort t, GLshort r); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3SVPROC) (GLenum target, const GLshort *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4DPROC) (GLenum target, GLdouble s, GLdouble t, GLdouble r, GLdouble q); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4DVPROC) (GLenum target, const GLdouble *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4FPROC) (GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4FVPROC) (GLenum target, const GLfloat *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4IPROC) (GLenum target, GLint s, GLint t, GLint r, GLint q); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4IVPROC) (GLenum target, const GLint *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4SPROC) (GLenum target, GLshort s, GLshort t, GLshort r, GLshort q); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4SVPROC) (GLenum target, const GLshort *v); +typedef void (APIENTRYP PFNGLLOADTRANSPOSEMATRIXFPROC) (const GLfloat *m); +typedef void (APIENTRYP PFNGLLOADTRANSPOSEMATRIXDPROC) (const GLdouble *m); +typedef void (APIENTRYP PFNGLMULTTRANSPOSEMATRIXFPROC) (const GLfloat *m); +typedef void (APIENTRYP PFNGLMULTTRANSPOSEMATRIXDPROC) (const GLdouble *m); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glActiveTexture (GLenum texture); +GLAPI void APIENTRY glSampleCoverage (GLfloat value, GLboolean invert); +GLAPI void APIENTRY glCompressedTexImage3D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexImage2D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexImage1D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexSubImage1D (GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glGetCompressedTexImage (GLenum target, GLint level, void *img); +GLAPI void APIENTRY glClientActiveTexture (GLenum texture); +GLAPI void APIENTRY glMultiTexCoord1d (GLenum target, GLdouble s); +GLAPI void APIENTRY glMultiTexCoord1dv (GLenum target, const GLdouble *v); +GLAPI void APIENTRY glMultiTexCoord1f (GLenum target, GLfloat s); +GLAPI void APIENTRY glMultiTexCoord1fv (GLenum target, const GLfloat *v); +GLAPI void APIENTRY glMultiTexCoord1i (GLenum target, GLint s); +GLAPI void APIENTRY glMultiTexCoord1iv (GLenum target, const GLint *v); +GLAPI void APIENTRY glMultiTexCoord1s (GLenum target, GLshort s); +GLAPI void APIENTRY glMultiTexCoord1sv (GLenum target, const GLshort *v); +GLAPI void APIENTRY glMultiTexCoord2d (GLenum target, GLdouble s, GLdouble t); +GLAPI void APIENTRY glMultiTexCoord2dv (GLenum target, const GLdouble *v); +GLAPI void APIENTRY glMultiTexCoord2f (GLenum target, 
GLfloat s, GLfloat t); +GLAPI void APIENTRY glMultiTexCoord2fv (GLenum target, const GLfloat *v); +GLAPI void APIENTRY glMultiTexCoord2i (GLenum target, GLint s, GLint t); +GLAPI void APIENTRY glMultiTexCoord2iv (GLenum target, const GLint *v); +GLAPI void APIENTRY glMultiTexCoord2s (GLenum target, GLshort s, GLshort t); +GLAPI void APIENTRY glMultiTexCoord2sv (GLenum target, const GLshort *v); +GLAPI void APIENTRY glMultiTexCoord3d (GLenum target, GLdouble s, GLdouble t, GLdouble r); +GLAPI void APIENTRY glMultiTexCoord3dv (GLenum target, const GLdouble *v); +GLAPI void APIENTRY glMultiTexCoord3f (GLenum target, GLfloat s, GLfloat t, GLfloat r); +GLAPI void APIENTRY glMultiTexCoord3fv (GLenum target, const GLfloat *v); +GLAPI void APIENTRY glMultiTexCoord3i (GLenum target, GLint s, GLint t, GLint r); +GLAPI void APIENTRY glMultiTexCoord3iv (GLenum target, const GLint *v); +GLAPI void APIENTRY glMultiTexCoord3s (GLenum target, GLshort s, GLshort t, GLshort r); +GLAPI void APIENTRY glMultiTexCoord3sv (GLenum target, const GLshort *v); +GLAPI void APIENTRY glMultiTexCoord4d (GLenum target, GLdouble s, GLdouble t, GLdouble r, GLdouble q); +GLAPI void APIENTRY glMultiTexCoord4dv (GLenum target, const GLdouble *v); +GLAPI void APIENTRY glMultiTexCoord4f (GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q); +GLAPI void APIENTRY glMultiTexCoord4fv (GLenum target, const GLfloat *v); +GLAPI void APIENTRY glMultiTexCoord4i (GLenum target, GLint s, GLint t, GLint r, GLint q); +GLAPI void APIENTRY glMultiTexCoord4iv (GLenum target, const GLint *v); +GLAPI void APIENTRY glMultiTexCoord4s (GLenum target, GLshort s, GLshort t, GLshort r, GLshort q); +GLAPI void APIENTRY glMultiTexCoord4sv (GLenum target, const GLshort *v); +GLAPI void APIENTRY glLoadTransposeMatrixf (const GLfloat *m); +GLAPI void APIENTRY glLoadTransposeMatrixd (const GLdouble *m); +GLAPI void APIENTRY glMultTransposeMatrixf (const GLfloat *m); +GLAPI void APIENTRY glMultTransposeMatrixd (const GLdouble *m); +#endif +#endif /* GL_VERSION_1_3 */ + +#ifndef GL_VERSION_1_4 +#define GL_VERSION_1_4 1 +#define GL_BLEND_DST_RGB 0x80C8 +#define GL_BLEND_SRC_RGB 0x80C9 +#define GL_BLEND_DST_ALPHA 0x80CA +#define GL_BLEND_SRC_ALPHA 0x80CB +#define GL_POINT_FADE_THRESHOLD_SIZE 0x8128 +#define GL_DEPTH_COMPONENT16 0x81A5 +#define GL_DEPTH_COMPONENT24 0x81A6 +#define GL_DEPTH_COMPONENT32 0x81A7 +#define GL_MIRRORED_REPEAT 0x8370 +#define GL_MAX_TEXTURE_LOD_BIAS 0x84FD +#define GL_TEXTURE_LOD_BIAS 0x8501 +#define GL_INCR_WRAP 0x8507 +#define GL_DECR_WRAP 0x8508 +#define GL_TEXTURE_DEPTH_SIZE 0x884A +#define GL_TEXTURE_COMPARE_MODE 0x884C +#define GL_TEXTURE_COMPARE_FUNC 0x884D +#define GL_POINT_SIZE_MIN 0x8126 +#define GL_POINT_SIZE_MAX 0x8127 +#define GL_POINT_DISTANCE_ATTENUATION 0x8129 +#define GL_GENERATE_MIPMAP 0x8191 +#define GL_GENERATE_MIPMAP_HINT 0x8192 +#define GL_FOG_COORDINATE_SOURCE 0x8450 +#define GL_FOG_COORDINATE 0x8451 +#define GL_FRAGMENT_DEPTH 0x8452 +#define GL_CURRENT_FOG_COORDINATE 0x8453 +#define GL_FOG_COORDINATE_ARRAY_TYPE 0x8454 +#define GL_FOG_COORDINATE_ARRAY_STRIDE 0x8455 +#define GL_FOG_COORDINATE_ARRAY_POINTER 0x8456 +#define GL_FOG_COORDINATE_ARRAY 0x8457 +#define GL_COLOR_SUM 0x8458 +#define GL_CURRENT_SECONDARY_COLOR 0x8459 +#define GL_SECONDARY_COLOR_ARRAY_SIZE 0x845A +#define GL_SECONDARY_COLOR_ARRAY_TYPE 0x845B +#define GL_SECONDARY_COLOR_ARRAY_STRIDE 0x845C +#define GL_SECONDARY_COLOR_ARRAY_POINTER 0x845D +#define GL_SECONDARY_COLOR_ARRAY 0x845E +#define GL_TEXTURE_FILTER_CONTROL 0x8500 +#define 
GL_DEPTH_TEXTURE_MODE 0x884B +#define GL_COMPARE_R_TO_TEXTURE 0x884E +#define GL_BLEND_COLOR 0x8005 +#define GL_BLEND_EQUATION 0x8009 +#define GL_CONSTANT_COLOR 0x8001 +#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002 +#define GL_CONSTANT_ALPHA 0x8003 +#define GL_ONE_MINUS_CONSTANT_ALPHA 0x8004 +#define GL_FUNC_ADD 0x8006 +#define GL_FUNC_REVERSE_SUBTRACT 0x800B +#define GL_FUNC_SUBTRACT 0x800A +#define GL_MIN 0x8007 +#define GL_MAX 0x8008 +typedef void (APIENTRYP PFNGLBLENDFUNCSEPARATEPROC) (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSPROC) (GLenum mode, const GLint *first, const GLsizei *count, GLsizei drawcount); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSPROC) (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount); +typedef void (APIENTRYP PFNGLPOINTPARAMETERFPROC) (GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLPOINTPARAMETERFVPROC) (GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLPOINTPARAMETERIPROC) (GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLPOINTPARAMETERIVPROC) (GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLFOGCOORDFPROC) (GLfloat coord); +typedef void (APIENTRYP PFNGLFOGCOORDFVPROC) (const GLfloat *coord); +typedef void (APIENTRYP PFNGLFOGCOORDDPROC) (GLdouble coord); +typedef void (APIENTRYP PFNGLFOGCOORDDVPROC) (const GLdouble *coord); +typedef void (APIENTRYP PFNGLFOGCOORDPOINTERPROC) (GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3BPROC) (GLbyte red, GLbyte green, GLbyte blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3BVPROC) (const GLbyte *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3DPROC) (GLdouble red, GLdouble green, GLdouble blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3DVPROC) (const GLdouble *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3FPROC) (GLfloat red, GLfloat green, GLfloat blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3FVPROC) (const GLfloat *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3IPROC) (GLint red, GLint green, GLint blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3IVPROC) (const GLint *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3SPROC) (GLshort red, GLshort green, GLshort blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3SVPROC) (const GLshort *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UBPROC) (GLubyte red, GLubyte green, GLubyte blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UBVPROC) (const GLubyte *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UIPROC) (GLuint red, GLuint green, GLuint blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UIVPROC) (const GLuint *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3USPROC) (GLushort red, GLushort green, GLushort blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3USVPROC) (const GLushort *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLORPOINTERPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLWINDOWPOS2DPROC) (GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLWINDOWPOS2DVPROC) (const GLdouble *v); +typedef void (APIENTRYP PFNGLWINDOWPOS2FPROC) (GLfloat x, GLfloat y); +typedef void (APIENTRYP PFNGLWINDOWPOS2FVPROC) (const GLfloat *v); +typedef void (APIENTRYP PFNGLWINDOWPOS2IPROC) (GLint x, GLint y); +typedef void (APIENTRYP PFNGLWINDOWPOS2IVPROC) (const GLint *v); +typedef void (APIENTRYP PFNGLWINDOWPOS2SPROC) (GLshort x, GLshort y); +typedef void (APIENTRYP 
PFNGLWINDOWPOS2SVPROC) (const GLshort *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3DPROC) (GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLWINDOWPOS3DVPROC) (const GLdouble *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3FPROC) (GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLWINDOWPOS3FVPROC) (const GLfloat *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3IPROC) (GLint x, GLint y, GLint z); +typedef void (APIENTRYP PFNGLWINDOWPOS3IVPROC) (const GLint *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3SPROC) (GLshort x, GLshort y, GLshort z); +typedef void (APIENTRYP PFNGLWINDOWPOS3SVPROC) (const GLshort *v); +typedef void (APIENTRYP PFNGLBLENDCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (APIENTRYP PFNGLBLENDEQUATIONPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendFuncSeparate (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +GLAPI void APIENTRY glMultiDrawArrays (GLenum mode, const GLint *first, const GLsizei *count, GLsizei drawcount); +GLAPI void APIENTRY glMultiDrawElements (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount); +GLAPI void APIENTRY glPointParameterf (GLenum pname, GLfloat param); +GLAPI void APIENTRY glPointParameterfv (GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glPointParameteri (GLenum pname, GLint param); +GLAPI void APIENTRY glPointParameteriv (GLenum pname, const GLint *params); +GLAPI void APIENTRY glFogCoordf (GLfloat coord); +GLAPI void APIENTRY glFogCoordfv (const GLfloat *coord); +GLAPI void APIENTRY glFogCoordd (GLdouble coord); +GLAPI void APIENTRY glFogCoorddv (const GLdouble *coord); +GLAPI void APIENTRY glFogCoordPointer (GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glSecondaryColor3b (GLbyte red, GLbyte green, GLbyte blue); +GLAPI void APIENTRY glSecondaryColor3bv (const GLbyte *v); +GLAPI void APIENTRY glSecondaryColor3d (GLdouble red, GLdouble green, GLdouble blue); +GLAPI void APIENTRY glSecondaryColor3dv (const GLdouble *v); +GLAPI void APIENTRY glSecondaryColor3f (GLfloat red, GLfloat green, GLfloat blue); +GLAPI void APIENTRY glSecondaryColor3fv (const GLfloat *v); +GLAPI void APIENTRY glSecondaryColor3i (GLint red, GLint green, GLint blue); +GLAPI void APIENTRY glSecondaryColor3iv (const GLint *v); +GLAPI void APIENTRY glSecondaryColor3s (GLshort red, GLshort green, GLshort blue); +GLAPI void APIENTRY glSecondaryColor3sv (const GLshort *v); +GLAPI void APIENTRY glSecondaryColor3ub (GLubyte red, GLubyte green, GLubyte blue); +GLAPI void APIENTRY glSecondaryColor3ubv (const GLubyte *v); +GLAPI void APIENTRY glSecondaryColor3ui (GLuint red, GLuint green, GLuint blue); +GLAPI void APIENTRY glSecondaryColor3uiv (const GLuint *v); +GLAPI void APIENTRY glSecondaryColor3us (GLushort red, GLushort green, GLushort blue); +GLAPI void APIENTRY glSecondaryColor3usv (const GLushort *v); +GLAPI void APIENTRY glSecondaryColorPointer (GLint size, GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glWindowPos2d (GLdouble x, GLdouble y); +GLAPI void APIENTRY glWindowPos2dv (const GLdouble *v); +GLAPI void APIENTRY glWindowPos2f (GLfloat x, GLfloat y); +GLAPI void APIENTRY glWindowPos2fv (const GLfloat *v); +GLAPI void APIENTRY glWindowPos2i (GLint x, GLint y); +GLAPI void APIENTRY glWindowPos2iv (const GLint *v); +GLAPI void APIENTRY glWindowPos2s (GLshort x, GLshort y); +GLAPI void APIENTRY glWindowPos2sv (const GLshort *v); +GLAPI void 
APIENTRY glWindowPos3d (GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glWindowPos3dv (const GLdouble *v); +GLAPI void APIENTRY glWindowPos3f (GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glWindowPos3fv (const GLfloat *v); +GLAPI void APIENTRY glWindowPos3i (GLint x, GLint y, GLint z); +GLAPI void APIENTRY glWindowPos3iv (const GLint *v); +GLAPI void APIENTRY glWindowPos3s (GLshort x, GLshort y, GLshort z); +GLAPI void APIENTRY glWindowPos3sv (const GLshort *v); +GLAPI void APIENTRY glBlendColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GLAPI void APIENTRY glBlendEquation (GLenum mode); +#endif +#endif /* GL_VERSION_1_4 */ + +#ifndef GL_VERSION_1_5 +#define GL_VERSION_1_5 1 +#include <KHR/khrplatform.h> +typedef khronos_ssize_t GLsizeiptr; +typedef khronos_intptr_t GLintptr; +#define GL_BUFFER_SIZE 0x8764 +#define GL_BUFFER_USAGE 0x8765 +#define GL_QUERY_COUNTER_BITS 0x8864 +#define GL_CURRENT_QUERY 0x8865 +#define GL_QUERY_RESULT 0x8866 +#define GL_QUERY_RESULT_AVAILABLE 0x8867 +#define GL_ARRAY_BUFFER 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define GL_ARRAY_BUFFER_BINDING 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895 +#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING 0x889F +#define GL_READ_ONLY 0x88B8 +#define GL_WRITE_ONLY 0x88B9 +#define GL_READ_WRITE 0x88BA +#define GL_BUFFER_ACCESS 0x88BB +#define GL_BUFFER_MAPPED 0x88BC +#define GL_BUFFER_MAP_POINTER 0x88BD +#define GL_STREAM_DRAW 0x88E0 +#define GL_STREAM_READ 0x88E1 +#define GL_STREAM_COPY 0x88E2 +#define GL_STATIC_DRAW 0x88E4 +#define GL_STATIC_READ 0x88E5 +#define GL_STATIC_COPY 0x88E6 +#define GL_DYNAMIC_DRAW 0x88E8 +#define GL_DYNAMIC_READ 0x88E9 +#define GL_DYNAMIC_COPY 0x88EA +#define GL_SAMPLES_PASSED 0x8914 +#define GL_SRC1_ALPHA 0x8589 +#define GL_VERTEX_ARRAY_BUFFER_BINDING 0x8896 +#define GL_NORMAL_ARRAY_BUFFER_BINDING 0x8897 +#define GL_COLOR_ARRAY_BUFFER_BINDING 0x8898 +#define GL_INDEX_ARRAY_BUFFER_BINDING 0x8899 +#define GL_TEXTURE_COORD_ARRAY_BUFFER_BINDING 0x889A +#define GL_EDGE_FLAG_ARRAY_BUFFER_BINDING 0x889B +#define GL_SECONDARY_COLOR_ARRAY_BUFFER_BINDING 0x889C +#define GL_FOG_COORDINATE_ARRAY_BUFFER_BINDING 0x889D +#define GL_WEIGHT_ARRAY_BUFFER_BINDING 0x889E +#define GL_FOG_COORD_SRC 0x8450 +#define GL_FOG_COORD 0x8451 +#define GL_CURRENT_FOG_COORD 0x8453 +#define GL_FOG_COORD_ARRAY_TYPE 0x8454 +#define GL_FOG_COORD_ARRAY_STRIDE 0x8455 +#define GL_FOG_COORD_ARRAY_POINTER 0x8456 +#define GL_FOG_COORD_ARRAY 0x8457 +#define GL_FOG_COORD_ARRAY_BUFFER_BINDING 0x889D +#define GL_SRC0_RGB 0x8580 +#define GL_SRC1_RGB 0x8581 +#define GL_SRC2_RGB 0x8582 +#define GL_SRC0_ALPHA 0x8588 +#define GL_SRC2_ALPHA 0x858A +typedef void (APIENTRYP PFNGLGENQUERIESPROC) (GLsizei n, GLuint *ids); +typedef void (APIENTRYP PFNGLDELETEQUERIESPROC) (GLsizei n, const GLuint *ids); +typedef GLboolean (APIENTRYP PFNGLISQUERYPROC) (GLuint id); +typedef void (APIENTRYP PFNGLBEGINQUERYPROC) (GLenum target, GLuint id); +typedef void (APIENTRYP PFNGLENDQUERYPROC) (GLenum target); +typedef void (APIENTRYP PFNGLGETQUERYIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETQUERYOBJECTIVPROC) (GLuint id, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETQUERYOBJECTUIVPROC) (GLuint id, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer); +typedef void (APIENTRYP PFNGLDELETEBUFFERSPROC) (GLsizei n, const GLuint *buffers); +typedef void (APIENTRYP PFNGLGENBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef 
GLboolean (APIENTRYP PFNGLISBUFFERPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLBUFFERDATAPROC) (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +typedef void (APIENTRYP PFNGLBUFFERSUBDATAPROC) (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +typedef void (APIENTRYP PFNGLGETBUFFERSUBDATAPROC) (GLenum target, GLintptr offset, GLsizeiptr size, void *data); +typedef void *(APIENTRYP PFNGLMAPBUFFERPROC) (GLenum target, GLenum access); +typedef GLboolean (APIENTRYP PFNGLUNMAPBUFFERPROC) (GLenum target); +typedef void (APIENTRYP PFNGLGETBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETBUFFERPOINTERVPROC) (GLenum target, GLenum pname, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGenQueries (GLsizei n, GLuint *ids); +GLAPI void APIENTRY glDeleteQueries (GLsizei n, const GLuint *ids); +GLAPI GLboolean APIENTRY glIsQuery (GLuint id); +GLAPI void APIENTRY glBeginQuery (GLenum target, GLuint id); +GLAPI void APIENTRY glEndQuery (GLenum target); +GLAPI void APIENTRY glGetQueryiv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetQueryObjectiv (GLuint id, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetQueryObjectuiv (GLuint id, GLenum pname, GLuint *params); +GLAPI void APIENTRY glBindBuffer (GLenum target, GLuint buffer); +GLAPI void APIENTRY glDeleteBuffers (GLsizei n, const GLuint *buffers); +GLAPI void APIENTRY glGenBuffers (GLsizei n, GLuint *buffers); +GLAPI GLboolean APIENTRY glIsBuffer (GLuint buffer); +GLAPI void APIENTRY glBufferData (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +GLAPI void APIENTRY glBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +GLAPI void APIENTRY glGetBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, void *data); +GLAPI void *APIENTRY glMapBuffer (GLenum target, GLenum access); +GLAPI GLboolean APIENTRY glUnmapBuffer (GLenum target); +GLAPI void APIENTRY glGetBufferParameteriv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetBufferPointerv (GLenum target, GLenum pname, void **params); +#endif +#endif /* GL_VERSION_1_5 */ + +#ifndef GL_VERSION_2_0 +#define GL_VERSION_2_0 1 +typedef char GLchar; +#define GL_BLEND_EQUATION_RGB 0x8009 +#define GL_VERTEX_ATTRIB_ARRAY_ENABLED 0x8622 +#define GL_VERTEX_ATTRIB_ARRAY_SIZE 0x8623 +#define GL_VERTEX_ATTRIB_ARRAY_STRIDE 0x8624 +#define GL_VERTEX_ATTRIB_ARRAY_TYPE 0x8625 +#define GL_CURRENT_VERTEX_ATTRIB 0x8626 +#define GL_VERTEX_PROGRAM_POINT_SIZE 0x8642 +#define GL_VERTEX_ATTRIB_ARRAY_POINTER 0x8645 +#define GL_STENCIL_BACK_FUNC 0x8800 +#define GL_STENCIL_BACK_FAIL 0x8801 +#define GL_STENCIL_BACK_PASS_DEPTH_FAIL 0x8802 +#define GL_STENCIL_BACK_PASS_DEPTH_PASS 0x8803 +#define GL_MAX_DRAW_BUFFERS 0x8824 +#define GL_DRAW_BUFFER0 0x8825 +#define GL_DRAW_BUFFER1 0x8826 +#define GL_DRAW_BUFFER2 0x8827 +#define GL_DRAW_BUFFER3 0x8828 +#define GL_DRAW_BUFFER4 0x8829 +#define GL_DRAW_BUFFER5 0x882A +#define GL_DRAW_BUFFER6 0x882B +#define GL_DRAW_BUFFER7 0x882C +#define GL_DRAW_BUFFER8 0x882D +#define GL_DRAW_BUFFER9 0x882E +#define GL_DRAW_BUFFER10 0x882F +#define GL_DRAW_BUFFER11 0x8830 +#define GL_DRAW_BUFFER12 0x8831 +#define GL_DRAW_BUFFER13 0x8832 +#define GL_DRAW_BUFFER14 0x8833 +#define GL_DRAW_BUFFER15 0x8834 +#define GL_BLEND_EQUATION_ALPHA 0x883D +#define GL_MAX_VERTEX_ATTRIBS 0x8869 +#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED 0x886A +#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872 +#define 
GL_FRAGMENT_SHADER 0x8B30 +#define GL_VERTEX_SHADER 0x8B31 +#define GL_MAX_FRAGMENT_UNIFORM_COMPONENTS 0x8B49 +#define GL_MAX_VERTEX_UNIFORM_COMPONENTS 0x8B4A +#define GL_MAX_VARYING_FLOATS 0x8B4B +#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS 0x8B4C +#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D +#define GL_SHADER_TYPE 0x8B4F +#define GL_FLOAT_VEC2 0x8B50 +#define GL_FLOAT_VEC3 0x8B51 +#define GL_FLOAT_VEC4 0x8B52 +#define GL_INT_VEC2 0x8B53 +#define GL_INT_VEC3 0x8B54 +#define GL_INT_VEC4 0x8B55 +#define GL_BOOL 0x8B56 +#define GL_BOOL_VEC2 0x8B57 +#define GL_BOOL_VEC3 0x8B58 +#define GL_BOOL_VEC4 0x8B59 +#define GL_FLOAT_MAT2 0x8B5A +#define GL_FLOAT_MAT3 0x8B5B +#define GL_FLOAT_MAT4 0x8B5C +#define GL_SAMPLER_1D 0x8B5D +#define GL_SAMPLER_2D 0x8B5E +#define GL_SAMPLER_3D 0x8B5F +#define GL_SAMPLER_CUBE 0x8B60 +#define GL_SAMPLER_1D_SHADOW 0x8B61 +#define GL_SAMPLER_2D_SHADOW 0x8B62 +#define GL_DELETE_STATUS 0x8B80 +#define GL_COMPILE_STATUS 0x8B81 +#define GL_LINK_STATUS 0x8B82 +#define GL_VALIDATE_STATUS 0x8B83 +#define GL_INFO_LOG_LENGTH 0x8B84 +#define GL_ATTACHED_SHADERS 0x8B85 +#define GL_ACTIVE_UNIFORMS 0x8B86 +#define GL_ACTIVE_UNIFORM_MAX_LENGTH 0x8B87 +#define GL_SHADER_SOURCE_LENGTH 0x8B88 +#define GL_ACTIVE_ATTRIBUTES 0x8B89 +#define GL_ACTIVE_ATTRIBUTE_MAX_LENGTH 0x8B8A +#define GL_FRAGMENT_SHADER_DERIVATIVE_HINT 0x8B8B +#define GL_SHADING_LANGUAGE_VERSION 0x8B8C +#define GL_CURRENT_PROGRAM 0x8B8D +#define GL_POINT_SPRITE_COORD_ORIGIN 0x8CA0 +#define GL_LOWER_LEFT 0x8CA1 +#define GL_UPPER_LEFT 0x8CA2 +#define GL_STENCIL_BACK_REF 0x8CA3 +#define GL_STENCIL_BACK_VALUE_MASK 0x8CA4 +#define GL_STENCIL_BACK_WRITEMASK 0x8CA5 +#define GL_VERTEX_PROGRAM_TWO_SIDE 0x8643 +#define GL_POINT_SPRITE 0x8861 +#define GL_COORD_REPLACE 0x8862 +#define GL_MAX_TEXTURE_COORDS 0x8871 +typedef void (APIENTRYP PFNGLBLENDEQUATIONSEPARATEPROC) (GLenum modeRGB, GLenum modeAlpha); +typedef void (APIENTRYP PFNGLDRAWBUFFERSPROC) (GLsizei n, const GLenum *bufs); +typedef void (APIENTRYP PFNGLSTENCILOPSEPARATEPROC) (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +typedef void (APIENTRYP PFNGLSTENCILFUNCSEPARATEPROC) (GLenum face, GLenum func, GLint ref, GLuint mask); +typedef void (APIENTRYP PFNGLSTENCILMASKSEPARATEPROC) (GLenum face, GLuint mask); +typedef void (APIENTRYP PFNGLATTACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (APIENTRYP PFNGLBINDATTRIBLOCATIONPROC) (GLuint program, GLuint index, const GLchar *name); +typedef void (APIENTRYP PFNGLCOMPILESHADERPROC) (GLuint shader); +typedef GLuint (APIENTRYP PFNGLCREATEPROGRAMPROC) (void); +typedef GLuint (APIENTRYP PFNGLCREATESHADERPROC) (GLenum type); +typedef void (APIENTRYP PFNGLDELETEPROGRAMPROC) (GLuint program); +typedef void (APIENTRYP PFNGLDELETESHADERPROC) (GLuint shader); +typedef void (APIENTRYP PFNGLDETACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (APIENTRYP PFNGLDISABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (APIENTRYP PFNGLENABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (APIENTRYP PFNGLGETACTIVEATTRIBPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (APIENTRYP PFNGLGETATTACHEDSHADERSPROC) (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +typedef GLint (APIENTRYP PFNGLGETATTRIBLOCATIONPROC) (GLuint program, const GLchar 
*name); +typedef void (APIENTRYP PFNGLGETPROGRAMIVPROC) (GLuint program, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETPROGRAMINFOLOGPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (APIENTRYP PFNGLGETSHADERIVPROC) (GLuint shader, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETSHADERINFOLOGPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (APIENTRYP PFNGLGETSHADERSOURCEPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +typedef GLint (APIENTRYP PFNGLGETUNIFORMLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (APIENTRYP PFNGLGETUNIFORMFVPROC) (GLuint program, GLint location, GLfloat *params); +typedef void (APIENTRYP PFNGLGETUNIFORMIVPROC) (GLuint program, GLint location, GLint *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBDVPROC) (GLuint index, GLenum pname, GLdouble *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBFVPROC) (GLuint index, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVPROC) (GLuint index, GLenum pname, void **pointer); +typedef GLboolean (APIENTRYP PFNGLISPROGRAMPROC) (GLuint program); +typedef GLboolean (APIENTRYP PFNGLISSHADERPROC) (GLuint shader); +typedef void (APIENTRYP PFNGLLINKPROGRAMPROC) (GLuint program); +typedef void (APIENTRYP PFNGLSHADERSOURCEPROC) (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +typedef void (APIENTRYP PFNGLUSEPROGRAMPROC) (GLuint program); +typedef void (APIENTRYP PFNGLUNIFORM1FPROC) (GLint location, GLfloat v0); +typedef void (APIENTRYP PFNGLUNIFORM2FPROC) (GLint location, GLfloat v0, GLfloat v1); +typedef void (APIENTRYP PFNGLUNIFORM3FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (APIENTRYP PFNGLUNIFORM4FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (APIENTRYP PFNGLUNIFORM1IPROC) (GLint location, GLint v0); +typedef void (APIENTRYP PFNGLUNIFORM2IPROC) (GLint location, GLint v0, GLint v1); +typedef void (APIENTRYP PFNGLUNIFORM3IPROC) (GLint location, GLint v0, GLint v1, GLint v2); +typedef void (APIENTRYP PFNGLUNIFORM4IPROC) (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (APIENTRYP PFNGLUNIFORM1FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM2FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM3FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM4FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM1IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORM2IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORM3IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORM4IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); 
+typedef void (APIENTRYP PFNGLVALIDATEPROGRAMPROC) (GLuint program); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1DPROC) (GLuint index, GLdouble x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1FPROC) (GLuint index, GLfloat x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1FVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1SPROC) (GLuint index, GLshort x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1SVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2DPROC) (GLuint index, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2FPROC) (GLuint index, GLfloat x, GLfloat y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2FVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2SPROC) (GLuint index, GLshort x, GLshort y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2SVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3DPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3FVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3SPROC) (GLuint index, GLshort x, GLshort y, GLshort z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3SVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NBVPROC) (GLuint index, const GLbyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NIVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NSVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUBPROC) (GLuint index, GLubyte x, GLubyte y, GLubyte z, GLubyte w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUBVPROC) (GLuint index, const GLubyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUSVPROC) (GLuint index, const GLushort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4BVPROC) (GLuint index, const GLbyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4DPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4FVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4IVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4SPROC) (GLuint index, GLshort x, GLshort y, GLshort z, GLshort w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4SVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4UBVPROC) (GLuint index, const GLubyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4UIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4USVPROC) (GLuint index, const GLushort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBPOINTERPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendEquationSeparate (GLenum modeRGB, GLenum modeAlpha); +GLAPI void 
APIENTRY glDrawBuffers (GLsizei n, const GLenum *bufs); +GLAPI void APIENTRY glStencilOpSeparate (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +GLAPI void APIENTRY glStencilFuncSeparate (GLenum face, GLenum func, GLint ref, GLuint mask); +GLAPI void APIENTRY glStencilMaskSeparate (GLenum face, GLuint mask); +GLAPI void APIENTRY glAttachShader (GLuint program, GLuint shader); +GLAPI void APIENTRY glBindAttribLocation (GLuint program, GLuint index, const GLchar *name); +GLAPI void APIENTRY glCompileShader (GLuint shader); +GLAPI GLuint APIENTRY glCreateProgram (void); +GLAPI GLuint APIENTRY glCreateShader (GLenum type); +GLAPI void APIENTRY glDeleteProgram (GLuint program); +GLAPI void APIENTRY glDeleteShader (GLuint shader); +GLAPI void APIENTRY glDetachShader (GLuint program, GLuint shader); +GLAPI void APIENTRY glDisableVertexAttribArray (GLuint index); +GLAPI void APIENTRY glEnableVertexAttribArray (GLuint index); +GLAPI void APIENTRY glGetActiveAttrib (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GLAPI void APIENTRY glGetActiveUniform (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GLAPI void APIENTRY glGetAttachedShaders (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +GLAPI GLint APIENTRY glGetAttribLocation (GLuint program, const GLchar *name); +GLAPI void APIENTRY glGetProgramiv (GLuint program, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetProgramInfoLog (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GLAPI void APIENTRY glGetShaderiv (GLuint shader, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetShaderInfoLog (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GLAPI void APIENTRY glGetShaderSource (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +GLAPI GLint APIENTRY glGetUniformLocation (GLuint program, const GLchar *name); +GLAPI void APIENTRY glGetUniformfv (GLuint program, GLint location, GLfloat *params); +GLAPI void APIENTRY glGetUniformiv (GLuint program, GLint location, GLint *params); +GLAPI void APIENTRY glGetVertexAttribdv (GLuint index, GLenum pname, GLdouble *params); +GLAPI void APIENTRY glGetVertexAttribfv (GLuint index, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetVertexAttribiv (GLuint index, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetVertexAttribPointerv (GLuint index, GLenum pname, void **pointer); +GLAPI GLboolean APIENTRY glIsProgram (GLuint program); +GLAPI GLboolean APIENTRY glIsShader (GLuint shader); +GLAPI void APIENTRY glLinkProgram (GLuint program); +GLAPI void APIENTRY glShaderSource (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +GLAPI void APIENTRY glUseProgram (GLuint program); +GLAPI void APIENTRY glUniform1f (GLint location, GLfloat v0); +GLAPI void APIENTRY glUniform2f (GLint location, GLfloat v0, GLfloat v1); +GLAPI void APIENTRY glUniform3f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GLAPI void APIENTRY glUniform4f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GLAPI void APIENTRY glUniform1i (GLint location, GLint v0); +GLAPI void APIENTRY glUniform2i (GLint location, GLint v0, GLint v1); +GLAPI void APIENTRY glUniform3i (GLint location, GLint v0, GLint v1, GLint v2); +GLAPI void APIENTRY glUniform4i (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GLAPI void APIENTRY glUniform1fv (GLint location, GLsizei count, 
const GLfloat *value); +GLAPI void APIENTRY glUniform2fv (GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glUniform3fv (GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glUniform4fv (GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glUniform1iv (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniform2iv (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniform3iv (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniform4iv (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniformMatrix2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glValidateProgram (GLuint program); +GLAPI void APIENTRY glVertexAttrib1d (GLuint index, GLdouble x); +GLAPI void APIENTRY glVertexAttrib1dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib1f (GLuint index, GLfloat x); +GLAPI void APIENTRY glVertexAttrib1fv (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib1s (GLuint index, GLshort x); +GLAPI void APIENTRY glVertexAttrib1sv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib2d (GLuint index, GLdouble x, GLdouble y); +GLAPI void APIENTRY glVertexAttrib2dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib2f (GLuint index, GLfloat x, GLfloat y); +GLAPI void APIENTRY glVertexAttrib2fv (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib2s (GLuint index, GLshort x, GLshort y); +GLAPI void APIENTRY glVertexAttrib2sv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib3d (GLuint index, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glVertexAttrib3dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib3f (GLuint index, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glVertexAttrib3fv (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib3s (GLuint index, GLshort x, GLshort y, GLshort z); +GLAPI void APIENTRY glVertexAttrib3sv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib4Nbv (GLuint index, const GLbyte *v); +GLAPI void APIENTRY glVertexAttrib4Niv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttrib4Nsv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib4Nub (GLuint index, GLubyte x, GLubyte y, GLubyte z, GLubyte w); +GLAPI void APIENTRY glVertexAttrib4Nubv (GLuint index, const GLubyte *v); +GLAPI void APIENTRY glVertexAttrib4Nuiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttrib4Nusv (GLuint index, const GLushort *v); +GLAPI void APIENTRY glVertexAttrib4bv (GLuint index, const GLbyte *v); +GLAPI void APIENTRY glVertexAttrib4d (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glVertexAttrib4dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib4f (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glVertexAttrib4fv (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib4iv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttrib4s (GLuint index, GLshort x, GLshort y, GLshort z, GLshort w); +GLAPI void APIENTRY 
glVertexAttrib4sv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib4ubv (GLuint index, const GLubyte *v); +GLAPI void APIENTRY glVertexAttrib4uiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttrib4usv (GLuint index, const GLushort *v); +GLAPI void APIENTRY glVertexAttribPointer (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +#endif +#endif /* GL_VERSION_2_0 */ + +#ifndef GL_VERSION_2_1 +#define GL_VERSION_2_1 1 +#define GL_PIXEL_PACK_BUFFER 0x88EB +#define GL_PIXEL_UNPACK_BUFFER 0x88EC +#define GL_PIXEL_PACK_BUFFER_BINDING 0x88ED +#define GL_PIXEL_UNPACK_BUFFER_BINDING 0x88EF +#define GL_FLOAT_MAT2x3 0x8B65 +#define GL_FLOAT_MAT2x4 0x8B66 +#define GL_FLOAT_MAT3x2 0x8B67 +#define GL_FLOAT_MAT3x4 0x8B68 +#define GL_FLOAT_MAT4x2 0x8B69 +#define GL_FLOAT_MAT4x3 0x8B6A +#define GL_SRGB 0x8C40 +#define GL_SRGB8 0x8C41 +#define GL_SRGB_ALPHA 0x8C42 +#define GL_SRGB8_ALPHA8 0x8C43 +#define GL_COMPRESSED_SRGB 0x8C48 +#define GL_COMPRESSED_SRGB_ALPHA 0x8C49 +#define GL_CURRENT_RASTER_SECONDARY_COLOR 0x845F +#define GL_SLUMINANCE_ALPHA 0x8C44 +#define GL_SLUMINANCE8_ALPHA8 0x8C45 +#define GL_SLUMINANCE 0x8C46 +#define GL_SLUMINANCE8 0x8C47 +#define GL_COMPRESSED_SLUMINANCE 0x8C4A +#define GL_COMPRESSED_SLUMINANCE_ALPHA 0x8C4B +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glUniformMatrix2x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix3x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix2x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix4x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix3x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix4x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#endif +#endif /* GL_VERSION_2_1 */ + +#ifndef GL_VERSION_3_0 +#define GL_VERSION_3_0 1 +typedef unsigned short GLhalf; +#define GL_COMPARE_REF_TO_TEXTURE 0x884E +#define GL_CLIP_DISTANCE0 0x3000 +#define GL_CLIP_DISTANCE1 0x3001 +#define GL_CLIP_DISTANCE2 0x3002 +#define GL_CLIP_DISTANCE3 0x3003 +#define GL_CLIP_DISTANCE4 0x3004 +#define GL_CLIP_DISTANCE5 0x3005 +#define GL_CLIP_DISTANCE6 0x3006 +#define GL_CLIP_DISTANCE7 0x3007 +#define GL_MAX_CLIP_DISTANCES 0x0D32 +#define GL_MAJOR_VERSION 0x821B +#define GL_MINOR_VERSION 0x821C +#define GL_NUM_EXTENSIONS 0x821D +#define GL_CONTEXT_FLAGS 0x821E +#define GL_COMPRESSED_RED 0x8225 +#define GL_COMPRESSED_RG 0x8226 +#define 
GL_CONTEXT_FLAG_FORWARD_COMPATIBLE_BIT 0x00000001 +#define GL_RGBA32F 0x8814 +#define GL_RGB32F 0x8815 +#define GL_RGBA16F 0x881A +#define GL_RGB16F 0x881B +#define GL_VERTEX_ATTRIB_ARRAY_INTEGER 0x88FD +#define GL_MAX_ARRAY_TEXTURE_LAYERS 0x88FF +#define GL_MIN_PROGRAM_TEXEL_OFFSET 0x8904 +#define GL_MAX_PROGRAM_TEXEL_OFFSET 0x8905 +#define GL_CLAMP_READ_COLOR 0x891C +#define GL_FIXED_ONLY 0x891D +#define GL_MAX_VARYING_COMPONENTS 0x8B4B +#define GL_TEXTURE_1D_ARRAY 0x8C18 +#define GL_PROXY_TEXTURE_1D_ARRAY 0x8C19 +#define GL_TEXTURE_2D_ARRAY 0x8C1A +#define GL_PROXY_TEXTURE_2D_ARRAY 0x8C1B +#define GL_TEXTURE_BINDING_1D_ARRAY 0x8C1C +#define GL_TEXTURE_BINDING_2D_ARRAY 0x8C1D +#define GL_R11F_G11F_B10F 0x8C3A +#define GL_UNSIGNED_INT_10F_11F_11F_REV 0x8C3B +#define GL_RGB9_E5 0x8C3D +#define GL_UNSIGNED_INT_5_9_9_9_REV 0x8C3E +#define GL_TEXTURE_SHARED_SIZE 0x8C3F +#define GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH 0x8C76 +#define GL_TRANSFORM_FEEDBACK_BUFFER_MODE 0x8C7F +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS 0x8C80 +#define GL_TRANSFORM_FEEDBACK_VARYINGS 0x8C83 +#define GL_TRANSFORM_FEEDBACK_BUFFER_START 0x8C84 +#define GL_TRANSFORM_FEEDBACK_BUFFER_SIZE 0x8C85 +#define GL_PRIMITIVES_GENERATED 0x8C87 +#define GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN 0x8C88 +#define GL_RASTERIZER_DISCARD 0x8C89 +#define GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS 0x8C8A +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS 0x8C8B +#define GL_INTERLEAVED_ATTRIBS 0x8C8C +#define GL_SEPARATE_ATTRIBS 0x8C8D +#define GL_TRANSFORM_FEEDBACK_BUFFER 0x8C8E +#define GL_TRANSFORM_FEEDBACK_BUFFER_BINDING 0x8C8F +#define GL_RGBA32UI 0x8D70 +#define GL_RGB32UI 0x8D71 +#define GL_RGBA16UI 0x8D76 +#define GL_RGB16UI 0x8D77 +#define GL_RGBA8UI 0x8D7C +#define GL_RGB8UI 0x8D7D +#define GL_RGBA32I 0x8D82 +#define GL_RGB32I 0x8D83 +#define GL_RGBA16I 0x8D88 +#define GL_RGB16I 0x8D89 +#define GL_RGBA8I 0x8D8E +#define GL_RGB8I 0x8D8F +#define GL_RED_INTEGER 0x8D94 +#define GL_GREEN_INTEGER 0x8D95 +#define GL_BLUE_INTEGER 0x8D96 +#define GL_RGB_INTEGER 0x8D98 +#define GL_RGBA_INTEGER 0x8D99 +#define GL_BGR_INTEGER 0x8D9A +#define GL_BGRA_INTEGER 0x8D9B +#define GL_SAMPLER_1D_ARRAY 0x8DC0 +#define GL_SAMPLER_2D_ARRAY 0x8DC1 +#define GL_SAMPLER_1D_ARRAY_SHADOW 0x8DC3 +#define GL_SAMPLER_2D_ARRAY_SHADOW 0x8DC4 +#define GL_SAMPLER_CUBE_SHADOW 0x8DC5 +#define GL_UNSIGNED_INT_VEC2 0x8DC6 +#define GL_UNSIGNED_INT_VEC3 0x8DC7 +#define GL_UNSIGNED_INT_VEC4 0x8DC8 +#define GL_INT_SAMPLER_1D 0x8DC9 +#define GL_INT_SAMPLER_2D 0x8DCA +#define GL_INT_SAMPLER_3D 0x8DCB +#define GL_INT_SAMPLER_CUBE 0x8DCC +#define GL_INT_SAMPLER_1D_ARRAY 0x8DCE +#define GL_INT_SAMPLER_2D_ARRAY 0x8DCF +#define GL_UNSIGNED_INT_SAMPLER_1D 0x8DD1 +#define GL_UNSIGNED_INT_SAMPLER_2D 0x8DD2 +#define GL_UNSIGNED_INT_SAMPLER_3D 0x8DD3 +#define GL_UNSIGNED_INT_SAMPLER_CUBE 0x8DD4 +#define GL_UNSIGNED_INT_SAMPLER_1D_ARRAY 0x8DD6 +#define GL_UNSIGNED_INT_SAMPLER_2D_ARRAY 0x8DD7 +#define GL_QUERY_WAIT 0x8E13 +#define GL_QUERY_NO_WAIT 0x8E14 +#define GL_QUERY_BY_REGION_WAIT 0x8E15 +#define GL_QUERY_BY_REGION_NO_WAIT 0x8E16 +#define GL_BUFFER_ACCESS_FLAGS 0x911F +#define GL_BUFFER_MAP_LENGTH 0x9120 +#define GL_BUFFER_MAP_OFFSET 0x9121 +#define GL_DEPTH_COMPONENT32F 0x8CAC +#define GL_DEPTH32F_STENCIL8 0x8CAD +#define GL_FLOAT_32_UNSIGNED_INT_24_8_REV 0x8DAD +#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506 +#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING 0x8210 +#define GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE 0x8211 +#define 
GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE 0x8212 +#define GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE 0x8213 +#define GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE 0x8214 +#define GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE 0x8215 +#define GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE 0x8216 +#define GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE 0x8217 +#define GL_FRAMEBUFFER_DEFAULT 0x8218 +#define GL_FRAMEBUFFER_UNDEFINED 0x8219 +#define GL_DEPTH_STENCIL_ATTACHMENT 0x821A +#define GL_MAX_RENDERBUFFER_SIZE 0x84E8 +#define GL_DEPTH_STENCIL 0x84F9 +#define GL_UNSIGNED_INT_24_8 0x84FA +#define GL_DEPTH24_STENCIL8 0x88F0 +#define GL_TEXTURE_STENCIL_SIZE 0x88F1 +#define GL_TEXTURE_RED_TYPE 0x8C10 +#define GL_TEXTURE_GREEN_TYPE 0x8C11 +#define GL_TEXTURE_BLUE_TYPE 0x8C12 +#define GL_TEXTURE_ALPHA_TYPE 0x8C13 +#define GL_TEXTURE_DEPTH_TYPE 0x8C16 +#define GL_UNSIGNED_NORMALIZED 0x8C17 +#define GL_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_DRAW_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_RENDERBUFFER_BINDING 0x8CA7 +#define GL_READ_FRAMEBUFFER 0x8CA8 +#define GL_DRAW_FRAMEBUFFER 0x8CA9 +#define GL_READ_FRAMEBUFFER_BINDING 0x8CAA +#define GL_RENDERBUFFER_SAMPLES 0x8CAB +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE 0x8CD3 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER 0x8CD4 +#define GL_FRAMEBUFFER_COMPLETE 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER 0x8CDB +#define GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER 0x8CDC +#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD +#define GL_MAX_COLOR_ATTACHMENTS 0x8CDF +#define GL_COLOR_ATTACHMENT0 0x8CE0 +#define GL_COLOR_ATTACHMENT1 0x8CE1 +#define GL_COLOR_ATTACHMENT2 0x8CE2 +#define GL_COLOR_ATTACHMENT3 0x8CE3 +#define GL_COLOR_ATTACHMENT4 0x8CE4 +#define GL_COLOR_ATTACHMENT5 0x8CE5 +#define GL_COLOR_ATTACHMENT6 0x8CE6 +#define GL_COLOR_ATTACHMENT7 0x8CE7 +#define GL_COLOR_ATTACHMENT8 0x8CE8 +#define GL_COLOR_ATTACHMENT9 0x8CE9 +#define GL_COLOR_ATTACHMENT10 0x8CEA +#define GL_COLOR_ATTACHMENT11 0x8CEB +#define GL_COLOR_ATTACHMENT12 0x8CEC +#define GL_COLOR_ATTACHMENT13 0x8CED +#define GL_COLOR_ATTACHMENT14 0x8CEE +#define GL_COLOR_ATTACHMENT15 0x8CEF +#define GL_COLOR_ATTACHMENT16 0x8CF0 +#define GL_COLOR_ATTACHMENT17 0x8CF1 +#define GL_COLOR_ATTACHMENT18 0x8CF2 +#define GL_COLOR_ATTACHMENT19 0x8CF3 +#define GL_COLOR_ATTACHMENT20 0x8CF4 +#define GL_COLOR_ATTACHMENT21 0x8CF5 +#define GL_COLOR_ATTACHMENT22 0x8CF6 +#define GL_COLOR_ATTACHMENT23 0x8CF7 +#define GL_COLOR_ATTACHMENT24 0x8CF8 +#define GL_COLOR_ATTACHMENT25 0x8CF9 +#define GL_COLOR_ATTACHMENT26 0x8CFA +#define GL_COLOR_ATTACHMENT27 0x8CFB +#define GL_COLOR_ATTACHMENT28 0x8CFC +#define GL_COLOR_ATTACHMENT29 0x8CFD +#define GL_COLOR_ATTACHMENT30 0x8CFE +#define GL_COLOR_ATTACHMENT31 0x8CFF +#define GL_DEPTH_ATTACHMENT 0x8D00 +#define GL_STENCIL_ATTACHMENT 0x8D20 +#define GL_FRAMEBUFFER 0x8D40 +#define GL_RENDERBUFFER 0x8D41 +#define GL_RENDERBUFFER_WIDTH 0x8D42 +#define GL_RENDERBUFFER_HEIGHT 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT 0x8D44 +#define GL_STENCIL_INDEX1 0x8D46 +#define GL_STENCIL_INDEX4 0x8D47 +#define GL_STENCIL_INDEX8 0x8D48 +#define GL_STENCIL_INDEX16 0x8D49 +#define GL_RENDERBUFFER_RED_SIZE 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE 0x8D53 +#define 
GL_RENDERBUFFER_DEPTH_SIZE 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE 0x8D55 +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE 0x8D56 +#define GL_MAX_SAMPLES 0x8D57 +#define GL_INDEX 0x8222 +#define GL_TEXTURE_LUMINANCE_TYPE 0x8C14 +#define GL_TEXTURE_INTENSITY_TYPE 0x8C15 +#define GL_FRAMEBUFFER_SRGB 0x8DB9 +#define GL_HALF_FLOAT 0x140B +#define GL_MAP_READ_BIT 0x0001 +#define GL_MAP_WRITE_BIT 0x0002 +#define GL_MAP_INVALIDATE_RANGE_BIT 0x0004 +#define GL_MAP_INVALIDATE_BUFFER_BIT 0x0008 +#define GL_MAP_FLUSH_EXPLICIT_BIT 0x0010 +#define GL_MAP_UNSYNCHRONIZED_BIT 0x0020 +#define GL_COMPRESSED_RED_RGTC1 0x8DBB +#define GL_COMPRESSED_SIGNED_RED_RGTC1 0x8DBC +#define GL_COMPRESSED_RG_RGTC2 0x8DBD +#define GL_COMPRESSED_SIGNED_RG_RGTC2 0x8DBE +#define GL_RG 0x8227 +#define GL_RG_INTEGER 0x8228 +#define GL_R8 0x8229 +#define GL_R16 0x822A +#define GL_RG8 0x822B +#define GL_RG16 0x822C +#define GL_R16F 0x822D +#define GL_R32F 0x822E +#define GL_RG16F 0x822F +#define GL_RG32F 0x8230 +#define GL_R8I 0x8231 +#define GL_R8UI 0x8232 +#define GL_R16I 0x8233 +#define GL_R16UI 0x8234 +#define GL_R32I 0x8235 +#define GL_R32UI 0x8236 +#define GL_RG8I 0x8237 +#define GL_RG8UI 0x8238 +#define GL_RG16I 0x8239 +#define GL_RG16UI 0x823A +#define GL_RG32I 0x823B +#define GL_RG32UI 0x823C +#define GL_VERTEX_ARRAY_BINDING 0x85B5 +#define GL_CLAMP_VERTEX_COLOR 0x891A +#define GL_CLAMP_FRAGMENT_COLOR 0x891B +#define GL_ALPHA_INTEGER 0x8D97 +typedef void (APIENTRYP PFNGLCOLORMASKIPROC) (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +typedef void (APIENTRYP PFNGLGETBOOLEANI_VPROC) (GLenum target, GLuint index, GLboolean *data); +typedef void (APIENTRYP PFNGLGETINTEGERI_VPROC) (GLenum target, GLuint index, GLint *data); +typedef void (APIENTRYP PFNGLENABLEIPROC) (GLenum target, GLuint index); +typedef void (APIENTRYP PFNGLDISABLEIPROC) (GLenum target, GLuint index); +typedef GLboolean (APIENTRYP PFNGLISENABLEDIPROC) (GLenum target, GLuint index); +typedef void (APIENTRYP PFNGLBEGINTRANSFORMFEEDBACKPROC) (GLenum primitiveMode); +typedef void (APIENTRYP PFNGLENDTRANSFORMFEEDBACKPROC) (void); +typedef void (APIENTRYP PFNGLBINDBUFFERRANGEPROC) (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLBINDBUFFERBASEPROC) (GLenum target, GLuint index, GLuint buffer); +typedef void (APIENTRYP PFNGLTRANSFORMFEEDBACKVARYINGSPROC) (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +typedef void (APIENTRYP PFNGLGETTRANSFORMFEEDBACKVARYINGPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +typedef void (APIENTRYP PFNGLCLAMPCOLORPROC) (GLenum target, GLenum clamp); +typedef void (APIENTRYP PFNGLBEGINCONDITIONALRENDERPROC) (GLuint id, GLenum mode); +typedef void (APIENTRYP PFNGLENDCONDITIONALRENDERPROC) (void); +typedef void (APIENTRYP PFNGLVERTEXATTRIBIPOINTERPROC) (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIUIVPROC) (GLuint index, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1IPROC) (GLuint index, GLint x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2IPROC) (GLuint index, GLint x, GLint y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3IPROC) (GLuint index, GLint x, GLint y, GLint z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4IPROC) (GLuint index, GLint x, 
GLint y, GLint z, GLint w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1UIPROC) (GLuint index, GLuint x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2UIPROC) (GLuint index, GLuint x, GLuint y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3UIPROC) (GLuint index, GLuint x, GLuint y, GLuint z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UIPROC) (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1IVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2IVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3IVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4IVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1UIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2UIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3UIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UIVPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4BVPROC) (GLuint index, const GLbyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4SVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UBVPROC) (GLuint index, const GLubyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4USVPROC) (GLuint index, const GLushort *v); +typedef void (APIENTRYP PFNGLGETUNIFORMUIVPROC) (GLuint program, GLint location, GLuint *params); +typedef void (APIENTRYP PFNGLBINDFRAGDATALOCATIONPROC) (GLuint program, GLuint color, const GLchar *name); +typedef GLint (APIENTRYP PFNGLGETFRAGDATALOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (APIENTRYP PFNGLUNIFORM1UIPROC) (GLint location, GLuint v0); +typedef void (APIENTRYP PFNGLUNIFORM2UIPROC) (GLint location, GLuint v0, GLuint v1); +typedef void (APIENTRYP PFNGLUNIFORM3UIPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (APIENTRYP PFNGLUNIFORM4UIPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (APIENTRYP PFNGLUNIFORM1UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLUNIFORM2UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLUNIFORM3UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLUNIFORM4UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLTEXPARAMETERIIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLTEXPARAMETERIUIVPROC) (GLenum target, GLenum pname, const GLuint *params); +typedef void (APIENTRYP PFNGLGETTEXPARAMETERIIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXPARAMETERIUIVPROC) (GLenum target, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLCLEARBUFFERIVPROC) (GLenum buffer, GLint drawbuffer, const GLint *value); +typedef void (APIENTRYP PFNGLCLEARBUFFERUIVPROC) (GLenum buffer, GLint drawbuffer, const GLuint *value); +typedef void (APIENTRYP PFNGLCLEARBUFFERFVPROC) (GLenum buffer, GLint drawbuffer, const GLfloat *value); +typedef void (APIENTRYP PFNGLCLEARBUFFERFIPROC) (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +typedef const GLubyte *(APIENTRYP PFNGLGETSTRINGIPROC) (GLenum name, GLuint index); +typedef GLboolean (APIENTRYP PFNGLISRENDERBUFFERPROC) (GLuint renderbuffer); +typedef void (APIENTRYP PFNGLBINDRENDERBUFFERPROC) 
(GLenum target, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLDELETERENDERBUFFERSPROC) (GLsizei n, const GLuint *renderbuffers); +typedef void (APIENTRYP PFNGLGENRENDERBUFFERSPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (APIENTRYP PFNGLRENDERBUFFERSTORAGEPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef GLboolean (APIENTRYP PFNGLISFRAMEBUFFERPROC) (GLuint framebuffer); +typedef void (APIENTRYP PFNGLBINDFRAMEBUFFERPROC) (GLenum target, GLuint framebuffer); +typedef void (APIENTRYP PFNGLDELETEFRAMEBUFFERSPROC) (GLsizei n, const GLuint *framebuffers); +typedef void (APIENTRYP PFNGLGENFRAMEBUFFERSPROC) (GLsizei n, GLuint *framebuffers); +typedef GLenum (APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSPROC) (GLenum target); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE1DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE3DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +typedef void (APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFERPROC) (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC) (GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGENERATEMIPMAPPROC) (GLenum target); +typedef void (APIENTRYP PFNGLBLITFRAMEBUFFERPROC) (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef void (APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void *(APIENTRYP PFNGLMAPBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length); +typedef void (APIENTRYP PFNGLBINDVERTEXARRAYPROC) (GLuint array); +typedef void (APIENTRYP PFNGLDELETEVERTEXARRAYSPROC) (GLsizei n, const GLuint *arrays); +typedef void (APIENTRYP PFNGLGENVERTEXARRAYSPROC) (GLsizei n, GLuint *arrays); +typedef GLboolean (APIENTRYP PFNGLISVERTEXARRAYPROC) (GLuint array); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glColorMaski (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +GLAPI void APIENTRY glGetBooleani_v (GLenum target, GLuint index, GLboolean *data); +GLAPI void APIENTRY glGetIntegeri_v (GLenum target, GLuint index, GLint *data); +GLAPI void APIENTRY glEnablei (GLenum target, GLuint index); +GLAPI void APIENTRY glDisablei (GLenum target, GLuint index); +GLAPI GLboolean APIENTRY glIsEnabledi (GLenum target, GLuint index); +GLAPI void APIENTRY glBeginTransformFeedback (GLenum primitiveMode); +GLAPI void APIENTRY glEndTransformFeedback (void); +GLAPI void APIENTRY glBindBufferRange (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +GLAPI void APIENTRY glBindBufferBase (GLenum target, GLuint index, GLuint buffer); +GLAPI void APIENTRY glTransformFeedbackVaryings (GLuint 
program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +GLAPI void APIENTRY glGetTransformFeedbackVarying (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +GLAPI void APIENTRY glClampColor (GLenum target, GLenum clamp); +GLAPI void APIENTRY glBeginConditionalRender (GLuint id, GLenum mode); +GLAPI void APIENTRY glEndConditionalRender (void); +GLAPI void APIENTRY glVertexAttribIPointer (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glGetVertexAttribIiv (GLuint index, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetVertexAttribIuiv (GLuint index, GLenum pname, GLuint *params); +GLAPI void APIENTRY glVertexAttribI1i (GLuint index, GLint x); +GLAPI void APIENTRY glVertexAttribI2i (GLuint index, GLint x, GLint y); +GLAPI void APIENTRY glVertexAttribI3i (GLuint index, GLint x, GLint y, GLint z); +GLAPI void APIENTRY glVertexAttribI4i (GLuint index, GLint x, GLint y, GLint z, GLint w); +GLAPI void APIENTRY glVertexAttribI1ui (GLuint index, GLuint x); +GLAPI void APIENTRY glVertexAttribI2ui (GLuint index, GLuint x, GLuint y); +GLAPI void APIENTRY glVertexAttribI3ui (GLuint index, GLuint x, GLuint y, GLuint z); +GLAPI void APIENTRY glVertexAttribI4ui (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GLAPI void APIENTRY glVertexAttribI1iv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI2iv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI3iv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI4iv (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI1uiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI2uiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI3uiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI4uiv (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI4bv (GLuint index, const GLbyte *v); +GLAPI void APIENTRY glVertexAttribI4sv (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttribI4ubv (GLuint index, const GLubyte *v); +GLAPI void APIENTRY glVertexAttribI4usv (GLuint index, const GLushort *v); +GLAPI void APIENTRY glGetUniformuiv (GLuint program, GLint location, GLuint *params); +GLAPI void APIENTRY glBindFragDataLocation (GLuint program, GLuint color, const GLchar *name); +GLAPI GLint APIENTRY glGetFragDataLocation (GLuint program, const GLchar *name); +GLAPI void APIENTRY glUniform1ui (GLint location, GLuint v0); +GLAPI void APIENTRY glUniform2ui (GLint location, GLuint v0, GLuint v1); +GLAPI void APIENTRY glUniform3ui (GLint location, GLuint v0, GLuint v1, GLuint v2); +GLAPI void APIENTRY glUniform4ui (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GLAPI void APIENTRY glUniform1uiv (GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glUniform2uiv (GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glUniform3uiv (GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glUniform4uiv (GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glTexParameterIiv (GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glTexParameterIuiv (GLenum target, GLenum pname, const GLuint *params); +GLAPI void APIENTRY glGetTexParameterIiv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTexParameterIuiv (GLenum target, GLenum pname, GLuint *params); +GLAPI void 
APIENTRY glClearBufferiv (GLenum buffer, GLint drawbuffer, const GLint *value); +GLAPI void APIENTRY glClearBufferuiv (GLenum buffer, GLint drawbuffer, const GLuint *value); +GLAPI void APIENTRY glClearBufferfv (GLenum buffer, GLint drawbuffer, const GLfloat *value); +GLAPI void APIENTRY glClearBufferfi (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +GLAPI const GLubyte *APIENTRY glGetStringi (GLenum name, GLuint index); +GLAPI GLboolean APIENTRY glIsRenderbuffer (GLuint renderbuffer); +GLAPI void APIENTRY glBindRenderbuffer (GLenum target, GLuint renderbuffer); +GLAPI void APIENTRY glDeleteRenderbuffers (GLsizei n, const GLuint *renderbuffers); +GLAPI void APIENTRY glGenRenderbuffers (GLsizei n, GLuint *renderbuffers); +GLAPI void APIENTRY glRenderbufferStorage (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetRenderbufferParameteriv (GLenum target, GLenum pname, GLint *params); +GLAPI GLboolean APIENTRY glIsFramebuffer (GLuint framebuffer); +GLAPI void APIENTRY glBindFramebuffer (GLenum target, GLuint framebuffer); +GLAPI void APIENTRY glDeleteFramebuffers (GLsizei n, const GLuint *framebuffers); +GLAPI void APIENTRY glGenFramebuffers (GLsizei n, GLuint *framebuffers); +GLAPI GLenum APIENTRY glCheckFramebufferStatus (GLenum target); +GLAPI void APIENTRY glFramebufferTexture1D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GLAPI void APIENTRY glFramebufferTexture2D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GLAPI void APIENTRY glFramebufferTexture3D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +GLAPI void APIENTRY glFramebufferRenderbuffer (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GLAPI void APIENTRY glGetFramebufferAttachmentParameteriv (GLenum target, GLenum attachment, GLenum pname, GLint *params); +GLAPI void APIENTRY glGenerateMipmap (GLenum target); +GLAPI void APIENTRY glBlitFramebuffer (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GLAPI void APIENTRY glRenderbufferStorageMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glFramebufferTextureLayer (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +GLAPI void *APIENTRY glMapBufferRange (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GLAPI void APIENTRY glFlushMappedBufferRange (GLenum target, GLintptr offset, GLsizeiptr length); +GLAPI void APIENTRY glBindVertexArray (GLuint array); +GLAPI void APIENTRY glDeleteVertexArrays (GLsizei n, const GLuint *arrays); +GLAPI void APIENTRY glGenVertexArrays (GLsizei n, GLuint *arrays); +GLAPI GLboolean APIENTRY glIsVertexArray (GLuint array); +#endif +#endif /* GL_VERSION_3_0 */ + +#ifndef GL_VERSION_3_1 +#define GL_VERSION_3_1 1 +#define GL_SAMPLER_2D_RECT 0x8B63 +#define GL_SAMPLER_2D_RECT_SHADOW 0x8B64 +#define GL_SAMPLER_BUFFER 0x8DC2 +#define GL_INT_SAMPLER_2D_RECT 0x8DCD +#define GL_INT_SAMPLER_BUFFER 0x8DD0 +#define GL_UNSIGNED_INT_SAMPLER_2D_RECT 0x8DD5 +#define GL_UNSIGNED_INT_SAMPLER_BUFFER 0x8DD8 +#define GL_TEXTURE_BUFFER 0x8C2A +#define GL_MAX_TEXTURE_BUFFER_SIZE 0x8C2B +#define GL_TEXTURE_BINDING_BUFFER 0x8C2C +#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING 0x8C2D +#define GL_TEXTURE_RECTANGLE 0x84F5 +#define 
GL_TEXTURE_BINDING_RECTANGLE 0x84F6 +#define GL_PROXY_TEXTURE_RECTANGLE 0x84F7 +#define GL_MAX_RECTANGLE_TEXTURE_SIZE 0x84F8 +#define GL_R8_SNORM 0x8F94 +#define GL_RG8_SNORM 0x8F95 +#define GL_RGB8_SNORM 0x8F96 +#define GL_RGBA8_SNORM 0x8F97 +#define GL_R16_SNORM 0x8F98 +#define GL_RG16_SNORM 0x8F99 +#define GL_RGB16_SNORM 0x8F9A +#define GL_RGBA16_SNORM 0x8F9B +#define GL_SIGNED_NORMALIZED 0x8F9C +#define GL_PRIMITIVE_RESTART 0x8F9D +#define GL_PRIMITIVE_RESTART_INDEX 0x8F9E +#define GL_COPY_READ_BUFFER 0x8F36 +#define GL_COPY_WRITE_BUFFER 0x8F37 +#define GL_UNIFORM_BUFFER 0x8A11 +#define GL_UNIFORM_BUFFER_BINDING 0x8A28 +#define GL_UNIFORM_BUFFER_START 0x8A29 +#define GL_UNIFORM_BUFFER_SIZE 0x8A2A +#define GL_MAX_VERTEX_UNIFORM_BLOCKS 0x8A2B +#define GL_MAX_GEOMETRY_UNIFORM_BLOCKS 0x8A2C +#define GL_MAX_FRAGMENT_UNIFORM_BLOCKS 0x8A2D +#define GL_MAX_COMBINED_UNIFORM_BLOCKS 0x8A2E +#define GL_MAX_UNIFORM_BUFFER_BINDINGS 0x8A2F +#define GL_MAX_UNIFORM_BLOCK_SIZE 0x8A30 +#define GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS 0x8A31 +#define GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS 0x8A32 +#define GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS 0x8A33 +#define GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT 0x8A34 +#define GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH 0x8A35 +#define GL_ACTIVE_UNIFORM_BLOCKS 0x8A36 +#define GL_UNIFORM_TYPE 0x8A37 +#define GL_UNIFORM_SIZE 0x8A38 +#define GL_UNIFORM_NAME_LENGTH 0x8A39 +#define GL_UNIFORM_BLOCK_INDEX 0x8A3A +#define GL_UNIFORM_OFFSET 0x8A3B +#define GL_UNIFORM_ARRAY_STRIDE 0x8A3C +#define GL_UNIFORM_MATRIX_STRIDE 0x8A3D +#define GL_UNIFORM_IS_ROW_MAJOR 0x8A3E +#define GL_UNIFORM_BLOCK_BINDING 0x8A3F +#define GL_UNIFORM_BLOCK_DATA_SIZE 0x8A40 +#define GL_UNIFORM_BLOCK_NAME_LENGTH 0x8A41 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS 0x8A42 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES 0x8A43 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER 0x8A44 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_GEOMETRY_SHADER 0x8A45 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER 0x8A46 +#define GL_INVALID_INDEX 0xFFFFFFFFu +typedef void (APIENTRYP PFNGLDRAWARRAYSINSTANCEDPROC) (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +typedef void (APIENTRYP PFNGLTEXBUFFERPROC) (GLenum target, GLenum internalformat, GLuint buffer); +typedef void (APIENTRYP PFNGLPRIMITIVERESTARTINDEXPROC) (GLuint index); +typedef void (APIENTRYP PFNGLCOPYBUFFERSUBDATAPROC) (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLGETUNIFORMINDICESPROC) (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMSIVPROC) (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMNAMEPROC) (GLuint program, GLuint uniformIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformName); +typedef GLuint (APIENTRYP PFNGLGETUNIFORMBLOCKINDEXPROC) (GLuint program, const GLchar *uniformBlockName); +typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKIVPROC) (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC) (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +typedef void (APIENTRYP 
PFNGLUNIFORMBLOCKBINDINGPROC) (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawArraysInstanced (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +GLAPI void APIENTRY glDrawElementsInstanced (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +GLAPI void APIENTRY glTexBuffer (GLenum target, GLenum internalformat, GLuint buffer); +GLAPI void APIENTRY glPrimitiveRestartIndex (GLuint index); +GLAPI void APIENTRY glCopyBufferSubData (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GLAPI void APIENTRY glGetUniformIndices (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +GLAPI void APIENTRY glGetActiveUniformsiv (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetActiveUniformName (GLuint program, GLuint uniformIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformName); +GLAPI GLuint APIENTRY glGetUniformBlockIndex (GLuint program, const GLchar *uniformBlockName); +GLAPI void APIENTRY glGetActiveUniformBlockiv (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetActiveUniformBlockName (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +GLAPI void APIENTRY glUniformBlockBinding (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +#endif +#endif /* GL_VERSION_3_1 */ + +#ifndef GL_VERSION_3_2 +#define GL_VERSION_3_2 1 +typedef struct __GLsync *GLsync; +#ifndef GLEXT_64_TYPES_DEFINED +/* This code block is duplicated in glxext.h, so must be protected */ +#define GLEXT_64_TYPES_DEFINED +/* Define int32_t, int64_t, and uint64_t types for UST/MSC */ +/* (as used in the GL_EXT_timer_query extension). 
*/ +#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L +#include <inttypes.h> +#elif defined(__sun__) || defined(__digital__) +#include <inttypes.h> +#if defined(__STDC__) +#if defined(__arch64__) || defined(_LP64) +typedef long int int64_t; +typedef unsigned long int uint64_t; +#else +typedef long long int int64_t; +typedef unsigned long long int uint64_t; +#endif /* __arch64__ */ +#endif /* __STDC__ */ +#elif defined( __VMS ) || defined(__sgi) +#include <inttypes.h> +#elif defined(__SCO__) || defined(__USLC__) +#include <stdint.h> +#elif defined(__UNIXOS2__) || defined(__SOL64__) +typedef long int int32_t; +typedef long long int int64_t; +typedef unsigned long long int uint64_t; +#elif defined(_WIN32) && defined(__GNUC__) +#include <stdint.h> +#elif defined(_WIN32) +typedef __int32 int32_t; +typedef __int64 int64_t; +typedef unsigned __int64 uint64_t; +#else +/* Fallback if nothing above works */ +#include <inttypes.h> +#endif +#endif +typedef uint64_t GLuint64; +typedef int64_t GLint64; +#define GL_CONTEXT_CORE_PROFILE_BIT 0x00000001 +#define GL_CONTEXT_COMPATIBILITY_PROFILE_BIT 0x00000002 +#define GL_LINES_ADJACENCY 0x000A +#define GL_LINE_STRIP_ADJACENCY 0x000B +#define GL_TRIANGLES_ADJACENCY 0x000C +#define GL_TRIANGLE_STRIP_ADJACENCY 0x000D +#define GL_PROGRAM_POINT_SIZE 0x8642 +#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS 0x8C29 +#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED 0x8DA7 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS 0x8DA8 +#define GL_GEOMETRY_SHADER 0x8DD9 +#define GL_GEOMETRY_VERTICES_OUT 0x8916 +#define GL_GEOMETRY_INPUT_TYPE 0x8917 +#define GL_GEOMETRY_OUTPUT_TYPE 0x8918 +#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS 0x8DDF +#define GL_MAX_GEOMETRY_OUTPUT_VERTICES 0x8DE0 +#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS 0x8DE1 +#define GL_MAX_VERTEX_OUTPUT_COMPONENTS 0x9122 +#define GL_MAX_GEOMETRY_INPUT_COMPONENTS 0x9123 +#define GL_MAX_GEOMETRY_OUTPUT_COMPONENTS 0x9124 +#define GL_MAX_FRAGMENT_INPUT_COMPONENTS 0x9125 +#define GL_CONTEXT_PROFILE_MASK 0x9126 +#define GL_DEPTH_CLAMP 0x864F +#define GL_QUADS_FOLLOW_PROVOKING_VERTEX_CONVENTION 0x8E4C +#define GL_FIRST_VERTEX_CONVENTION 0x8E4D +#define GL_LAST_VERTEX_CONVENTION 0x8E4E +#define GL_PROVOKING_VERTEX 0x8E4F +#define GL_TEXTURE_CUBE_MAP_SEAMLESS 0x884F +#define GL_MAX_SERVER_WAIT_TIMEOUT 0x9111 +#define GL_OBJECT_TYPE 0x9112 +#define GL_SYNC_CONDITION 0x9113 +#define GL_SYNC_STATUS 0x9114 +#define GL_SYNC_FLAGS 0x9115 +#define GL_SYNC_FENCE 0x9116 +#define GL_SYNC_GPU_COMMANDS_COMPLETE 0x9117 +#define GL_UNSIGNALED 0x9118 +#define GL_SIGNALED 0x9119 +#define GL_ALREADY_SIGNALED 0x911A +#define GL_TIMEOUT_EXPIRED 0x911B +#define GL_CONDITION_SATISFIED 0x911C +#define GL_WAIT_FAILED 0x911D +#define GL_TIMEOUT_IGNORED 0xFFFFFFFFFFFFFFFFull +#define GL_SYNC_FLUSH_COMMANDS_BIT 0x00000001 +#define GL_SAMPLE_POSITION 0x8E50 +#define GL_SAMPLE_MASK 0x8E51 +#define GL_SAMPLE_MASK_VALUE 0x8E52 +#define GL_MAX_SAMPLE_MASK_WORDS 0x8E59 +#define GL_TEXTURE_2D_MULTISAMPLE 0x9100 +#define GL_PROXY_TEXTURE_2D_MULTISAMPLE 0x9101 +#define GL_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9102 +#define GL_PROXY_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9103 +#define GL_TEXTURE_BINDING_2D_MULTISAMPLE 0x9104 +#define GL_TEXTURE_BINDING_2D_MULTISAMPLE_ARRAY 0x9105 +#define GL_TEXTURE_SAMPLES 0x9106 +#define GL_TEXTURE_FIXED_SAMPLE_LOCATIONS 0x9107 +#define GL_SAMPLER_2D_MULTISAMPLE 0x9108 +#define GL_INT_SAMPLER_2D_MULTISAMPLE 0x9109 +#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE 0x910A +#define GL_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910B +#define GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910C +#define
GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910D +#define GL_MAX_COLOR_TEXTURE_SAMPLES 0x910E +#define GL_MAX_DEPTH_TEXTURE_SAMPLES 0x910F +#define GL_MAX_INTEGER_SAMPLES 0x9110 +typedef void (APIENTRYP PFNGLDRAWELEMENTSBASEVERTEXPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (APIENTRYP PFNGLDRAWRANGEELEMENTSBASEVERTEXPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSBASEVERTEXPROC) (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount, const GLint *basevertex); +typedef void (APIENTRYP PFNGLPROVOKINGVERTEXPROC) (GLenum mode); +typedef GLsync (APIENTRYP PFNGLFENCESYNCPROC) (GLenum condition, GLbitfield flags); +typedef GLboolean (APIENTRYP PFNGLISSYNCPROC) (GLsync sync); +typedef void (APIENTRYP PFNGLDELETESYNCPROC) (GLsync sync); +typedef GLenum (APIENTRYP PFNGLCLIENTWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (APIENTRYP PFNGLWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (APIENTRYP PFNGLGETINTEGER64VPROC) (GLenum pname, GLint64 *data); +typedef void (APIENTRYP PFNGLGETSYNCIVPROC) (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +typedef void (APIENTRYP PFNGLGETINTEGER64I_VPROC) (GLenum target, GLuint index, GLint64 *data); +typedef void (APIENTRYP PFNGLGETBUFFERPARAMETERI64VPROC) (GLenum target, GLenum pname, GLint64 *params); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTUREPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLTEXIMAGE2DMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXIMAGE3DMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLGETMULTISAMPLEFVPROC) (GLenum pname, GLuint index, GLfloat *val); +typedef void (APIENTRYP PFNGLSAMPLEMASKIPROC) (GLuint maskNumber, GLbitfield mask); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawElementsBaseVertex (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GLAPI void APIENTRY glDrawRangeElementsBaseVertex (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GLAPI void APIENTRY glDrawElementsInstancedBaseVertex (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +GLAPI void APIENTRY glMultiDrawElementsBaseVertex (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount, const GLint *basevertex); +GLAPI void APIENTRY glProvokingVertex (GLenum mode); +GLAPI GLsync APIENTRY glFenceSync (GLenum condition, GLbitfield flags); +GLAPI GLboolean APIENTRY glIsSync (GLsync sync); +GLAPI void APIENTRY glDeleteSync (GLsync sync); +GLAPI GLenum APIENTRY glClientWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GLAPI void APIENTRY glWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GLAPI void APIENTRY glGetInteger64v (GLenum pname, GLint64 *data); +GLAPI void APIENTRY glGetSynciv (GLsync 
sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +GLAPI void APIENTRY glGetInteger64i_v (GLenum target, GLuint index, GLint64 *data); +GLAPI void APIENTRY glGetBufferParameteri64v (GLenum target, GLenum pname, GLint64 *params); +GLAPI void APIENTRY glFramebufferTexture (GLenum target, GLenum attachment, GLuint texture, GLint level); +GLAPI void APIENTRY glTexImage2DMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTexImage3DMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glGetMultisamplefv (GLenum pname, GLuint index, GLfloat *val); +GLAPI void APIENTRY glSampleMaski (GLuint maskNumber, GLbitfield mask); +#endif +#endif /* GL_VERSION_3_2 */ + +#ifndef GL_VERSION_3_3 +#define GL_VERSION_3_3 1 +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR 0x88FE +#define GL_SRC1_COLOR 0x88F9 +#define GL_ONE_MINUS_SRC1_COLOR 0x88FA +#define GL_ONE_MINUS_SRC1_ALPHA 0x88FB +#define GL_MAX_DUAL_SOURCE_DRAW_BUFFERS 0x88FC +#define GL_ANY_SAMPLES_PASSED 0x8C2F +#define GL_SAMPLER_BINDING 0x8919 +#define GL_RGB10_A2UI 0x906F +#define GL_TEXTURE_SWIZZLE_R 0x8E42 +#define GL_TEXTURE_SWIZZLE_G 0x8E43 +#define GL_TEXTURE_SWIZZLE_B 0x8E44 +#define GL_TEXTURE_SWIZZLE_A 0x8E45 +#define GL_TEXTURE_SWIZZLE_RGBA 0x8E46 +#define GL_TIME_ELAPSED 0x88BF +#define GL_TIMESTAMP 0x8E28 +#define GL_INT_2_10_10_10_REV 0x8D9F +typedef void (APIENTRYP PFNGLBINDFRAGDATALOCATIONINDEXEDPROC) (GLuint program, GLuint colorNumber, GLuint index, const GLchar *name); +typedef GLint (APIENTRYP PFNGLGETFRAGDATAINDEXPROC) (GLuint program, const GLchar *name); +typedef void (APIENTRYP PFNGLGENSAMPLERSPROC) (GLsizei count, GLuint *samplers); +typedef void (APIENTRYP PFNGLDELETESAMPLERSPROC) (GLsizei count, const GLuint *samplers); +typedef GLboolean (APIENTRYP PFNGLISSAMPLERPROC) (GLuint sampler); +typedef void (APIENTRYP PFNGLBINDSAMPLERPROC) (GLuint unit, GLuint sampler); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIPROC) (GLuint sampler, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIVPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERFPROC) (GLuint sampler, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, const GLfloat *param); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIIVPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIUIVPROC) (GLuint sampler, GLenum pname, const GLuint *param); +typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERIVPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERIIVPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVPROC) (GLuint sampler, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLQUERYCOUNTERPROC) (GLuint id, GLenum target); +typedef void (APIENTRYP PFNGLGETQUERYOBJECTI64VPROC) (GLuint id, GLenum pname, GLint64 *params); +typedef void (APIENTRYP PFNGLGETQUERYOBJECTUI64VPROC) (GLuint id, GLenum pname, GLuint64 *params); +typedef void (APIENTRYP PFNGLVERTEXATTRIBDIVISORPROC) (GLuint index, GLuint divisor); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP1UIPROC) 
(GLuint index, GLenum type, GLboolean normalized, GLuint value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP1UIVPROC) (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP2UIPROC) (GLuint index, GLenum type, GLboolean normalized, GLuint value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP2UIVPROC) (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP3UIPROC) (GLuint index, GLenum type, GLboolean normalized, GLuint value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP3UIVPROC) (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP4UIPROC) (GLuint index, GLenum type, GLboolean normalized, GLuint value); +typedef void (APIENTRYP PFNGLVERTEXATTRIBP4UIVPROC) (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +typedef void (APIENTRYP PFNGLVERTEXP2UIPROC) (GLenum type, GLuint value); +typedef void (APIENTRYP PFNGLVERTEXP2UIVPROC) (GLenum type, const GLuint *value); +typedef void (APIENTRYP PFNGLVERTEXP3UIPROC) (GLenum type, GLuint value); +typedef void (APIENTRYP PFNGLVERTEXP3UIVPROC) (GLenum type, const GLuint *value); +typedef void (APIENTRYP PFNGLVERTEXP4UIPROC) (GLenum type, GLuint value); +typedef void (APIENTRYP PFNGLVERTEXP4UIVPROC) (GLenum type, const GLuint *value); +typedef void (APIENTRYP PFNGLTEXCOORDP1UIPROC) (GLenum type, GLuint coords); +typedef void (APIENTRYP PFNGLTEXCOORDP1UIVPROC) (GLenum type, const GLuint *coords); +typedef void (APIENTRYP PFNGLTEXCOORDP2UIPROC) (GLenum type, GLuint coords); +typedef void (APIENTRYP PFNGLTEXCOORDP2UIVPROC) (GLenum type, const GLuint *coords); +typedef void (APIENTRYP PFNGLTEXCOORDP3UIPROC) (GLenum type, GLuint coords); +typedef void (APIENTRYP PFNGLTEXCOORDP3UIVPROC) (GLenum type, const GLuint *coords); +typedef void (APIENTRYP PFNGLTEXCOORDP4UIPROC) (GLenum type, GLuint coords); +typedef void (APIENTRYP PFNGLTEXCOORDP4UIVPROC) (GLenum type, const GLuint *coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORDP1UIPROC) (GLenum texture, GLenum type, GLuint coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORDP1UIVPROC) (GLenum texture, GLenum type, const GLuint *coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORDP2UIPROC) (GLenum texture, GLenum type, GLuint coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORDP2UIVPROC) (GLenum texture, GLenum type, const GLuint *coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORDP3UIPROC) (GLenum texture, GLenum type, GLuint coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORDP3UIVPROC) (GLenum texture, GLenum type, const GLuint *coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORDP4UIPROC) (GLenum texture, GLenum type, GLuint coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORDP4UIVPROC) (GLenum texture, GLenum type, const GLuint *coords); +typedef void (APIENTRYP PFNGLNORMALP3UIPROC) (GLenum type, GLuint coords); +typedef void (APIENTRYP PFNGLNORMALP3UIVPROC) (GLenum type, const GLuint *coords); +typedef void (APIENTRYP PFNGLCOLORP3UIPROC) (GLenum type, GLuint color); +typedef void (APIENTRYP PFNGLCOLORP3UIVPROC) (GLenum type, const GLuint *color); +typedef void (APIENTRYP PFNGLCOLORP4UIPROC) (GLenum type, GLuint color); +typedef void (APIENTRYP PFNGLCOLORP4UIVPROC) (GLenum type, const GLuint *color); +typedef void (APIENTRYP PFNGLSECONDARYCOLORP3UIPROC) (GLenum type, GLuint color); +typedef void (APIENTRYP PFNGLSECONDARYCOLORP3UIVPROC) (GLenum type, const GLuint *color); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI 
void APIENTRY glBindFragDataLocationIndexed (GLuint program, GLuint colorNumber, GLuint index, const GLchar *name); +GLAPI GLint APIENTRY glGetFragDataIndex (GLuint program, const GLchar *name); +GLAPI void APIENTRY glGenSamplers (GLsizei count, GLuint *samplers); +GLAPI void APIENTRY glDeleteSamplers (GLsizei count, const GLuint *samplers); +GLAPI GLboolean APIENTRY glIsSampler (GLuint sampler); +GLAPI void APIENTRY glBindSampler (GLuint unit, GLuint sampler); +GLAPI void APIENTRY glSamplerParameteri (GLuint sampler, GLenum pname, GLint param); +GLAPI void APIENTRY glSamplerParameteriv (GLuint sampler, GLenum pname, const GLint *param); +GLAPI void APIENTRY glSamplerParameterf (GLuint sampler, GLenum pname, GLfloat param); +GLAPI void APIENTRY glSamplerParameterfv (GLuint sampler, GLenum pname, const GLfloat *param); +GLAPI void APIENTRY glSamplerParameterIiv (GLuint sampler, GLenum pname, const GLint *param); +GLAPI void APIENTRY glSamplerParameterIuiv (GLuint sampler, GLenum pname, const GLuint *param); +GLAPI void APIENTRY glGetSamplerParameteriv (GLuint sampler, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetSamplerParameterIiv (GLuint sampler, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetSamplerParameterfv (GLuint sampler, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetSamplerParameterIuiv (GLuint sampler, GLenum pname, GLuint *params); +GLAPI void APIENTRY glQueryCounter (GLuint id, GLenum target); +GLAPI void APIENTRY glGetQueryObjecti64v (GLuint id, GLenum pname, GLint64 *params); +GLAPI void APIENTRY glGetQueryObjectui64v (GLuint id, GLenum pname, GLuint64 *params); +GLAPI void APIENTRY glVertexAttribDivisor (GLuint index, GLuint divisor); +GLAPI void APIENTRY glVertexAttribP1ui (GLuint index, GLenum type, GLboolean normalized, GLuint value); +GLAPI void APIENTRY glVertexAttribP1uiv (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +GLAPI void APIENTRY glVertexAttribP2ui (GLuint index, GLenum type, GLboolean normalized, GLuint value); +GLAPI void APIENTRY glVertexAttribP2uiv (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +GLAPI void APIENTRY glVertexAttribP3ui (GLuint index, GLenum type, GLboolean normalized, GLuint value); +GLAPI void APIENTRY glVertexAttribP3uiv (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +GLAPI void APIENTRY glVertexAttribP4ui (GLuint index, GLenum type, GLboolean normalized, GLuint value); +GLAPI void APIENTRY glVertexAttribP4uiv (GLuint index, GLenum type, GLboolean normalized, const GLuint *value); +GLAPI void APIENTRY glVertexP2ui (GLenum type, GLuint value); +GLAPI void APIENTRY glVertexP2uiv (GLenum type, const GLuint *value); +GLAPI void APIENTRY glVertexP3ui (GLenum type, GLuint value); +GLAPI void APIENTRY glVertexP3uiv (GLenum type, const GLuint *value); +GLAPI void APIENTRY glVertexP4ui (GLenum type, GLuint value); +GLAPI void APIENTRY glVertexP4uiv (GLenum type, const GLuint *value); +GLAPI void APIENTRY glTexCoordP1ui (GLenum type, GLuint coords); +GLAPI void APIENTRY glTexCoordP1uiv (GLenum type, const GLuint *coords); +GLAPI void APIENTRY glTexCoordP2ui (GLenum type, GLuint coords); +GLAPI void APIENTRY glTexCoordP2uiv (GLenum type, const GLuint *coords); +GLAPI void APIENTRY glTexCoordP3ui (GLenum type, GLuint coords); +GLAPI void APIENTRY glTexCoordP3uiv (GLenum type, const GLuint *coords); +GLAPI void APIENTRY glTexCoordP4ui (GLenum type, GLuint coords); +GLAPI void APIENTRY glTexCoordP4uiv (GLenum type, const GLuint *coords); +GLAPI void 
APIENTRY glMultiTexCoordP1ui (GLenum texture, GLenum type, GLuint coords); +GLAPI void APIENTRY glMultiTexCoordP1uiv (GLenum texture, GLenum type, const GLuint *coords); +GLAPI void APIENTRY glMultiTexCoordP2ui (GLenum texture, GLenum type, GLuint coords); +GLAPI void APIENTRY glMultiTexCoordP2uiv (GLenum texture, GLenum type, const GLuint *coords); +GLAPI void APIENTRY glMultiTexCoordP3ui (GLenum texture, GLenum type, GLuint coords); +GLAPI void APIENTRY glMultiTexCoordP3uiv (GLenum texture, GLenum type, const GLuint *coords); +GLAPI void APIENTRY glMultiTexCoordP4ui (GLenum texture, GLenum type, GLuint coords); +GLAPI void APIENTRY glMultiTexCoordP4uiv (GLenum texture, GLenum type, const GLuint *coords); +GLAPI void APIENTRY glNormalP3ui (GLenum type, GLuint coords); +GLAPI void APIENTRY glNormalP3uiv (GLenum type, const GLuint *coords); +GLAPI void APIENTRY glColorP3ui (GLenum type, GLuint color); +GLAPI void APIENTRY glColorP3uiv (GLenum type, const GLuint *color); +GLAPI void APIENTRY glColorP4ui (GLenum type, GLuint color); +GLAPI void APIENTRY glColorP4uiv (GLenum type, const GLuint *color); +GLAPI void APIENTRY glSecondaryColorP3ui (GLenum type, GLuint color); +GLAPI void APIENTRY glSecondaryColorP3uiv (GLenum type, const GLuint *color); +#endif +#endif /* GL_VERSION_3_3 */ + +#ifndef GL_VERSION_4_0 +#define GL_VERSION_4_0 1 +#define GL_SAMPLE_SHADING 0x8C36 +#define GL_MIN_SAMPLE_SHADING_VALUE 0x8C37 +#define GL_MIN_PROGRAM_TEXTURE_GATHER_OFFSET 0x8E5E +#define GL_MAX_PROGRAM_TEXTURE_GATHER_OFFSET 0x8E5F +#define GL_TEXTURE_CUBE_MAP_ARRAY 0x9009 +#define GL_TEXTURE_BINDING_CUBE_MAP_ARRAY 0x900A +#define GL_PROXY_TEXTURE_CUBE_MAP_ARRAY 0x900B +#define GL_SAMPLER_CUBE_MAP_ARRAY 0x900C +#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW 0x900D +#define GL_INT_SAMPLER_CUBE_MAP_ARRAY 0x900E +#define GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY 0x900F +#define GL_DRAW_INDIRECT_BUFFER 0x8F3F +#define GL_DRAW_INDIRECT_BUFFER_BINDING 0x8F43 +#define GL_GEOMETRY_SHADER_INVOCATIONS 0x887F +#define GL_MAX_GEOMETRY_SHADER_INVOCATIONS 0x8E5A +#define GL_MIN_FRAGMENT_INTERPOLATION_OFFSET 0x8E5B +#define GL_MAX_FRAGMENT_INTERPOLATION_OFFSET 0x8E5C +#define GL_FRAGMENT_INTERPOLATION_OFFSET_BITS 0x8E5D +#define GL_MAX_VERTEX_STREAMS 0x8E71 +#define GL_DOUBLE_VEC2 0x8FFC +#define GL_DOUBLE_VEC3 0x8FFD +#define GL_DOUBLE_VEC4 0x8FFE +#define GL_DOUBLE_MAT2 0x8F46 +#define GL_DOUBLE_MAT3 0x8F47 +#define GL_DOUBLE_MAT4 0x8F48 +#define GL_DOUBLE_MAT2x3 0x8F49 +#define GL_DOUBLE_MAT2x4 0x8F4A +#define GL_DOUBLE_MAT3x2 0x8F4B +#define GL_DOUBLE_MAT3x4 0x8F4C +#define GL_DOUBLE_MAT4x2 0x8F4D +#define GL_DOUBLE_MAT4x3 0x8F4E +#define GL_ACTIVE_SUBROUTINES 0x8DE5 +#define GL_ACTIVE_SUBROUTINE_UNIFORMS 0x8DE6 +#define GL_ACTIVE_SUBROUTINE_UNIFORM_LOCATIONS 0x8E47 +#define GL_ACTIVE_SUBROUTINE_MAX_LENGTH 0x8E48 +#define GL_ACTIVE_SUBROUTINE_UNIFORM_MAX_LENGTH 0x8E49 +#define GL_MAX_SUBROUTINES 0x8DE7 +#define GL_MAX_SUBROUTINE_UNIFORM_LOCATIONS 0x8DE8 +#define GL_NUM_COMPATIBLE_SUBROUTINES 0x8E4A +#define GL_COMPATIBLE_SUBROUTINES 0x8E4B +#define GL_PATCHES 0x000E +#define GL_PATCH_VERTICES 0x8E72 +#define GL_PATCH_DEFAULT_INNER_LEVEL 0x8E73 +#define GL_PATCH_DEFAULT_OUTER_LEVEL 0x8E74 +#define GL_TESS_CONTROL_OUTPUT_VERTICES 0x8E75 +#define GL_TESS_GEN_MODE 0x8E76 +#define GL_TESS_GEN_SPACING 0x8E77 +#define GL_TESS_GEN_VERTEX_ORDER 0x8E78 +#define GL_TESS_GEN_POINT_MODE 0x8E79 +#define GL_ISOLINES 0x8E7A +#define GL_FRACTIONAL_ODD 0x8E7B +#define GL_FRACTIONAL_EVEN 0x8E7C +#define GL_MAX_PATCH_VERTICES 0x8E7D +#define 
GL_MAX_TESS_GEN_LEVEL 0x8E7E +#define GL_MAX_TESS_CONTROL_UNIFORM_COMPONENTS 0x8E7F +#define GL_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS 0x8E80 +#define GL_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS 0x8E81 +#define GL_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS 0x8E82 +#define GL_MAX_TESS_CONTROL_OUTPUT_COMPONENTS 0x8E83 +#define GL_MAX_TESS_PATCH_COMPONENTS 0x8E84 +#define GL_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS 0x8E85 +#define GL_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS 0x8E86 +#define GL_MAX_TESS_CONTROL_UNIFORM_BLOCKS 0x8E89 +#define GL_MAX_TESS_EVALUATION_UNIFORM_BLOCKS 0x8E8A +#define GL_MAX_TESS_CONTROL_INPUT_COMPONENTS 0x886C +#define GL_MAX_TESS_EVALUATION_INPUT_COMPONENTS 0x886D +#define GL_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS 0x8E1E +#define GL_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS 0x8E1F +#define GL_UNIFORM_BLOCK_REFERENCED_BY_TESS_CONTROL_SHADER 0x84F0 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_TESS_EVALUATION_SHADER 0x84F1 +#define GL_TESS_EVALUATION_SHADER 0x8E87 +#define GL_TESS_CONTROL_SHADER 0x8E88 +#define GL_TRANSFORM_FEEDBACK 0x8E22 +#define GL_TRANSFORM_FEEDBACK_BUFFER_PAUSED 0x8E23 +#define GL_TRANSFORM_FEEDBACK_BUFFER_ACTIVE 0x8E24 +#define GL_TRANSFORM_FEEDBACK_BINDING 0x8E25 +#define GL_MAX_TRANSFORM_FEEDBACK_BUFFERS 0x8E70 +typedef void (APIENTRYP PFNGLMINSAMPLESHADINGPROC) (GLfloat value); +typedef void (APIENTRYP PFNGLBLENDEQUATIONIPROC) (GLuint buf, GLenum mode); +typedef void (APIENTRYP PFNGLBLENDEQUATIONSEPARATEIPROC) (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (APIENTRYP PFNGLBLENDFUNCIPROC) (GLuint buf, GLenum src, GLenum dst); +typedef void (APIENTRYP PFNGLBLENDFUNCSEPARATEIPROC) (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +typedef void (APIENTRYP PFNGLDRAWARRAYSINDIRECTPROC) (GLenum mode, const void *indirect); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINDIRECTPROC) (GLenum mode, GLenum type, const void *indirect); +typedef void (APIENTRYP PFNGLUNIFORM1DPROC) (GLint location, GLdouble x); +typedef void (APIENTRYP PFNGLUNIFORM2DPROC) (GLint location, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLUNIFORM3DPROC) (GLint location, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLUNIFORM4DPROC) (GLint location, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLUNIFORM1DVPROC) (GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORM2DVPROC) (GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORM3DVPROC) (GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORM4DVPROC) (GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2X3DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2X4DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3X2DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3X4DVPROC) 
(GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4X2DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4X3DVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLGETUNIFORMDVPROC) (GLuint program, GLint location, GLdouble *params); +typedef GLint (APIENTRYP PFNGLGETSUBROUTINEUNIFORMLOCATIONPROC) (GLuint program, GLenum shadertype, const GLchar *name); +typedef GLuint (APIENTRYP PFNGLGETSUBROUTINEINDEXPROC) (GLuint program, GLenum shadertype, const GLchar *name); +typedef void (APIENTRYP PFNGLGETACTIVESUBROUTINEUNIFORMIVPROC) (GLuint program, GLenum shadertype, GLuint index, GLenum pname, GLint *values); +typedef void (APIENTRYP PFNGLGETACTIVESUBROUTINEUNIFORMNAMEPROC) (GLuint program, GLenum shadertype, GLuint index, GLsizei bufsize, GLsizei *length, GLchar *name); +typedef void (APIENTRYP PFNGLGETACTIVESUBROUTINENAMEPROC) (GLuint program, GLenum shadertype, GLuint index, GLsizei bufsize, GLsizei *length, GLchar *name); +typedef void (APIENTRYP PFNGLUNIFORMSUBROUTINESUIVPROC) (GLenum shadertype, GLsizei count, const GLuint *indices); +typedef void (APIENTRYP PFNGLGETUNIFORMSUBROUTINEUIVPROC) (GLenum shadertype, GLint location, GLuint *params); +typedef void (APIENTRYP PFNGLGETPROGRAMSTAGEIVPROC) (GLuint program, GLenum shadertype, GLenum pname, GLint *values); +typedef void (APIENTRYP PFNGLPATCHPARAMETERIPROC) (GLenum pname, GLint value); +typedef void (APIENTRYP PFNGLPATCHPARAMETERFVPROC) (GLenum pname, const GLfloat *values); +typedef void (APIENTRYP PFNGLBINDTRANSFORMFEEDBACKPROC) (GLenum target, GLuint id); +typedef void (APIENTRYP PFNGLDELETETRANSFORMFEEDBACKSPROC) (GLsizei n, const GLuint *ids); +typedef void (APIENTRYP PFNGLGENTRANSFORMFEEDBACKSPROC) (GLsizei n, GLuint *ids); +typedef GLboolean (APIENTRYP PFNGLISTRANSFORMFEEDBACKPROC) (GLuint id); +typedef void (APIENTRYP PFNGLPAUSETRANSFORMFEEDBACKPROC) (void); +typedef void (APIENTRYP PFNGLRESUMETRANSFORMFEEDBACKPROC) (void); +typedef void (APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKPROC) (GLenum mode, GLuint id); +typedef void (APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKSTREAMPROC) (GLenum mode, GLuint id, GLuint stream); +typedef void (APIENTRYP PFNGLBEGINQUERYINDEXEDPROC) (GLenum target, GLuint index, GLuint id); +typedef void (APIENTRYP PFNGLENDQUERYINDEXEDPROC) (GLenum target, GLuint index); +typedef void (APIENTRYP PFNGLGETQUERYINDEXEDIVPROC) (GLenum target, GLuint index, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMinSampleShading (GLfloat value); +GLAPI void APIENTRY glBlendEquationi (GLuint buf, GLenum mode); +GLAPI void APIENTRY glBlendEquationSeparatei (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GLAPI void APIENTRY glBlendFunci (GLuint buf, GLenum src, GLenum dst); +GLAPI void APIENTRY glBlendFuncSeparatei (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +GLAPI void APIENTRY glDrawArraysIndirect (GLenum mode, const void *indirect); +GLAPI void APIENTRY glDrawElementsIndirect (GLenum mode, GLenum type, const void *indirect); +GLAPI void APIENTRY glUniform1d (GLint location, GLdouble x); +GLAPI void APIENTRY glUniform2d (GLint location, GLdouble x, GLdouble y); +GLAPI void APIENTRY glUniform3d (GLint location, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glUniform4d (GLint location, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void 
APIENTRY glUniform1dv (GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glUniform2dv (GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glUniform3dv (GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glUniform4dv (GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix2dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix3dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix4dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix2x3dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix2x4dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix3x2dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix3x4dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix4x2dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glUniformMatrix4x3dv (GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glGetUniformdv (GLuint program, GLint location, GLdouble *params); +GLAPI GLint APIENTRY glGetSubroutineUniformLocation (GLuint program, GLenum shadertype, const GLchar *name); +GLAPI GLuint APIENTRY glGetSubroutineIndex (GLuint program, GLenum shadertype, const GLchar *name); +GLAPI void APIENTRY glGetActiveSubroutineUniformiv (GLuint program, GLenum shadertype, GLuint index, GLenum pname, GLint *values); +GLAPI void APIENTRY glGetActiveSubroutineUniformName (GLuint program, GLenum shadertype, GLuint index, GLsizei bufsize, GLsizei *length, GLchar *name); +GLAPI void APIENTRY glGetActiveSubroutineName (GLuint program, GLenum shadertype, GLuint index, GLsizei bufsize, GLsizei *length, GLchar *name); +GLAPI void APIENTRY glUniformSubroutinesuiv (GLenum shadertype, GLsizei count, const GLuint *indices); +GLAPI void APIENTRY glGetUniformSubroutineuiv (GLenum shadertype, GLint location, GLuint *params); +GLAPI void APIENTRY glGetProgramStageiv (GLuint program, GLenum shadertype, GLenum pname, GLint *values); +GLAPI void APIENTRY glPatchParameteri (GLenum pname, GLint value); +GLAPI void APIENTRY glPatchParameterfv (GLenum pname, const GLfloat *values); +GLAPI void APIENTRY glBindTransformFeedback (GLenum target, GLuint id); +GLAPI void APIENTRY glDeleteTransformFeedbacks (GLsizei n, const GLuint *ids); +GLAPI void APIENTRY glGenTransformFeedbacks (GLsizei n, GLuint *ids); +GLAPI GLboolean APIENTRY glIsTransformFeedback (GLuint id); +GLAPI void APIENTRY glPauseTransformFeedback (void); +GLAPI void APIENTRY glResumeTransformFeedback (void); +GLAPI void APIENTRY glDrawTransformFeedback (GLenum mode, GLuint id); +GLAPI void APIENTRY glDrawTransformFeedbackStream (GLenum mode, GLuint id, GLuint stream); +GLAPI void APIENTRY glBeginQueryIndexed (GLenum target, GLuint index, GLuint id); +GLAPI void APIENTRY glEndQueryIndexed (GLenum target, GLuint index); +GLAPI void APIENTRY glGetQueryIndexediv (GLenum target, GLuint index, GLenum pname, GLint *params); +#endif +#endif /* GL_VERSION_4_0 */ + +#ifndef GL_VERSION_4_1 +#define GL_VERSION_4_1 1 +#define GL_FIXED 0x140C +#define 
GL_IMPLEMENTATION_COLOR_READ_TYPE 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT 0x8B9B +#define GL_LOW_FLOAT 0x8DF0 +#define GL_MEDIUM_FLOAT 0x8DF1 +#define GL_HIGH_FLOAT 0x8DF2 +#define GL_LOW_INT 0x8DF3 +#define GL_MEDIUM_INT 0x8DF4 +#define GL_HIGH_INT 0x8DF5 +#define GL_SHADER_COMPILER 0x8DFA +#define GL_SHADER_BINARY_FORMATS 0x8DF8 +#define GL_NUM_SHADER_BINARY_FORMATS 0x8DF9 +#define GL_MAX_VERTEX_UNIFORM_VECTORS 0x8DFB +#define GL_MAX_VARYING_VECTORS 0x8DFC +#define GL_MAX_FRAGMENT_UNIFORM_VECTORS 0x8DFD +#define GL_RGB565 0x8D62 +#define GL_PROGRAM_BINARY_RETRIEVABLE_HINT 0x8257 +#define GL_PROGRAM_BINARY_LENGTH 0x8741 +#define GL_NUM_PROGRAM_BINARY_FORMATS 0x87FE +#define GL_PROGRAM_BINARY_FORMATS 0x87FF +#define GL_VERTEX_SHADER_BIT 0x00000001 +#define GL_FRAGMENT_SHADER_BIT 0x00000002 +#define GL_GEOMETRY_SHADER_BIT 0x00000004 +#define GL_TESS_CONTROL_SHADER_BIT 0x00000008 +#define GL_TESS_EVALUATION_SHADER_BIT 0x00000010 +#define GL_ALL_SHADER_BITS 0xFFFFFFFF +#define GL_PROGRAM_SEPARABLE 0x8258 +#define GL_ACTIVE_PROGRAM 0x8259 +#define GL_PROGRAM_PIPELINE_BINDING 0x825A +#define GL_MAX_VIEWPORTS 0x825B +#define GL_VIEWPORT_SUBPIXEL_BITS 0x825C +#define GL_VIEWPORT_BOUNDS_RANGE 0x825D +#define GL_LAYER_PROVOKING_VERTEX 0x825E +#define GL_VIEWPORT_INDEX_PROVOKING_VERTEX 0x825F +#define GL_UNDEFINED_VERTEX 0x8260 +typedef void (APIENTRYP PFNGLRELEASESHADERCOMPILERPROC) (void); +typedef void (APIENTRYP PFNGLSHADERBINARYPROC) (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +typedef void (APIENTRYP PFNGLGETSHADERPRECISIONFORMATPROC) (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +typedef void (APIENTRYP PFNGLDEPTHRANGEFPROC) (GLfloat n, GLfloat f); +typedef void (APIENTRYP PFNGLCLEARDEPTHFPROC) (GLfloat d); +typedef void (APIENTRYP PFNGLGETPROGRAMBINARYPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +typedef void (APIENTRYP PFNGLPROGRAMBINARYPROC) (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +typedef void (APIENTRYP PFNGLPROGRAMPARAMETERIPROC) (GLuint program, GLenum pname, GLint value); +typedef void (APIENTRYP PFNGLUSEPROGRAMSTAGESPROC) (GLuint pipeline, GLbitfield stages, GLuint program); +typedef void (APIENTRYP PFNGLACTIVESHADERPROGRAMPROC) (GLuint pipeline, GLuint program); +typedef GLuint (APIENTRYP PFNGLCREATESHADERPROGRAMVPROC) (GLenum type, GLsizei count, const GLchar *const*strings); +typedef void (APIENTRYP PFNGLBINDPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (APIENTRYP PFNGLDELETEPROGRAMPIPELINESPROC) (GLsizei n, const GLuint *pipelines); +typedef void (APIENTRYP PFNGLGENPROGRAMPIPELINESPROC) (GLsizei n, GLuint *pipelines); +typedef GLboolean (APIENTRYP PFNGLISPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (APIENTRYP PFNGLGETPROGRAMPIPELINEIVPROC) (GLuint pipeline, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1IPROC) (GLuint program, GLint location, GLint v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1FPROC) (GLuint program, GLint location, GLfloat v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1DPROC) (GLuint program, GLint location, GLdouble v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1DVPROC) (GLuint program, 
GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UIPROC) (GLuint program, GLint location, GLuint v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2IPROC) (GLuint program, GLint location, GLint v0, GLint v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2DPROC) (GLuint program, GLint location, GLdouble v0, GLdouble v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2DVPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3IPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3DPROC) (GLuint program, GLint location, GLdouble v0, GLdouble v1, GLdouble v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3DVPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4IPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4IVPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4FPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4FVPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4DPROC) (GLuint program, GLint location, GLdouble v0, GLdouble v1, GLdouble v2, GLdouble v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4DVPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UIPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UIVPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP 
PFNGLPROGRAMUNIFORMMATRIX4FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3FVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3DVPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLVALIDATEPROGRAMPIPELINEPROC) (GLuint pipeline); +typedef void (APIENTRYP PFNGLGETPROGRAMPIPELINEINFOLOGPROC) (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1DPROC) (GLuint index, GLdouble x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2DPROC) (GLuint index, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3DPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4DPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4DVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBLPOINTERPROC) (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP 
PFNGLGETVERTEXATTRIBLDVPROC) (GLuint index, GLenum pname, GLdouble *params); +typedef void (APIENTRYP PFNGLVIEWPORTARRAYVPROC) (GLuint first, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLVIEWPORTINDEXEDFPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +typedef void (APIENTRYP PFNGLVIEWPORTINDEXEDFVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLSCISSORARRAYVPROC) (GLuint first, GLsizei count, const GLint *v); +typedef void (APIENTRYP PFNGLSCISSORINDEXEDPROC) (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLSCISSORINDEXEDVPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLDEPTHRANGEARRAYVPROC) (GLuint first, GLsizei count, const GLdouble *v); +typedef void (APIENTRYP PFNGLDEPTHRANGEINDEXEDPROC) (GLuint index, GLdouble n, GLdouble f); +typedef void (APIENTRYP PFNGLGETFLOATI_VPROC) (GLenum target, GLuint index, GLfloat *data); +typedef void (APIENTRYP PFNGLGETDOUBLEI_VPROC) (GLenum target, GLuint index, GLdouble *data); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glReleaseShaderCompiler (void); +GLAPI void APIENTRY glShaderBinary (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +GLAPI void APIENTRY glGetShaderPrecisionFormat (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +GLAPI void APIENTRY glDepthRangef (GLfloat n, GLfloat f); +GLAPI void APIENTRY glClearDepthf (GLfloat d); +GLAPI void APIENTRY glGetProgramBinary (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +GLAPI void APIENTRY glProgramBinary (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +GLAPI void APIENTRY glProgramParameteri (GLuint program, GLenum pname, GLint value); +GLAPI void APIENTRY glUseProgramStages (GLuint pipeline, GLbitfield stages, GLuint program); +GLAPI void APIENTRY glActiveShaderProgram (GLuint pipeline, GLuint program); +GLAPI GLuint APIENTRY glCreateShaderProgramv (GLenum type, GLsizei count, const GLchar *const*strings); +GLAPI void APIENTRY glBindProgramPipeline (GLuint pipeline); +GLAPI void APIENTRY glDeleteProgramPipelines (GLsizei n, const GLuint *pipelines); +GLAPI void APIENTRY glGenProgramPipelines (GLsizei n, GLuint *pipelines); +GLAPI GLboolean APIENTRY glIsProgramPipeline (GLuint pipeline); +GLAPI void APIENTRY glGetProgramPipelineiv (GLuint pipeline, GLenum pname, GLint *params); +GLAPI void APIENTRY glProgramUniform1i (GLuint program, GLint location, GLint v0); +GLAPI void APIENTRY glProgramUniform1iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform1f (GLuint program, GLint location, GLfloat v0); +GLAPI void APIENTRY glProgramUniform1fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform1d (GLuint program, GLint location, GLdouble v0); +GLAPI void APIENTRY glProgramUniform1dv (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform1ui (GLuint program, GLint location, GLuint v0); +GLAPI void APIENTRY glProgramUniform1uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniform2i (GLuint program, GLint location, GLint v0, GLint v1); +GLAPI void APIENTRY glProgramUniform2iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform2f (GLuint program, GLint location, 
GLfloat v0, GLfloat v1); +GLAPI void APIENTRY glProgramUniform2fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform2d (GLuint program, GLint location, GLdouble v0, GLdouble v1); +GLAPI void APIENTRY glProgramUniform2dv (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform2ui (GLuint program, GLint location, GLuint v0, GLuint v1); +GLAPI void APIENTRY glProgramUniform2uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniform3i (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +GLAPI void APIENTRY glProgramUniform3iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform3f (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GLAPI void APIENTRY glProgramUniform3fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform3d (GLuint program, GLint location, GLdouble v0, GLdouble v1, GLdouble v2); +GLAPI void APIENTRY glProgramUniform3dv (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform3ui (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +GLAPI void APIENTRY glProgramUniform3uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniform4i (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GLAPI void APIENTRY glProgramUniform4iv (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform4f (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GLAPI void APIENTRY glProgramUniform4fv (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform4d (GLuint program, GLint location, GLdouble v0, GLdouble v1, GLdouble v2, GLdouble v3); +GLAPI void APIENTRY glProgramUniform4dv (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform4ui (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GLAPI void APIENTRY glProgramUniform4uiv (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniformMatrix2fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix2dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix3dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix2x3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3x2fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix2x4fv (GLuint program, 
GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4x2fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3x4fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4x3fv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix2x3dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix3x2dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix2x4dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4x2dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix3x4dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4x3dv (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glValidateProgramPipeline (GLuint pipeline); +GLAPI void APIENTRY glGetProgramPipelineInfoLog (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GLAPI void APIENTRY glVertexAttribL1d (GLuint index, GLdouble x); +GLAPI void APIENTRY glVertexAttribL2d (GLuint index, GLdouble x, GLdouble y); +GLAPI void APIENTRY glVertexAttribL3d (GLuint index, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glVertexAttribL4d (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glVertexAttribL1dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribL2dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribL3dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribL4dv (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribLPointer (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glGetVertexAttribLdv (GLuint index, GLenum pname, GLdouble *params); +GLAPI void APIENTRY glViewportArrayv (GLuint first, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glViewportIndexedf (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +GLAPI void APIENTRY glViewportIndexedfv (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glScissorArrayv (GLuint first, GLsizei count, const GLint *v); +GLAPI void APIENTRY glScissorIndexed (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +GLAPI void APIENTRY glScissorIndexedv (GLuint index, const GLint *v); +GLAPI void APIENTRY glDepthRangeArrayv (GLuint first, GLsizei count, const GLdouble *v); +GLAPI void APIENTRY glDepthRangeIndexed (GLuint index, GLdouble n, GLdouble f); +GLAPI void APIENTRY glGetFloati_v (GLenum target, GLuint index, GLfloat *data); +GLAPI void APIENTRY glGetDoublei_v (GLenum target, GLuint index, GLdouble *data); +#endif +#endif /* GL_VERSION_4_1 */ + +#ifndef GL_VERSION_4_2 +#define GL_VERSION_4_2 1 +#define GL_COPY_READ_BUFFER_BINDING 0x8F36 +#define GL_COPY_WRITE_BUFFER_BINDING 0x8F37 +#define GL_TRANSFORM_FEEDBACK_ACTIVE 0x8E24 +#define GL_TRANSFORM_FEEDBACK_PAUSED 0x8E23 +#define 
GL_UNPACK_COMPRESSED_BLOCK_WIDTH 0x9127 +#define GL_UNPACK_COMPRESSED_BLOCK_HEIGHT 0x9128 +#define GL_UNPACK_COMPRESSED_BLOCK_DEPTH 0x9129 +#define GL_UNPACK_COMPRESSED_BLOCK_SIZE 0x912A +#define GL_PACK_COMPRESSED_BLOCK_WIDTH 0x912B +#define GL_PACK_COMPRESSED_BLOCK_HEIGHT 0x912C +#define GL_PACK_COMPRESSED_BLOCK_DEPTH 0x912D +#define GL_PACK_COMPRESSED_BLOCK_SIZE 0x912E +#define GL_NUM_SAMPLE_COUNTS 0x9380 +#define GL_MIN_MAP_BUFFER_ALIGNMENT 0x90BC +#define GL_ATOMIC_COUNTER_BUFFER 0x92C0 +#define GL_ATOMIC_COUNTER_BUFFER_BINDING 0x92C1 +#define GL_ATOMIC_COUNTER_BUFFER_START 0x92C2 +#define GL_ATOMIC_COUNTER_BUFFER_SIZE 0x92C3 +#define GL_ATOMIC_COUNTER_BUFFER_DATA_SIZE 0x92C4 +#define GL_ATOMIC_COUNTER_BUFFER_ACTIVE_ATOMIC_COUNTERS 0x92C5 +#define GL_ATOMIC_COUNTER_BUFFER_ACTIVE_ATOMIC_COUNTER_INDICES 0x92C6 +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_VERTEX_SHADER 0x92C7 +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_TESS_CONTROL_SHADER 0x92C8 +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_TESS_EVALUATION_SHADER 0x92C9 +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_GEOMETRY_SHADER 0x92CA +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_FRAGMENT_SHADER 0x92CB +#define GL_MAX_VERTEX_ATOMIC_COUNTER_BUFFERS 0x92CC +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS 0x92CD +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS 0x92CE +#define GL_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS 0x92CF +#define GL_MAX_FRAGMENT_ATOMIC_COUNTER_BUFFERS 0x92D0 +#define GL_MAX_COMBINED_ATOMIC_COUNTER_BUFFERS 0x92D1 +#define GL_MAX_VERTEX_ATOMIC_COUNTERS 0x92D2 +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTERS 0x92D3 +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTERS 0x92D4 +#define GL_MAX_GEOMETRY_ATOMIC_COUNTERS 0x92D5 +#define GL_MAX_FRAGMENT_ATOMIC_COUNTERS 0x92D6 +#define GL_MAX_COMBINED_ATOMIC_COUNTERS 0x92D7 +#define GL_MAX_ATOMIC_COUNTER_BUFFER_SIZE 0x92D8 +#define GL_MAX_ATOMIC_COUNTER_BUFFER_BINDINGS 0x92DC +#define GL_ACTIVE_ATOMIC_COUNTER_BUFFERS 0x92D9 +#define GL_UNIFORM_ATOMIC_COUNTER_BUFFER_INDEX 0x92DA +#define GL_UNSIGNED_INT_ATOMIC_COUNTER 0x92DB +#define GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT 0x00000001 +#define GL_ELEMENT_ARRAY_BARRIER_BIT 0x00000002 +#define GL_UNIFORM_BARRIER_BIT 0x00000004 +#define GL_TEXTURE_FETCH_BARRIER_BIT 0x00000008 +#define GL_SHADER_IMAGE_ACCESS_BARRIER_BIT 0x00000020 +#define GL_COMMAND_BARRIER_BIT 0x00000040 +#define GL_PIXEL_BUFFER_BARRIER_BIT 0x00000080 +#define GL_TEXTURE_UPDATE_BARRIER_BIT 0x00000100 +#define GL_BUFFER_UPDATE_BARRIER_BIT 0x00000200 +#define GL_FRAMEBUFFER_BARRIER_BIT 0x00000400 +#define GL_TRANSFORM_FEEDBACK_BARRIER_BIT 0x00000800 +#define GL_ATOMIC_COUNTER_BARRIER_BIT 0x00001000 +#define GL_ALL_BARRIER_BITS 0xFFFFFFFF +#define GL_MAX_IMAGE_UNITS 0x8F38 +#define GL_MAX_COMBINED_IMAGE_UNITS_AND_FRAGMENT_OUTPUTS 0x8F39 +#define GL_IMAGE_BINDING_NAME 0x8F3A +#define GL_IMAGE_BINDING_LEVEL 0x8F3B +#define GL_IMAGE_BINDING_LAYERED 0x8F3C +#define GL_IMAGE_BINDING_LAYER 0x8F3D +#define GL_IMAGE_BINDING_ACCESS 0x8F3E +#define GL_IMAGE_1D 0x904C +#define GL_IMAGE_2D 0x904D +#define GL_IMAGE_3D 0x904E +#define GL_IMAGE_2D_RECT 0x904F +#define GL_IMAGE_CUBE 0x9050 +#define GL_IMAGE_BUFFER 0x9051 +#define GL_IMAGE_1D_ARRAY 0x9052 +#define GL_IMAGE_2D_ARRAY 0x9053 +#define GL_IMAGE_CUBE_MAP_ARRAY 0x9054 +#define GL_IMAGE_2D_MULTISAMPLE 0x9055 +#define GL_IMAGE_2D_MULTISAMPLE_ARRAY 0x9056 +#define GL_INT_IMAGE_1D 0x9057 +#define GL_INT_IMAGE_2D 0x9058 +#define GL_INT_IMAGE_3D 0x9059 +#define GL_INT_IMAGE_2D_RECT 0x905A +#define GL_INT_IMAGE_CUBE 0x905B 
+#define GL_INT_IMAGE_BUFFER 0x905C +#define GL_INT_IMAGE_1D_ARRAY 0x905D +#define GL_INT_IMAGE_2D_ARRAY 0x905E +#define GL_INT_IMAGE_CUBE_MAP_ARRAY 0x905F +#define GL_INT_IMAGE_2D_MULTISAMPLE 0x9060 +#define GL_INT_IMAGE_2D_MULTISAMPLE_ARRAY 0x9061 +#define GL_UNSIGNED_INT_IMAGE_1D 0x9062 +#define GL_UNSIGNED_INT_IMAGE_2D 0x9063 +#define GL_UNSIGNED_INT_IMAGE_3D 0x9064 +#define GL_UNSIGNED_INT_IMAGE_2D_RECT 0x9065 +#define GL_UNSIGNED_INT_IMAGE_CUBE 0x9066 +#define GL_UNSIGNED_INT_IMAGE_BUFFER 0x9067 +#define GL_UNSIGNED_INT_IMAGE_1D_ARRAY 0x9068 +#define GL_UNSIGNED_INT_IMAGE_2D_ARRAY 0x9069 +#define GL_UNSIGNED_INT_IMAGE_CUBE_MAP_ARRAY 0x906A +#define GL_UNSIGNED_INT_IMAGE_2D_MULTISAMPLE 0x906B +#define GL_UNSIGNED_INT_IMAGE_2D_MULTISAMPLE_ARRAY 0x906C +#define GL_MAX_IMAGE_SAMPLES 0x906D +#define GL_IMAGE_BINDING_FORMAT 0x906E +#define GL_IMAGE_FORMAT_COMPATIBILITY_TYPE 0x90C7 +#define GL_IMAGE_FORMAT_COMPATIBILITY_BY_SIZE 0x90C8 +#define GL_IMAGE_FORMAT_COMPATIBILITY_BY_CLASS 0x90C9 +#define GL_MAX_VERTEX_IMAGE_UNIFORMS 0x90CA +#define GL_MAX_TESS_CONTROL_IMAGE_UNIFORMS 0x90CB +#define GL_MAX_TESS_EVALUATION_IMAGE_UNIFORMS 0x90CC +#define GL_MAX_GEOMETRY_IMAGE_UNIFORMS 0x90CD +#define GL_MAX_FRAGMENT_IMAGE_UNIFORMS 0x90CE +#define GL_MAX_COMBINED_IMAGE_UNIFORMS 0x90CF +#define GL_COMPRESSED_RGBA_BPTC_UNORM 0x8E8C +#define GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM 0x8E8D +#define GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT 0x8E8E +#define GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT 0x8E8F +#define GL_TEXTURE_IMMUTABLE_FORMAT 0x912F +typedef void (APIENTRYP PFNGLDRAWARRAYSINSTANCEDBASEINSTANCEPROC) (GLenum mode, GLint first, GLsizei count, GLsizei instancecount, GLuint baseinstance); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEINSTANCEPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLuint baseinstance); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXBASEINSTANCEPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex, GLuint baseinstance); +typedef void (APIENTRYP PFNGLGETINTERNALFORMATIVPROC) (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +typedef void (APIENTRYP PFNGLGETACTIVEATOMICCOUNTERBUFFERIVPROC) (GLuint program, GLuint bufferIndex, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLBINDIMAGETEXTUREPROC) (GLuint unit, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLenum format); +typedef void (APIENTRYP PFNGLMEMORYBARRIERPROC) (GLbitfield barriers); +typedef void (APIENTRYP PFNGLTEXSTORAGE1DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (APIENTRYP PFNGLTEXSTORAGE2DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLTEXSTORAGE3DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKINSTANCEDPROC) (GLenum mode, GLuint id, GLsizei instancecount); +typedef void (APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKSTREAMINSTANCEDPROC) (GLenum mode, GLuint id, GLuint stream, GLsizei instancecount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawArraysInstancedBaseInstance (GLenum mode, GLint first, GLsizei count, GLsizei instancecount, GLuint baseinstance); +GLAPI void APIENTRY glDrawElementsInstancedBaseInstance (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, 
GLuint baseinstance); +GLAPI void APIENTRY glDrawElementsInstancedBaseVertexBaseInstance (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex, GLuint baseinstance); +GLAPI void APIENTRY glGetInternalformativ (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +GLAPI void APIENTRY glGetActiveAtomicCounterBufferiv (GLuint program, GLuint bufferIndex, GLenum pname, GLint *params); +GLAPI void APIENTRY glBindImageTexture (GLuint unit, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLenum format); +GLAPI void APIENTRY glMemoryBarrier (GLbitfield barriers); +GLAPI void APIENTRY glTexStorage1D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GLAPI void APIENTRY glTexStorage2D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glTexStorage3D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GLAPI void APIENTRY glDrawTransformFeedbackInstanced (GLenum mode, GLuint id, GLsizei instancecount); +GLAPI void APIENTRY glDrawTransformFeedbackStreamInstanced (GLenum mode, GLuint id, GLuint stream, GLsizei instancecount); +#endif +#endif /* GL_VERSION_4_2 */ + +#ifndef GL_VERSION_4_3 +#define GL_VERSION_4_3 1 +typedef void (APIENTRY *GLDEBUGPROC)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam); +#define GL_NUM_SHADING_LANGUAGE_VERSIONS 0x82E9 +#define GL_VERTEX_ATTRIB_ARRAY_LONG 0x874E +#define GL_COMPRESSED_RGB8_ETC2 0x9274 +#define GL_COMPRESSED_SRGB8_ETC2 0x9275 +#define GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9276 +#define GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9277 +#define GL_COMPRESSED_RGBA8_ETC2_EAC 0x9278 +#define GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC 0x9279 +#define GL_COMPRESSED_R11_EAC 0x9270 +#define GL_COMPRESSED_SIGNED_R11_EAC 0x9271 +#define GL_COMPRESSED_RG11_EAC 0x9272 +#define GL_COMPRESSED_SIGNED_RG11_EAC 0x9273 +#define GL_PRIMITIVE_RESTART_FIXED_INDEX 0x8D69 +#define GL_ANY_SAMPLES_PASSED_CONSERVATIVE 0x8D6A +#define GL_MAX_ELEMENT_INDEX 0x8D6B +#define GL_COMPUTE_SHADER 0x91B9 +#define GL_MAX_COMPUTE_UNIFORM_BLOCKS 0x91BB +#define GL_MAX_COMPUTE_TEXTURE_IMAGE_UNITS 0x91BC +#define GL_MAX_COMPUTE_IMAGE_UNIFORMS 0x91BD +#define GL_MAX_COMPUTE_SHARED_MEMORY_SIZE 0x8262 +#define GL_MAX_COMPUTE_UNIFORM_COMPONENTS 0x8263 +#define GL_MAX_COMPUTE_ATOMIC_COUNTER_BUFFERS 0x8264 +#define GL_MAX_COMPUTE_ATOMIC_COUNTERS 0x8265 +#define GL_MAX_COMBINED_COMPUTE_UNIFORM_COMPONENTS 0x8266 +#define GL_MAX_COMPUTE_WORK_GROUP_INVOCATIONS 0x90EB +#define GL_MAX_COMPUTE_WORK_GROUP_COUNT 0x91BE +#define GL_MAX_COMPUTE_WORK_GROUP_SIZE 0x91BF +#define GL_COMPUTE_WORK_GROUP_SIZE 0x8267 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_COMPUTE_SHADER 0x90EC +#define GL_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_COMPUTE_SHADER 0x90ED +#define GL_DISPATCH_INDIRECT_BUFFER 0x90EE +#define GL_DISPATCH_INDIRECT_BUFFER_BINDING 0x90EF +#define GL_COMPUTE_SHADER_BIT 0x00000020 +#define GL_DEBUG_OUTPUT_SYNCHRONOUS 0x8242 +#define GL_DEBUG_NEXT_LOGGED_MESSAGE_LENGTH 0x8243 +#define GL_DEBUG_CALLBACK_FUNCTION 0x8244 +#define GL_DEBUG_CALLBACK_USER_PARAM 0x8245 +#define GL_DEBUG_SOURCE_API 0x8246 +#define GL_DEBUG_SOURCE_WINDOW_SYSTEM 0x8247 +#define GL_DEBUG_SOURCE_SHADER_COMPILER 0x8248 +#define GL_DEBUG_SOURCE_THIRD_PARTY 0x8249 +#define GL_DEBUG_SOURCE_APPLICATION 0x824A +#define GL_DEBUG_SOURCE_OTHER 0x824B +#define 
GL_DEBUG_TYPE_ERROR 0x824C +#define GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR 0x824D +#define GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR 0x824E +#define GL_DEBUG_TYPE_PORTABILITY 0x824F +#define GL_DEBUG_TYPE_PERFORMANCE 0x8250 +#define GL_DEBUG_TYPE_OTHER 0x8251 +#define GL_MAX_DEBUG_MESSAGE_LENGTH 0x9143 +#define GL_MAX_DEBUG_LOGGED_MESSAGES 0x9144 +#define GL_DEBUG_LOGGED_MESSAGES 0x9145 +#define GL_DEBUG_SEVERITY_HIGH 0x9146 +#define GL_DEBUG_SEVERITY_MEDIUM 0x9147 +#define GL_DEBUG_SEVERITY_LOW 0x9148 +#define GL_DEBUG_TYPE_MARKER 0x8268 +#define GL_DEBUG_TYPE_PUSH_GROUP 0x8269 +#define GL_DEBUG_TYPE_POP_GROUP 0x826A +#define GL_DEBUG_SEVERITY_NOTIFICATION 0x826B +#define GL_MAX_DEBUG_GROUP_STACK_DEPTH 0x826C +#define GL_DEBUG_GROUP_STACK_DEPTH 0x826D +#define GL_BUFFER 0x82E0 +#define GL_SHADER 0x82E1 +#define GL_PROGRAM 0x82E2 +#define GL_QUERY 0x82E3 +#define GL_PROGRAM_PIPELINE 0x82E4 +#define GL_SAMPLER 0x82E6 +#define GL_MAX_LABEL_LENGTH 0x82E8 +#define GL_DEBUG_OUTPUT 0x92E0 +#define GL_CONTEXT_FLAG_DEBUG_BIT 0x00000002 +#define GL_MAX_UNIFORM_LOCATIONS 0x826E +#define GL_FRAMEBUFFER_DEFAULT_WIDTH 0x9310 +#define GL_FRAMEBUFFER_DEFAULT_HEIGHT 0x9311 +#define GL_FRAMEBUFFER_DEFAULT_LAYERS 0x9312 +#define GL_FRAMEBUFFER_DEFAULT_SAMPLES 0x9313 +#define GL_FRAMEBUFFER_DEFAULT_FIXED_SAMPLE_LOCATIONS 0x9314 +#define GL_MAX_FRAMEBUFFER_WIDTH 0x9315 +#define GL_MAX_FRAMEBUFFER_HEIGHT 0x9316 +#define GL_MAX_FRAMEBUFFER_LAYERS 0x9317 +#define GL_MAX_FRAMEBUFFER_SAMPLES 0x9318 +#define GL_INTERNALFORMAT_SUPPORTED 0x826F +#define GL_INTERNALFORMAT_PREFERRED 0x8270 +#define GL_INTERNALFORMAT_RED_SIZE 0x8271 +#define GL_INTERNALFORMAT_GREEN_SIZE 0x8272 +#define GL_INTERNALFORMAT_BLUE_SIZE 0x8273 +#define GL_INTERNALFORMAT_ALPHA_SIZE 0x8274 +#define GL_INTERNALFORMAT_DEPTH_SIZE 0x8275 +#define GL_INTERNALFORMAT_STENCIL_SIZE 0x8276 +#define GL_INTERNALFORMAT_SHARED_SIZE 0x8277 +#define GL_INTERNALFORMAT_RED_TYPE 0x8278 +#define GL_INTERNALFORMAT_GREEN_TYPE 0x8279 +#define GL_INTERNALFORMAT_BLUE_TYPE 0x827A +#define GL_INTERNALFORMAT_ALPHA_TYPE 0x827B +#define GL_INTERNALFORMAT_DEPTH_TYPE 0x827C +#define GL_INTERNALFORMAT_STENCIL_TYPE 0x827D +#define GL_MAX_WIDTH 0x827E +#define GL_MAX_HEIGHT 0x827F +#define GL_MAX_DEPTH 0x8280 +#define GL_MAX_LAYERS 0x8281 +#define GL_MAX_COMBINED_DIMENSIONS 0x8282 +#define GL_COLOR_COMPONENTS 0x8283 +#define GL_DEPTH_COMPONENTS 0x8284 +#define GL_STENCIL_COMPONENTS 0x8285 +#define GL_COLOR_RENDERABLE 0x8286 +#define GL_DEPTH_RENDERABLE 0x8287 +#define GL_STENCIL_RENDERABLE 0x8288 +#define GL_FRAMEBUFFER_RENDERABLE 0x8289 +#define GL_FRAMEBUFFER_RENDERABLE_LAYERED 0x828A +#define GL_FRAMEBUFFER_BLEND 0x828B +#define GL_READ_PIXELS 0x828C +#define GL_READ_PIXELS_FORMAT 0x828D +#define GL_READ_PIXELS_TYPE 0x828E +#define GL_TEXTURE_IMAGE_FORMAT 0x828F +#define GL_TEXTURE_IMAGE_TYPE 0x8290 +#define GL_GET_TEXTURE_IMAGE_FORMAT 0x8291 +#define GL_GET_TEXTURE_IMAGE_TYPE 0x8292 +#define GL_MIPMAP 0x8293 +#define GL_MANUAL_GENERATE_MIPMAP 0x8294 +#define GL_AUTO_GENERATE_MIPMAP 0x8295 +#define GL_COLOR_ENCODING 0x8296 +#define GL_SRGB_READ 0x8297 +#define GL_SRGB_WRITE 0x8298 +#define GL_FILTER 0x829A +#define GL_VERTEX_TEXTURE 0x829B +#define GL_TESS_CONTROL_TEXTURE 0x829C +#define GL_TESS_EVALUATION_TEXTURE 0x829D +#define GL_GEOMETRY_TEXTURE 0x829E +#define GL_FRAGMENT_TEXTURE 0x829F +#define GL_COMPUTE_TEXTURE 0x82A0 +#define GL_TEXTURE_SHADOW 0x82A1 +#define GL_TEXTURE_GATHER 0x82A2 +#define GL_TEXTURE_GATHER_SHADOW 0x82A3 +#define GL_SHADER_IMAGE_LOAD 0x82A4 +#define 
GL_SHADER_IMAGE_STORE 0x82A5 +#define GL_SHADER_IMAGE_ATOMIC 0x82A6 +#define GL_IMAGE_TEXEL_SIZE 0x82A7 +#define GL_IMAGE_COMPATIBILITY_CLASS 0x82A8 +#define GL_IMAGE_PIXEL_FORMAT 0x82A9 +#define GL_IMAGE_PIXEL_TYPE 0x82AA +#define GL_SIMULTANEOUS_TEXTURE_AND_DEPTH_TEST 0x82AC +#define GL_SIMULTANEOUS_TEXTURE_AND_STENCIL_TEST 0x82AD +#define GL_SIMULTANEOUS_TEXTURE_AND_DEPTH_WRITE 0x82AE +#define GL_SIMULTANEOUS_TEXTURE_AND_STENCIL_WRITE 0x82AF +#define GL_TEXTURE_COMPRESSED_BLOCK_WIDTH 0x82B1 +#define GL_TEXTURE_COMPRESSED_BLOCK_HEIGHT 0x82B2 +#define GL_TEXTURE_COMPRESSED_BLOCK_SIZE 0x82B3 +#define GL_CLEAR_BUFFER 0x82B4 +#define GL_TEXTURE_VIEW 0x82B5 +#define GL_VIEW_COMPATIBILITY_CLASS 0x82B6 +#define GL_FULL_SUPPORT 0x82B7 +#define GL_CAVEAT_SUPPORT 0x82B8 +#define GL_IMAGE_CLASS_4_X_32 0x82B9 +#define GL_IMAGE_CLASS_2_X_32 0x82BA +#define GL_IMAGE_CLASS_1_X_32 0x82BB +#define GL_IMAGE_CLASS_4_X_16 0x82BC +#define GL_IMAGE_CLASS_2_X_16 0x82BD +#define GL_IMAGE_CLASS_1_X_16 0x82BE +#define GL_IMAGE_CLASS_4_X_8 0x82BF +#define GL_IMAGE_CLASS_2_X_8 0x82C0 +#define GL_IMAGE_CLASS_1_X_8 0x82C1 +#define GL_IMAGE_CLASS_11_11_10 0x82C2 +#define GL_IMAGE_CLASS_10_10_10_2 0x82C3 +#define GL_VIEW_CLASS_128_BITS 0x82C4 +#define GL_VIEW_CLASS_96_BITS 0x82C5 +#define GL_VIEW_CLASS_64_BITS 0x82C6 +#define GL_VIEW_CLASS_48_BITS 0x82C7 +#define GL_VIEW_CLASS_32_BITS 0x82C8 +#define GL_VIEW_CLASS_24_BITS 0x82C9 +#define GL_VIEW_CLASS_16_BITS 0x82CA +#define GL_VIEW_CLASS_8_BITS 0x82CB +#define GL_VIEW_CLASS_S3TC_DXT1_RGB 0x82CC +#define GL_VIEW_CLASS_S3TC_DXT1_RGBA 0x82CD +#define GL_VIEW_CLASS_S3TC_DXT3_RGBA 0x82CE +#define GL_VIEW_CLASS_S3TC_DXT5_RGBA 0x82CF +#define GL_VIEW_CLASS_RGTC1_RED 0x82D0 +#define GL_VIEW_CLASS_RGTC2_RG 0x82D1 +#define GL_VIEW_CLASS_BPTC_UNORM 0x82D2 +#define GL_VIEW_CLASS_BPTC_FLOAT 0x82D3 +#define GL_UNIFORM 0x92E1 +#define GL_UNIFORM_BLOCK 0x92E2 +#define GL_PROGRAM_INPUT 0x92E3 +#define GL_PROGRAM_OUTPUT 0x92E4 +#define GL_BUFFER_VARIABLE 0x92E5 +#define GL_SHADER_STORAGE_BLOCK 0x92E6 +#define GL_VERTEX_SUBROUTINE 0x92E8 +#define GL_TESS_CONTROL_SUBROUTINE 0x92E9 +#define GL_TESS_EVALUATION_SUBROUTINE 0x92EA +#define GL_GEOMETRY_SUBROUTINE 0x92EB +#define GL_FRAGMENT_SUBROUTINE 0x92EC +#define GL_COMPUTE_SUBROUTINE 0x92ED +#define GL_VERTEX_SUBROUTINE_UNIFORM 0x92EE +#define GL_TESS_CONTROL_SUBROUTINE_UNIFORM 0x92EF +#define GL_TESS_EVALUATION_SUBROUTINE_UNIFORM 0x92F0 +#define GL_GEOMETRY_SUBROUTINE_UNIFORM 0x92F1 +#define GL_FRAGMENT_SUBROUTINE_UNIFORM 0x92F2 +#define GL_COMPUTE_SUBROUTINE_UNIFORM 0x92F3 +#define GL_TRANSFORM_FEEDBACK_VARYING 0x92F4 +#define GL_ACTIVE_RESOURCES 0x92F5 +#define GL_MAX_NAME_LENGTH 0x92F6 +#define GL_MAX_NUM_ACTIVE_VARIABLES 0x92F7 +#define GL_MAX_NUM_COMPATIBLE_SUBROUTINES 0x92F8 +#define GL_NAME_LENGTH 0x92F9 +#define GL_TYPE 0x92FA +#define GL_ARRAY_SIZE 0x92FB +#define GL_OFFSET 0x92FC +#define GL_BLOCK_INDEX 0x92FD +#define GL_ARRAY_STRIDE 0x92FE +#define GL_MATRIX_STRIDE 0x92FF +#define GL_IS_ROW_MAJOR 0x9300 +#define GL_ATOMIC_COUNTER_BUFFER_INDEX 0x9301 +#define GL_BUFFER_BINDING 0x9302 +#define GL_BUFFER_DATA_SIZE 0x9303 +#define GL_NUM_ACTIVE_VARIABLES 0x9304 +#define GL_ACTIVE_VARIABLES 0x9305 +#define GL_REFERENCED_BY_VERTEX_SHADER 0x9306 +#define GL_REFERENCED_BY_TESS_CONTROL_SHADER 0x9307 +#define GL_REFERENCED_BY_TESS_EVALUATION_SHADER 0x9308 +#define GL_REFERENCED_BY_GEOMETRY_SHADER 0x9309 +#define GL_REFERENCED_BY_FRAGMENT_SHADER 0x930A +#define GL_REFERENCED_BY_COMPUTE_SHADER 0x930B +#define GL_TOP_LEVEL_ARRAY_SIZE 
0x930C +#define GL_TOP_LEVEL_ARRAY_STRIDE 0x930D +#define GL_LOCATION 0x930E +#define GL_LOCATION_INDEX 0x930F +#define GL_IS_PER_PATCH 0x92E7 +#define GL_SHADER_STORAGE_BUFFER 0x90D2 +#define GL_SHADER_STORAGE_BUFFER_BINDING 0x90D3 +#define GL_SHADER_STORAGE_BUFFER_START 0x90D4 +#define GL_SHADER_STORAGE_BUFFER_SIZE 0x90D5 +#define GL_MAX_VERTEX_SHADER_STORAGE_BLOCKS 0x90D6 +#define GL_MAX_GEOMETRY_SHADER_STORAGE_BLOCKS 0x90D7 +#define GL_MAX_TESS_CONTROL_SHADER_STORAGE_BLOCKS 0x90D8 +#define GL_MAX_TESS_EVALUATION_SHADER_STORAGE_BLOCKS 0x90D9 +#define GL_MAX_FRAGMENT_SHADER_STORAGE_BLOCKS 0x90DA +#define GL_MAX_COMPUTE_SHADER_STORAGE_BLOCKS 0x90DB +#define GL_MAX_COMBINED_SHADER_STORAGE_BLOCKS 0x90DC +#define GL_MAX_SHADER_STORAGE_BUFFER_BINDINGS 0x90DD +#define GL_MAX_SHADER_STORAGE_BLOCK_SIZE 0x90DE +#define GL_SHADER_STORAGE_BUFFER_OFFSET_ALIGNMENT 0x90DF +#define GL_SHADER_STORAGE_BARRIER_BIT 0x00002000 +#define GL_MAX_COMBINED_SHADER_OUTPUT_RESOURCES 0x8F39 +#define GL_DEPTH_STENCIL_TEXTURE_MODE 0x90EA +#define GL_TEXTURE_BUFFER_OFFSET 0x919D +#define GL_TEXTURE_BUFFER_SIZE 0x919E +#define GL_TEXTURE_BUFFER_OFFSET_ALIGNMENT 0x919F +#define GL_TEXTURE_VIEW_MIN_LEVEL 0x82DB +#define GL_TEXTURE_VIEW_NUM_LEVELS 0x82DC +#define GL_TEXTURE_VIEW_MIN_LAYER 0x82DD +#define GL_TEXTURE_VIEW_NUM_LAYERS 0x82DE +#define GL_TEXTURE_IMMUTABLE_LEVELS 0x82DF +#define GL_VERTEX_ATTRIB_BINDING 0x82D4 +#define GL_VERTEX_ATTRIB_RELATIVE_OFFSET 0x82D5 +#define GL_VERTEX_BINDING_DIVISOR 0x82D6 +#define GL_VERTEX_BINDING_OFFSET 0x82D7 +#define GL_VERTEX_BINDING_STRIDE 0x82D8 +#define GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET 0x82D9 +#define GL_MAX_VERTEX_ATTRIB_BINDINGS 0x82DA +#define GL_VERTEX_BINDING_BUFFER 0x8F4F +#define GL_DISPLAY_LIST 0x82E7 +typedef void (APIENTRYP PFNGLCLEARBUFFERDATAPROC) (GLenum target, GLenum internalformat, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLCLEARBUFFERSUBDATAPROC) (GLenum target, GLenum internalformat, GLintptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLDISPATCHCOMPUTEPROC) (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z); +typedef void (APIENTRYP PFNGLDISPATCHCOMPUTEINDIRECTPROC) (GLintptr indirect); +typedef void (APIENTRYP PFNGLCOPYIMAGESUBDATAPROC) (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +typedef void (APIENTRYP PFNGLFRAMEBUFFERPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLGETFRAMEBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETINTERNALFORMATI64VPROC) (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint64 *params); +typedef void (APIENTRYP PFNGLINVALIDATETEXSUBIMAGEPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth); +typedef void (APIENTRYP PFNGLINVALIDATETEXIMAGEPROC) (GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLINVALIDATEBUFFERSUBDATAPROC) (GLuint buffer, GLintptr offset, GLsizeiptr length); +typedef void (APIENTRYP PFNGLINVALIDATEBUFFERDATAPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLINVALIDATEFRAMEBUFFERPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments); +typedef void (APIENTRYP PFNGLINVALIDATESUBFRAMEBUFFERPROC) (GLenum target, GLsizei 
numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTPROC) (GLenum mode, const void *indirect, GLsizei drawcount, GLsizei stride); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTPROC) (GLenum mode, GLenum type, const void *indirect, GLsizei drawcount, GLsizei stride); +typedef void (APIENTRYP PFNGLGETPROGRAMINTERFACEIVPROC) (GLuint program, GLenum programInterface, GLenum pname, GLint *params); +typedef GLuint (APIENTRYP PFNGLGETPROGRAMRESOURCEINDEXPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef void (APIENTRYP PFNGLGETPROGRAMRESOURCENAMEPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei bufSize, GLsizei *length, GLchar *name); +typedef void (APIENTRYP PFNGLGETPROGRAMRESOURCEIVPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLint *params); +typedef GLint (APIENTRYP PFNGLGETPROGRAMRESOURCELOCATIONPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef GLint (APIENTRYP PFNGLGETPROGRAMRESOURCELOCATIONINDEXPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef void (APIENTRYP PFNGLSHADERSTORAGEBLOCKBINDINGPROC) (GLuint program, GLuint storageBlockIndex, GLuint storageBlockBinding); +typedef void (APIENTRYP PFNGLTEXBUFFERRANGEPROC) (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLTEXSTORAGE2DMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXSTORAGE3DMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXTUREVIEWPROC) (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +typedef void (APIENTRYP PFNGLBINDVERTEXBUFFERPROC) (GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +typedef void (APIENTRYP PFNGLVERTEXATTRIBFORMATPROC) (GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXATTRIBIFORMATPROC) (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXATTRIBLFORMATPROC) (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXATTRIBBINDINGPROC) (GLuint attribindex, GLuint bindingindex); +typedef void (APIENTRYP PFNGLVERTEXBINDINGDIVISORPROC) (GLuint bindingindex, GLuint divisor); +typedef void (APIENTRYP PFNGLDEBUGMESSAGECONTROLPROC) (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +typedef void (APIENTRYP PFNGLDEBUGMESSAGEINSERTPROC) (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +typedef void (APIENTRYP PFNGLDEBUGMESSAGECALLBACKPROC) (GLDEBUGPROC callback, const void *userParam); +typedef GLuint (APIENTRYP PFNGLGETDEBUGMESSAGELOGPROC) (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +typedef void (APIENTRYP PFNGLPUSHDEBUGGROUPPROC) (GLenum source, GLuint id, GLsizei length, const GLchar *message); +typedef void (APIENTRYP 
PFNGLPOPDEBUGGROUPPROC) (void); +typedef void (APIENTRYP PFNGLOBJECTLABELPROC) (GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +typedef void (APIENTRYP PFNGLGETOBJECTLABELPROC) (GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (APIENTRYP PFNGLOBJECTPTRLABELPROC) (const void *ptr, GLsizei length, const GLchar *label); +typedef void (APIENTRYP PFNGLGETOBJECTPTRLABELPROC) (const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glClearBufferData (GLenum target, GLenum internalformat, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glClearBufferSubData (GLenum target, GLenum internalformat, GLintptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glDispatchCompute (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z); +GLAPI void APIENTRY glDispatchComputeIndirect (GLintptr indirect); +GLAPI void APIENTRY glCopyImageSubData (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +GLAPI void APIENTRY glFramebufferParameteri (GLenum target, GLenum pname, GLint param); +GLAPI void APIENTRY glGetFramebufferParameteriv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetInternalformati64v (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint64 *params); +GLAPI void APIENTRY glInvalidateTexSubImage (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth); +GLAPI void APIENTRY glInvalidateTexImage (GLuint texture, GLint level); +GLAPI void APIENTRY glInvalidateBufferSubData (GLuint buffer, GLintptr offset, GLsizeiptr length); +GLAPI void APIENTRY glInvalidateBufferData (GLuint buffer); +GLAPI void APIENTRY glInvalidateFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments); +GLAPI void APIENTRY glInvalidateSubFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glMultiDrawArraysIndirect (GLenum mode, const void *indirect, GLsizei drawcount, GLsizei stride); +GLAPI void APIENTRY glMultiDrawElementsIndirect (GLenum mode, GLenum type, const void *indirect, GLsizei drawcount, GLsizei stride); +GLAPI void APIENTRY glGetProgramInterfaceiv (GLuint program, GLenum programInterface, GLenum pname, GLint *params); +GLAPI GLuint APIENTRY glGetProgramResourceIndex (GLuint program, GLenum programInterface, const GLchar *name); +GLAPI void APIENTRY glGetProgramResourceName (GLuint program, GLenum programInterface, GLuint index, GLsizei bufSize, GLsizei *length, GLchar *name); +GLAPI void APIENTRY glGetProgramResourceiv (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLint *params); +GLAPI GLint APIENTRY glGetProgramResourceLocation (GLuint program, GLenum programInterface, const GLchar *name); +GLAPI GLint APIENTRY glGetProgramResourceLocationIndex (GLuint program, GLenum programInterface, const GLchar *name); +GLAPI void APIENTRY glShaderStorageBlockBinding (GLuint program, GLuint storageBlockIndex, GLuint storageBlockBinding); +GLAPI void APIENTRY glTexBufferRange (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, 
GLsizeiptr size); +GLAPI void APIENTRY glTexStorage2DMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTexStorage3DMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTextureView (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +GLAPI void APIENTRY glBindVertexBuffer (GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +GLAPI void APIENTRY glVertexAttribFormat (GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +GLAPI void APIENTRY glVertexAttribIFormat (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexAttribLFormat (GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexAttribBinding (GLuint attribindex, GLuint bindingindex); +GLAPI void APIENTRY glVertexBindingDivisor (GLuint bindingindex, GLuint divisor); +GLAPI void APIENTRY glDebugMessageControl (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +GLAPI void APIENTRY glDebugMessageInsert (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +GLAPI void APIENTRY glDebugMessageCallback (GLDEBUGPROC callback, const void *userParam); +GLAPI GLuint APIENTRY glGetDebugMessageLog (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +GLAPI void APIENTRY glPushDebugGroup (GLenum source, GLuint id, GLsizei length, const GLchar *message); +GLAPI void APIENTRY glPopDebugGroup (void); +GLAPI void APIENTRY glObjectLabel (GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +GLAPI void APIENTRY glGetObjectLabel (GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +GLAPI void APIENTRY glObjectPtrLabel (const void *ptr, GLsizei length, const GLchar *label); +GLAPI void APIENTRY glGetObjectPtrLabel (const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +#endif +#endif /* GL_VERSION_4_3 */ + +#ifndef GL_VERSION_4_4 +#define GL_VERSION_4_4 1 +#define GL_MAX_VERTEX_ATTRIB_STRIDE 0x82E5 +#define GL_PRIMITIVE_RESTART_FOR_PATCHES_SUPPORTED 0x8221 +#define GL_TEXTURE_BUFFER_BINDING 0x8C2A +#define GL_MAP_PERSISTENT_BIT 0x0040 +#define GL_MAP_COHERENT_BIT 0x0080 +#define GL_DYNAMIC_STORAGE_BIT 0x0100 +#define GL_CLIENT_STORAGE_BIT 0x0200 +#define GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT 0x00004000 +#define GL_BUFFER_IMMUTABLE_STORAGE 0x821F +#define GL_BUFFER_STORAGE_FLAGS 0x8220 +#define GL_CLEAR_TEXTURE 0x9365 +#define GL_LOCATION_COMPONENT 0x934A +#define GL_TRANSFORM_FEEDBACK_BUFFER_INDEX 0x934B +#define GL_TRANSFORM_FEEDBACK_BUFFER_STRIDE 0x934C +#define GL_QUERY_BUFFER 0x9192 +#define GL_QUERY_BUFFER_BARRIER_BIT 0x00008000 +#define GL_QUERY_BUFFER_BINDING 0x9193 +#define GL_QUERY_RESULT_NO_WAIT 0x9194 +#define GL_MIRROR_CLAMP_TO_EDGE 0x8743 +typedef void (APIENTRYP PFNGLBUFFERSTORAGEPROC) (GLenum target, GLsizeiptr size, const void *data, GLbitfield flags); +typedef void (APIENTRYP PFNGLCLEARTEXIMAGEPROC) (GLuint texture, GLint level, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLCLEARTEXSUBIMAGEPROC) (GLuint texture, GLint level, GLint 
xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLBINDBUFFERSBASEPROC) (GLenum target, GLuint first, GLsizei count, const GLuint *buffers); +typedef void (APIENTRYP PFNGLBINDBUFFERSRANGEPROC) (GLenum target, GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizeiptr *sizes); +typedef void (APIENTRYP PFNGLBINDTEXTURESPROC) (GLuint first, GLsizei count, const GLuint *textures); +typedef void (APIENTRYP PFNGLBINDSAMPLERSPROC) (GLuint first, GLsizei count, const GLuint *samplers); +typedef void (APIENTRYP PFNGLBINDIMAGETEXTURESPROC) (GLuint first, GLsizei count, const GLuint *textures); +typedef void (APIENTRYP PFNGLBINDVERTEXBUFFERSPROC) (GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizei *strides); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBufferStorage (GLenum target, GLsizeiptr size, const void *data, GLbitfield flags); +GLAPI void APIENTRY glClearTexImage (GLuint texture, GLint level, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glClearTexSubImage (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glBindBuffersBase (GLenum target, GLuint first, GLsizei count, const GLuint *buffers); +GLAPI void APIENTRY glBindBuffersRange (GLenum target, GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizeiptr *sizes); +GLAPI void APIENTRY glBindTextures (GLuint first, GLsizei count, const GLuint *textures); +GLAPI void APIENTRY glBindSamplers (GLuint first, GLsizei count, const GLuint *samplers); +GLAPI void APIENTRY glBindImageTextures (GLuint first, GLsizei count, const GLuint *textures); +GLAPI void APIENTRY glBindVertexBuffers (GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizei *strides); +#endif +#endif /* GL_VERSION_4_4 */ + +#ifndef GL_VERSION_4_5 +#define GL_VERSION_4_5 1 +#define GL_CONTEXT_LOST 0x0507 +#define GL_NEGATIVE_ONE_TO_ONE 0x935E +#define GL_ZERO_TO_ONE 0x935F +#define GL_CLIP_ORIGIN 0x935C +#define GL_CLIP_DEPTH_MODE 0x935D +#define GL_QUERY_WAIT_INVERTED 0x8E17 +#define GL_QUERY_NO_WAIT_INVERTED 0x8E18 +#define GL_QUERY_BY_REGION_WAIT_INVERTED 0x8E19 +#define GL_QUERY_BY_REGION_NO_WAIT_INVERTED 0x8E1A +#define GL_MAX_CULL_DISTANCES 0x82F9 +#define GL_MAX_COMBINED_CLIP_AND_CULL_DISTANCES 0x82FA +#define GL_TEXTURE_TARGET 0x1006 +#define GL_QUERY_TARGET 0x82EA +#define GL_GUILTY_CONTEXT_RESET 0x8253 +#define GL_INNOCENT_CONTEXT_RESET 0x8254 +#define GL_UNKNOWN_CONTEXT_RESET 0x8255 +#define GL_RESET_NOTIFICATION_STRATEGY 0x8256 +#define GL_LOSE_CONTEXT_ON_RESET 0x8252 +#define GL_NO_RESET_NOTIFICATION 0x8261 +#define GL_CONTEXT_FLAG_ROBUST_ACCESS_BIT 0x00000004 +#define GL_CONTEXT_RELEASE_BEHAVIOR 0x82FB +#define GL_CONTEXT_RELEASE_BEHAVIOR_FLUSH 0x82FC +typedef void (APIENTRYP PFNGLCLIPCONTROLPROC) (GLenum origin, GLenum depth); +typedef void (APIENTRYP PFNGLCREATETRANSFORMFEEDBACKSPROC) (GLsizei n, GLuint *ids); +typedef void (APIENTRYP PFNGLTRANSFORMFEEDBACKBUFFERBASEPROC) (GLuint xfb, GLuint index, GLuint buffer); +typedef void (APIENTRYP PFNGLTRANSFORMFEEDBACKBUFFERRANGEPROC) (GLuint xfb, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLGETTRANSFORMFEEDBACKIVPROC) (GLuint xfb, GLenum pname, GLint *param); 
+typedef void (APIENTRYP PFNGLGETTRANSFORMFEEDBACKI_VPROC) (GLuint xfb, GLenum pname, GLuint index, GLint *param); +typedef void (APIENTRYP PFNGLGETTRANSFORMFEEDBACKI64_VPROC) (GLuint xfb, GLenum pname, GLuint index, GLint64 *param); +typedef void (APIENTRYP PFNGLCREATEBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef void (APIENTRYP PFNGLNAMEDBUFFERSTORAGEPROC) (GLuint buffer, GLsizeiptr size, const void *data, GLbitfield flags); +typedef void (APIENTRYP PFNGLNAMEDBUFFERDATAPROC) (GLuint buffer, GLsizeiptr size, const void *data, GLenum usage); +typedef void (APIENTRYP PFNGLNAMEDBUFFERSUBDATAPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +typedef void (APIENTRYP PFNGLCOPYNAMEDBUFFERSUBDATAPROC) (GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLCLEARNAMEDBUFFERDATAPROC) (GLuint buffer, GLenum internalformat, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLCLEARNAMEDBUFFERSUBDATAPROC) (GLuint buffer, GLenum internalformat, GLintptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +typedef void *(APIENTRYP PFNGLMAPNAMEDBUFFERPROC) (GLuint buffer, GLenum access); +typedef void *(APIENTRYP PFNGLMAPNAMEDBUFFERRANGEPROC) (GLuint buffer, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef GLboolean (APIENTRYP PFNGLUNMAPNAMEDBUFFERPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLFLUSHMAPPEDNAMEDBUFFERRANGEPROC) (GLuint buffer, GLintptr offset, GLsizeiptr length); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPARAMETERIVPROC) (GLuint buffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPARAMETERI64VPROC) (GLuint buffer, GLenum pname, GLint64 *params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPOINTERVPROC) (GLuint buffer, GLenum pname, void **params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERSUBDATAPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, void *data); +typedef void (APIENTRYP PFNGLCREATEFRAMEBUFFERSPROC) (GLsizei n, GLuint *framebuffers); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERRENDERBUFFERPROC) (GLuint framebuffer, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERPARAMETERIPROC) (GLuint framebuffer, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTUREPROC) (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTURELAYERPROC) (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERDRAWBUFFERPROC) (GLuint framebuffer, GLenum buf); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERDRAWBUFFERSPROC) (GLuint framebuffer, GLsizei n, const GLenum *bufs); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERREADBUFFERPROC) (GLuint framebuffer, GLenum src); +typedef void (APIENTRYP PFNGLINVALIDATENAMEDFRAMEBUFFERDATAPROC) (GLuint framebuffer, GLsizei numAttachments, const GLenum *attachments); +typedef void (APIENTRYP PFNGLINVALIDATENAMEDFRAMEBUFFERSUBDATAPROC) (GLuint framebuffer, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLCLEARNAMEDFRAMEBUFFERIVPROC) (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLint *value); +typedef void (APIENTRYP PFNGLCLEARNAMEDFRAMEBUFFERUIVPROC) (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLuint *value); +typedef void 
(APIENTRYP PFNGLCLEARNAMEDFRAMEBUFFERFVPROC) (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLfloat *value); +typedef void (APIENTRYP PFNGLCLEARNAMEDFRAMEBUFFERFIPROC) (GLuint framebuffer, GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +typedef void (APIENTRYP PFNGLBLITNAMEDFRAMEBUFFERPROC) (GLuint readFramebuffer, GLuint drawFramebuffer, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef GLenum (APIENTRYP PFNGLCHECKNAMEDFRAMEBUFFERSTATUSPROC) (GLuint framebuffer, GLenum target); +typedef void (APIENTRYP PFNGLGETNAMEDFRAMEBUFFERPARAMETERIVPROC) (GLuint framebuffer, GLenum pname, GLint *param); +typedef void (APIENTRYP PFNGLGETNAMEDFRAMEBUFFERATTACHMENTPARAMETERIVPROC) (GLuint framebuffer, GLenum attachment, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLCREATERENDERBUFFERSPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (APIENTRYP PFNGLNAMEDRENDERBUFFERSTORAGEPROC) (GLuint renderbuffer, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLNAMEDRENDERBUFFERSTORAGEMULTISAMPLEPROC) (GLuint renderbuffer, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETNAMEDRENDERBUFFERPARAMETERIVPROC) (GLuint renderbuffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLCREATETEXTURESPROC) (GLenum target, GLsizei n, GLuint *textures); +typedef void (APIENTRYP PFNGLTEXTUREBUFFERPROC) (GLuint texture, GLenum internalformat, GLuint buffer); +typedef void (APIENTRYP PFNGLTEXTUREBUFFERRANGEPROC) (GLuint texture, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE1DPROC) (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE2DPROC) (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE3DPROC) (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE2DMULTISAMPLEPROC) (GLuint texture, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE3DMULTISAMPLEPROC) (GLuint texture, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE1DPROC) (GLuint texture, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE2DPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE3DPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE1DPROC) (GLuint texture, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE2DPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE3DPROC) 
(GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOPYTEXTURESUBIMAGE1DPROC) (GLuint texture, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLCOPYTEXTURESUBIMAGE2DPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLCOPYTEXTURESUBIMAGE3DPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERFPROC) (GLuint texture, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERFVPROC) (GLuint texture, GLenum pname, const GLfloat *param); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIPROC) (GLuint texture, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIIVPROC) (GLuint texture, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIUIVPROC) (GLuint texture, GLenum pname, const GLuint *params); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIVPROC) (GLuint texture, GLenum pname, const GLint *param); +typedef void (APIENTRYP PFNGLGENERATETEXTUREMIPMAPPROC) (GLuint texture); +typedef void (APIENTRYP PFNGLBINDTEXTUREUNITPROC) (GLuint unit, GLuint texture); +typedef void (APIENTRYP PFNGLGETTEXTUREIMAGEPROC) (GLuint texture, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +typedef void (APIENTRYP PFNGLGETCOMPRESSEDTEXTUREIMAGEPROC) (GLuint texture, GLint level, GLsizei bufSize, void *pixels); +typedef void (APIENTRYP PFNGLGETTEXTURELEVELPARAMETERFVPROC) (GLuint texture, GLint level, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETTEXTURELEVELPARAMETERIVPROC) (GLuint texture, GLint level, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERFVPROC) (GLuint texture, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIIVPROC) (GLuint texture, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIUIVPROC) (GLuint texture, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIVPROC) (GLuint texture, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLCREATEVERTEXARRAYSPROC) (GLsizei n, GLuint *arrays); +typedef void (APIENTRYP PFNGLDISABLEVERTEXARRAYATTRIBPROC) (GLuint vaobj, GLuint index); +typedef void (APIENTRYP PFNGLENABLEVERTEXARRAYATTRIBPROC) (GLuint vaobj, GLuint index); +typedef void (APIENTRYP PFNGLVERTEXARRAYELEMENTBUFFERPROC) (GLuint vaobj, GLuint buffer); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXBUFFERPROC) (GLuint vaobj, GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXBUFFERSPROC) (GLuint vaobj, GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizei *strides); +typedef void (APIENTRYP PFNGLVERTEXARRAYATTRIBBINDINGPROC) (GLuint vaobj, GLuint attribindex, GLuint bindingindex); +typedef void (APIENTRYP PFNGLVERTEXARRAYATTRIBFORMATPROC) (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYATTRIBIFORMATPROC) (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYATTRIBLFORMATPROC) (GLuint vaobj, 
GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYBINDINGDIVISORPROC) (GLuint vaobj, GLuint bindingindex, GLuint divisor); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYIVPROC) (GLuint vaobj, GLenum pname, GLint *param); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYINDEXEDIVPROC) (GLuint vaobj, GLuint index, GLenum pname, GLint *param); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYINDEXED64IVPROC) (GLuint vaobj, GLuint index, GLenum pname, GLint64 *param); +typedef void (APIENTRYP PFNGLCREATESAMPLERSPROC) (GLsizei n, GLuint *samplers); +typedef void (APIENTRYP PFNGLCREATEPROGRAMPIPELINESPROC) (GLsizei n, GLuint *pipelines); +typedef void (APIENTRYP PFNGLCREATEQUERIESPROC) (GLenum target, GLsizei n, GLuint *ids); +typedef void (APIENTRYP PFNGLGETQUERYBUFFEROBJECTI64VPROC) (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +typedef void (APIENTRYP PFNGLGETQUERYBUFFEROBJECTIVPROC) (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +typedef void (APIENTRYP PFNGLGETQUERYBUFFEROBJECTUI64VPROC) (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +typedef void (APIENTRYP PFNGLGETQUERYBUFFEROBJECTUIVPROC) (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +typedef void (APIENTRYP PFNGLMEMORYBARRIERBYREGIONPROC) (GLbitfield barriers); +typedef void (APIENTRYP PFNGLGETTEXTURESUBIMAGEPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +typedef void (APIENTRYP PFNGLGETCOMPRESSEDTEXTURESUBIMAGEPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLsizei bufSize, void *pixels); +typedef GLenum (APIENTRYP PFNGLGETGRAPHICSRESETSTATUSPROC) (void); +typedef void (APIENTRYP PFNGLGETNCOMPRESSEDTEXIMAGEPROC) (GLenum target, GLint lod, GLsizei bufSize, void *pixels); +typedef void (APIENTRYP PFNGLGETNTEXIMAGEPROC) (GLenum target, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +typedef void (APIENTRYP PFNGLGETNUNIFORMDVPROC) (GLuint program, GLint location, GLsizei bufSize, GLdouble *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMFVPROC) (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMIVPROC) (GLuint program, GLint location, GLsizei bufSize, GLint *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMUIVPROC) (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +typedef void (APIENTRYP PFNGLREADNPIXELSPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (APIENTRYP PFNGLGETNMAPDVPROC) (GLenum target, GLenum query, GLsizei bufSize, GLdouble *v); +typedef void (APIENTRYP PFNGLGETNMAPFVPROC) (GLenum target, GLenum query, GLsizei bufSize, GLfloat *v); +typedef void (APIENTRYP PFNGLGETNMAPIVPROC) (GLenum target, GLenum query, GLsizei bufSize, GLint *v); +typedef void (APIENTRYP PFNGLGETNPIXELMAPFVPROC) (GLenum map, GLsizei bufSize, GLfloat *values); +typedef void (APIENTRYP PFNGLGETNPIXELMAPUIVPROC) (GLenum map, GLsizei bufSize, GLuint *values); +typedef void (APIENTRYP PFNGLGETNPIXELMAPUSVPROC) (GLenum map, GLsizei bufSize, GLushort *values); +typedef void (APIENTRYP PFNGLGETNPOLYGONSTIPPLEPROC) (GLsizei bufSize, GLubyte *pattern); +typedef void (APIENTRYP PFNGLGETNCOLORTABLEPROC) (GLenum target, GLenum format, GLenum type, GLsizei bufSize, void *table); 
+typedef void (APIENTRYP PFNGLGETNCONVOLUTIONFILTERPROC) (GLenum target, GLenum format, GLenum type, GLsizei bufSize, void *image); +typedef void (APIENTRYP PFNGLGETNSEPARABLEFILTERPROC) (GLenum target, GLenum format, GLenum type, GLsizei rowBufSize, void *row, GLsizei columnBufSize, void *column, void *span); +typedef void (APIENTRYP PFNGLGETNHISTOGRAMPROC) (GLenum target, GLboolean reset, GLenum format, GLenum type, GLsizei bufSize, void *values); +typedef void (APIENTRYP PFNGLGETNMINMAXPROC) (GLenum target, GLboolean reset, GLenum format, GLenum type, GLsizei bufSize, void *values); +typedef void (APIENTRYP PFNGLTEXTUREBARRIERPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glClipControl (GLenum origin, GLenum depth); +GLAPI void APIENTRY glCreateTransformFeedbacks (GLsizei n, GLuint *ids); +GLAPI void APIENTRY glTransformFeedbackBufferBase (GLuint xfb, GLuint index, GLuint buffer); +GLAPI void APIENTRY glTransformFeedbackBufferRange (GLuint xfb, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +GLAPI void APIENTRY glGetTransformFeedbackiv (GLuint xfb, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetTransformFeedbacki_v (GLuint xfb, GLenum pname, GLuint index, GLint *param); +GLAPI void APIENTRY glGetTransformFeedbacki64_v (GLuint xfb, GLenum pname, GLuint index, GLint64 *param); +GLAPI void APIENTRY glCreateBuffers (GLsizei n, GLuint *buffers); +GLAPI void APIENTRY glNamedBufferStorage (GLuint buffer, GLsizeiptr size, const void *data, GLbitfield flags); +GLAPI void APIENTRY glNamedBufferData (GLuint buffer, GLsizeiptr size, const void *data, GLenum usage); +GLAPI void APIENTRY glNamedBufferSubData (GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +GLAPI void APIENTRY glCopyNamedBufferSubData (GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GLAPI void APIENTRY glClearNamedBufferData (GLuint buffer, GLenum internalformat, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glClearNamedBufferSubData (GLuint buffer, GLenum internalformat, GLintptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +GLAPI void *APIENTRY glMapNamedBuffer (GLuint buffer, GLenum access); +GLAPI void *APIENTRY glMapNamedBufferRange (GLuint buffer, GLintptr offset, GLsizeiptr length, GLbitfield access); +GLAPI GLboolean APIENTRY glUnmapNamedBuffer (GLuint buffer); +GLAPI void APIENTRY glFlushMappedNamedBufferRange (GLuint buffer, GLintptr offset, GLsizeiptr length); +GLAPI void APIENTRY glGetNamedBufferParameteriv (GLuint buffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetNamedBufferParameteri64v (GLuint buffer, GLenum pname, GLint64 *params); +GLAPI void APIENTRY glGetNamedBufferPointerv (GLuint buffer, GLenum pname, void **params); +GLAPI void APIENTRY glGetNamedBufferSubData (GLuint buffer, GLintptr offset, GLsizeiptr size, void *data); +GLAPI void APIENTRY glCreateFramebuffers (GLsizei n, GLuint *framebuffers); +GLAPI void APIENTRY glNamedFramebufferRenderbuffer (GLuint framebuffer, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GLAPI void APIENTRY glNamedFramebufferParameteri (GLuint framebuffer, GLenum pname, GLint param); +GLAPI void APIENTRY glNamedFramebufferTexture (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level); +GLAPI void APIENTRY glNamedFramebufferTextureLayer (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLint layer); +GLAPI void APIENTRY glNamedFramebufferDrawBuffer (GLuint 
framebuffer, GLenum buf); +GLAPI void APIENTRY glNamedFramebufferDrawBuffers (GLuint framebuffer, GLsizei n, const GLenum *bufs); +GLAPI void APIENTRY glNamedFramebufferReadBuffer (GLuint framebuffer, GLenum src); +GLAPI void APIENTRY glInvalidateNamedFramebufferData (GLuint framebuffer, GLsizei numAttachments, const GLenum *attachments); +GLAPI void APIENTRY glInvalidateNamedFramebufferSubData (GLuint framebuffer, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glClearNamedFramebufferiv (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLint *value); +GLAPI void APIENTRY glClearNamedFramebufferuiv (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLuint *value); +GLAPI void APIENTRY glClearNamedFramebufferfv (GLuint framebuffer, GLenum buffer, GLint drawbuffer, const GLfloat *value); +GLAPI void APIENTRY glClearNamedFramebufferfi (GLuint framebuffer, GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +GLAPI void APIENTRY glBlitNamedFramebuffer (GLuint readFramebuffer, GLuint drawFramebuffer, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GLAPI GLenum APIENTRY glCheckNamedFramebufferStatus (GLuint framebuffer, GLenum target); +GLAPI void APIENTRY glGetNamedFramebufferParameteriv (GLuint framebuffer, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetNamedFramebufferAttachmentParameteriv (GLuint framebuffer, GLenum attachment, GLenum pname, GLint *params); +GLAPI void APIENTRY glCreateRenderbuffers (GLsizei n, GLuint *renderbuffers); +GLAPI void APIENTRY glNamedRenderbufferStorage (GLuint renderbuffer, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glNamedRenderbufferStorageMultisample (GLuint renderbuffer, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetNamedRenderbufferParameteriv (GLuint renderbuffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glCreateTextures (GLenum target, GLsizei n, GLuint *textures); +GLAPI void APIENTRY glTextureBuffer (GLuint texture, GLenum internalformat, GLuint buffer); +GLAPI void APIENTRY glTextureBufferRange (GLuint texture, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +GLAPI void APIENTRY glTextureStorage1D (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width); +GLAPI void APIENTRY glTextureStorage2D (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glTextureStorage3D (GLuint texture, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GLAPI void APIENTRY glTextureStorage2DMultisample (GLuint texture, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTextureStorage3DMultisample (GLuint texture, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTextureSubImage1D (GLuint texture, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureSubImage2D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureSubImage3D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei 
width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCompressedTextureSubImage1D (GLuint texture, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTextureSubImage2D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTextureSubImage3D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCopyTextureSubImage1D (GLuint texture, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY glCopyTextureSubImage2D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glCopyTextureSubImage3D (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glTextureParameterf (GLuint texture, GLenum pname, GLfloat param); +GLAPI void APIENTRY glTextureParameterfv (GLuint texture, GLenum pname, const GLfloat *param); +GLAPI void APIENTRY glTextureParameteri (GLuint texture, GLenum pname, GLint param); +GLAPI void APIENTRY glTextureParameterIiv (GLuint texture, GLenum pname, const GLint *params); +GLAPI void APIENTRY glTextureParameterIuiv (GLuint texture, GLenum pname, const GLuint *params); +GLAPI void APIENTRY glTextureParameteriv (GLuint texture, GLenum pname, const GLint *param); +GLAPI void APIENTRY glGenerateTextureMipmap (GLuint texture); +GLAPI void APIENTRY glBindTextureUnit (GLuint unit, GLuint texture); +GLAPI void APIENTRY glGetTextureImage (GLuint texture, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +GLAPI void APIENTRY glGetCompressedTextureImage (GLuint texture, GLint level, GLsizei bufSize, void *pixels); +GLAPI void APIENTRY glGetTextureLevelParameterfv (GLuint texture, GLint level, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetTextureLevelParameteriv (GLuint texture, GLint level, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTextureParameterfv (GLuint texture, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetTextureParameterIiv (GLuint texture, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTextureParameterIuiv (GLuint texture, GLenum pname, GLuint *params); +GLAPI void APIENTRY glGetTextureParameteriv (GLuint texture, GLenum pname, GLint *params); +GLAPI void APIENTRY glCreateVertexArrays (GLsizei n, GLuint *arrays); +GLAPI void APIENTRY glDisableVertexArrayAttrib (GLuint vaobj, GLuint index); +GLAPI void APIENTRY glEnableVertexArrayAttrib (GLuint vaobj, GLuint index); +GLAPI void APIENTRY glVertexArrayElementBuffer (GLuint vaobj, GLuint buffer); +GLAPI void APIENTRY glVertexArrayVertexBuffer (GLuint vaobj, GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +GLAPI void APIENTRY glVertexArrayVertexBuffers (GLuint vaobj, GLuint first, GLsizei count, const GLuint *buffers, const GLintptr *offsets, const GLsizei *strides); +GLAPI void APIENTRY glVertexArrayAttribBinding (GLuint vaobj, GLuint attribindex, GLuint bindingindex); +GLAPI void APIENTRY glVertexArrayAttribFormat (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayAttribIFormat 
(GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayAttribLFormat (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayBindingDivisor (GLuint vaobj, GLuint bindingindex, GLuint divisor); +GLAPI void APIENTRY glGetVertexArrayiv (GLuint vaobj, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetVertexArrayIndexediv (GLuint vaobj, GLuint index, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetVertexArrayIndexed64iv (GLuint vaobj, GLuint index, GLenum pname, GLint64 *param); +GLAPI void APIENTRY glCreateSamplers (GLsizei n, GLuint *samplers); +GLAPI void APIENTRY glCreateProgramPipelines (GLsizei n, GLuint *pipelines); +GLAPI void APIENTRY glCreateQueries (GLenum target, GLsizei n, GLuint *ids); +GLAPI void APIENTRY glGetQueryBufferObjecti64v (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +GLAPI void APIENTRY glGetQueryBufferObjectiv (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +GLAPI void APIENTRY glGetQueryBufferObjectui64v (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +GLAPI void APIENTRY glGetQueryBufferObjectuiv (GLuint id, GLuint buffer, GLenum pname, GLintptr offset); +GLAPI void APIENTRY glMemoryBarrierByRegion (GLbitfield barriers); +GLAPI void APIENTRY glGetTextureSubImage (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +GLAPI void APIENTRY glGetCompressedTextureSubImage (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLsizei bufSize, void *pixels); +GLAPI GLenum APIENTRY glGetGraphicsResetStatus (void); +GLAPI void APIENTRY glGetnCompressedTexImage (GLenum target, GLint lod, GLsizei bufSize, void *pixels); +GLAPI void APIENTRY glGetnTexImage (GLenum target, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *pixels); +GLAPI void APIENTRY glGetnUniformdv (GLuint program, GLint location, GLsizei bufSize, GLdouble *params); +GLAPI void APIENTRY glGetnUniformfv (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GLAPI void APIENTRY glGetnUniformiv (GLuint program, GLint location, GLsizei bufSize, GLint *params); +GLAPI void APIENTRY glGetnUniformuiv (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +GLAPI void APIENTRY glReadnPixels (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GLAPI void APIENTRY glGetnMapdv (GLenum target, GLenum query, GLsizei bufSize, GLdouble *v); +GLAPI void APIENTRY glGetnMapfv (GLenum target, GLenum query, GLsizei bufSize, GLfloat *v); +GLAPI void APIENTRY glGetnMapiv (GLenum target, GLenum query, GLsizei bufSize, GLint *v); +GLAPI void APIENTRY glGetnPixelMapfv (GLenum map, GLsizei bufSize, GLfloat *values); +GLAPI void APIENTRY glGetnPixelMapuiv (GLenum map, GLsizei bufSize, GLuint *values); +GLAPI void APIENTRY glGetnPixelMapusv (GLenum map, GLsizei bufSize, GLushort *values); +GLAPI void APIENTRY glGetnPolygonStipple (GLsizei bufSize, GLubyte *pattern); +GLAPI void APIENTRY glGetnColorTable (GLenum target, GLenum format, GLenum type, GLsizei bufSize, void *table); +GLAPI void APIENTRY glGetnConvolutionFilter (GLenum target, GLenum format, GLenum type, GLsizei bufSize, void *image); +GLAPI void APIENTRY glGetnSeparableFilter (GLenum target, GLenum format, GLenum type, 
GLsizei rowBufSize, void *row, GLsizei columnBufSize, void *column, void *span); +GLAPI void APIENTRY glGetnHistogram (GLenum target, GLboolean reset, GLenum format, GLenum type, GLsizei bufSize, void *values); +GLAPI void APIENTRY glGetnMinmax (GLenum target, GLboolean reset, GLenum format, GLenum type, GLsizei bufSize, void *values); +GLAPI void APIENTRY glTextureBarrier (void); +#endif +#endif /* GL_VERSION_4_5 */ + +#ifndef GL_VERSION_4_6 +#define GL_VERSION_4_6 1 +#define GL_SHADER_BINARY_FORMAT_SPIR_V 0x9551 +#define GL_SPIR_V_BINARY 0x9552 +#define GL_PARAMETER_BUFFER 0x80EE +#define GL_PARAMETER_BUFFER_BINDING 0x80EF +#define GL_CONTEXT_FLAG_NO_ERROR_BIT 0x00000008 +#define GL_VERTICES_SUBMITTED 0x82EE +#define GL_PRIMITIVES_SUBMITTED 0x82EF +#define GL_VERTEX_SHADER_INVOCATIONS 0x82F0 +#define GL_TESS_CONTROL_SHADER_PATCHES 0x82F1 +#define GL_TESS_EVALUATION_SHADER_INVOCATIONS 0x82F2 +#define GL_GEOMETRY_SHADER_PRIMITIVES_EMITTED 0x82F3 +#define GL_FRAGMENT_SHADER_INVOCATIONS 0x82F4 +#define GL_COMPUTE_SHADER_INVOCATIONS 0x82F5 +#define GL_CLIPPING_INPUT_PRIMITIVES 0x82F6 +#define GL_CLIPPING_OUTPUT_PRIMITIVES 0x82F7 +#define GL_POLYGON_OFFSET_CLAMP 0x8E1B +#define GL_SPIR_V_EXTENSIONS 0x9553 +#define GL_NUM_SPIR_V_EXTENSIONS 0x9554 +#define GL_TEXTURE_MAX_ANISOTROPY 0x84FE +#define GL_MAX_TEXTURE_MAX_ANISOTROPY 0x84FF +#define GL_TRANSFORM_FEEDBACK_OVERFLOW 0x82EC +#define GL_TRANSFORM_FEEDBACK_STREAM_OVERFLOW 0x82ED +typedef void (APIENTRYP PFNGLSPECIALIZESHADERPROC) (GLuint shader, const GLchar *pEntryPoint, GLuint numSpecializationConstants, const GLuint *pConstantIndex, const GLuint *pConstantValue); +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTCOUNTPROC) (GLenum mode, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTCOUNTPROC) (GLenum mode, GLenum type, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +typedef void (APIENTRYP PFNGLPOLYGONOFFSETCLAMPPROC) (GLfloat factor, GLfloat units, GLfloat clamp); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSpecializeShader (GLuint shader, const GLchar *pEntryPoint, GLuint numSpecializationConstants, const GLuint *pConstantIndex, const GLuint *pConstantValue); +GLAPI void APIENTRY glMultiDrawArraysIndirectCount (GLenum mode, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +GLAPI void APIENTRY glMultiDrawElementsIndirectCount (GLenum mode, GLenum type, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +GLAPI void APIENTRY glPolygonOffsetClamp (GLfloat factor, GLfloat units, GLfloat clamp); +#endif +#endif /* GL_VERSION_4_6 */ + +#ifndef GL_ARB_ES2_compatibility +#define GL_ARB_ES2_compatibility 1 +#endif /* GL_ARB_ES2_compatibility */ + +#ifndef GL_ARB_ES3_1_compatibility +#define GL_ARB_ES3_1_compatibility 1 +#endif /* GL_ARB_ES3_1_compatibility */ + +#ifndef GL_ARB_ES3_2_compatibility +#define GL_ARB_ES3_2_compatibility 1 +#define GL_PRIMITIVE_BOUNDING_BOX_ARB 0x92BE +#define GL_MULTISAMPLE_LINE_WIDTH_RANGE_ARB 0x9381 +#define GL_MULTISAMPLE_LINE_WIDTH_GRANULARITY_ARB 0x9382 +typedef void (APIENTRYP PFNGLPRIMITIVEBOUNDINGBOXARBPROC) (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPrimitiveBoundingBoxARB (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#endif 
+#endif /* GL_ARB_ES3_2_compatibility */ + +#ifndef GL_ARB_ES3_compatibility +#define GL_ARB_ES3_compatibility 1 +#endif /* GL_ARB_ES3_compatibility */ + +#ifndef GL_ARB_arrays_of_arrays +#define GL_ARB_arrays_of_arrays 1 +#endif /* GL_ARB_arrays_of_arrays */ + +#ifndef GL_ARB_base_instance +#define GL_ARB_base_instance 1 +#endif /* GL_ARB_base_instance */ + +#ifndef GL_ARB_bindless_texture +#define GL_ARB_bindless_texture 1 +typedef uint64_t GLuint64EXT; +#define GL_UNSIGNED_INT64_ARB 0x140F +typedef GLuint64 (APIENTRYP PFNGLGETTEXTUREHANDLEARBPROC) (GLuint texture); +typedef GLuint64 (APIENTRYP PFNGLGETTEXTURESAMPLERHANDLEARBPROC) (GLuint texture, GLuint sampler); +typedef void (APIENTRYP PFNGLMAKETEXTUREHANDLERESIDENTARBPROC) (GLuint64 handle); +typedef void (APIENTRYP PFNGLMAKETEXTUREHANDLENONRESIDENTARBPROC) (GLuint64 handle); +typedef GLuint64 (APIENTRYP PFNGLGETIMAGEHANDLEARBPROC) (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +typedef void (APIENTRYP PFNGLMAKEIMAGEHANDLERESIDENTARBPROC) (GLuint64 handle, GLenum access); +typedef void (APIENTRYP PFNGLMAKEIMAGEHANDLENONRESIDENTARBPROC) (GLuint64 handle); +typedef void (APIENTRYP PFNGLUNIFORMHANDLEUI64ARBPROC) (GLint location, GLuint64 value); +typedef void (APIENTRYP PFNGLUNIFORMHANDLEUI64VARBPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64ARBPROC) (GLuint program, GLint location, GLuint64 value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +typedef GLboolean (APIENTRYP PFNGLISTEXTUREHANDLERESIDENTARBPROC) (GLuint64 handle); +typedef GLboolean (APIENTRYP PFNGLISIMAGEHANDLERESIDENTARBPROC) (GLuint64 handle); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1UI64ARBPROC) (GLuint index, GLuint64EXT x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1UI64VARBPROC) (GLuint index, const GLuint64EXT *v); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBLUI64VARBPROC) (GLuint index, GLenum pname, GLuint64EXT *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLuint64 APIENTRY glGetTextureHandleARB (GLuint texture); +GLAPI GLuint64 APIENTRY glGetTextureSamplerHandleARB (GLuint texture, GLuint sampler); +GLAPI void APIENTRY glMakeTextureHandleResidentARB (GLuint64 handle); +GLAPI void APIENTRY glMakeTextureHandleNonResidentARB (GLuint64 handle); +GLAPI GLuint64 APIENTRY glGetImageHandleARB (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +GLAPI void APIENTRY glMakeImageHandleResidentARB (GLuint64 handle, GLenum access); +GLAPI void APIENTRY glMakeImageHandleNonResidentARB (GLuint64 handle); +GLAPI void APIENTRY glUniformHandleui64ARB (GLint location, GLuint64 value); +GLAPI void APIENTRY glUniformHandleui64vARB (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glProgramUniformHandleui64ARB (GLuint program, GLint location, GLuint64 value); +GLAPI void APIENTRY glProgramUniformHandleui64vARB (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +GLAPI GLboolean APIENTRY glIsTextureHandleResidentARB (GLuint64 handle); +GLAPI GLboolean APIENTRY glIsImageHandleResidentARB (GLuint64 handle); +GLAPI void APIENTRY glVertexAttribL1ui64ARB (GLuint index, GLuint64EXT x); +GLAPI void APIENTRY glVertexAttribL1ui64vARB (GLuint index, const GLuint64EXT *v); +GLAPI void APIENTRY glGetVertexAttribLui64vARB (GLuint index, GLenum pname, GLuint64EXT *params); +#endif +#endif /* GL_ARB_bindless_texture */ + +#ifndef 
GL_ARB_blend_func_extended +#define GL_ARB_blend_func_extended 1 +#endif /* GL_ARB_blend_func_extended */ + +#ifndef GL_ARB_buffer_storage +#define GL_ARB_buffer_storage 1 +#endif /* GL_ARB_buffer_storage */ + +#ifndef GL_ARB_cl_event +#define GL_ARB_cl_event 1 +struct _cl_context; +struct _cl_event; +#define GL_SYNC_CL_EVENT_ARB 0x8240 +#define GL_SYNC_CL_EVENT_COMPLETE_ARB 0x8241 +typedef GLsync (APIENTRYP PFNGLCREATESYNCFROMCLEVENTARBPROC) (struct _cl_context *context, struct _cl_event *event, GLbitfield flags); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLsync APIENTRY glCreateSyncFromCLeventARB (struct _cl_context *context, struct _cl_event *event, GLbitfield flags); +#endif +#endif /* GL_ARB_cl_event */ + +#ifndef GL_ARB_clear_buffer_object +#define GL_ARB_clear_buffer_object 1 +#endif /* GL_ARB_clear_buffer_object */ + +#ifndef GL_ARB_clear_texture +#define GL_ARB_clear_texture 1 +#endif /* GL_ARB_clear_texture */ + +#ifndef GL_ARB_clip_control +#define GL_ARB_clip_control 1 +#endif /* GL_ARB_clip_control */ + +#ifndef GL_ARB_color_buffer_float +#define GL_ARB_color_buffer_float 1 +#define GL_RGBA_FLOAT_MODE_ARB 0x8820 +#define GL_CLAMP_VERTEX_COLOR_ARB 0x891A +#define GL_CLAMP_FRAGMENT_COLOR_ARB 0x891B +#define GL_CLAMP_READ_COLOR_ARB 0x891C +#define GL_FIXED_ONLY_ARB 0x891D +typedef void (APIENTRYP PFNGLCLAMPCOLORARBPROC) (GLenum target, GLenum clamp); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glClampColorARB (GLenum target, GLenum clamp); +#endif +#endif /* GL_ARB_color_buffer_float */ + +#ifndef GL_ARB_compatibility +#define GL_ARB_compatibility 1 +#endif /* GL_ARB_compatibility */ + +#ifndef GL_ARB_compressed_texture_pixel_storage +#define GL_ARB_compressed_texture_pixel_storage 1 +#endif /* GL_ARB_compressed_texture_pixel_storage */ + +#ifndef GL_ARB_compute_shader +#define GL_ARB_compute_shader 1 +#endif /* GL_ARB_compute_shader */ + +#ifndef GL_ARB_compute_variable_group_size +#define GL_ARB_compute_variable_group_size 1 +#define GL_MAX_COMPUTE_VARIABLE_GROUP_INVOCATIONS_ARB 0x9344 +#define GL_MAX_COMPUTE_FIXED_GROUP_INVOCATIONS_ARB 0x90EB +#define GL_MAX_COMPUTE_VARIABLE_GROUP_SIZE_ARB 0x9345 +#define GL_MAX_COMPUTE_FIXED_GROUP_SIZE_ARB 0x91BF +typedef void (APIENTRYP PFNGLDISPATCHCOMPUTEGROUPSIZEARBPROC) (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z, GLuint group_size_x, GLuint group_size_y, GLuint group_size_z); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDispatchComputeGroupSizeARB (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z, GLuint group_size_x, GLuint group_size_y, GLuint group_size_z); +#endif +#endif /* GL_ARB_compute_variable_group_size */ + +#ifndef GL_ARB_conditional_render_inverted +#define GL_ARB_conditional_render_inverted 1 +#endif /* GL_ARB_conditional_render_inverted */ + +#ifndef GL_ARB_conservative_depth +#define GL_ARB_conservative_depth 1 +#endif /* GL_ARB_conservative_depth */ + +#ifndef GL_ARB_copy_buffer +#define GL_ARB_copy_buffer 1 +#endif /* GL_ARB_copy_buffer */ + +#ifndef GL_ARB_copy_image +#define GL_ARB_copy_image 1 +#endif /* GL_ARB_copy_image */ + +#ifndef GL_ARB_cull_distance +#define GL_ARB_cull_distance 1 +#endif /* GL_ARB_cull_distance */ + +#ifndef GL_ARB_debug_output +#define GL_ARB_debug_output 1 +typedef void (APIENTRY *GLDEBUGPROCARB)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam); +#define GL_DEBUG_OUTPUT_SYNCHRONOUS_ARB 0x8242 +#define GL_DEBUG_NEXT_LOGGED_MESSAGE_LENGTH_ARB 0x8243 +#define GL_DEBUG_CALLBACK_FUNCTION_ARB 
0x8244 +#define GL_DEBUG_CALLBACK_USER_PARAM_ARB 0x8245 +#define GL_DEBUG_SOURCE_API_ARB 0x8246 +#define GL_DEBUG_SOURCE_WINDOW_SYSTEM_ARB 0x8247 +#define GL_DEBUG_SOURCE_SHADER_COMPILER_ARB 0x8248 +#define GL_DEBUG_SOURCE_THIRD_PARTY_ARB 0x8249 +#define GL_DEBUG_SOURCE_APPLICATION_ARB 0x824A +#define GL_DEBUG_SOURCE_OTHER_ARB 0x824B +#define GL_DEBUG_TYPE_ERROR_ARB 0x824C +#define GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_ARB 0x824D +#define GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_ARB 0x824E +#define GL_DEBUG_TYPE_PORTABILITY_ARB 0x824F +#define GL_DEBUG_TYPE_PERFORMANCE_ARB 0x8250 +#define GL_DEBUG_TYPE_OTHER_ARB 0x8251 +#define GL_MAX_DEBUG_MESSAGE_LENGTH_ARB 0x9143 +#define GL_MAX_DEBUG_LOGGED_MESSAGES_ARB 0x9144 +#define GL_DEBUG_LOGGED_MESSAGES_ARB 0x9145 +#define GL_DEBUG_SEVERITY_HIGH_ARB 0x9146 +#define GL_DEBUG_SEVERITY_MEDIUM_ARB 0x9147 +#define GL_DEBUG_SEVERITY_LOW_ARB 0x9148 +typedef void (APIENTRYP PFNGLDEBUGMESSAGECONTROLARBPROC) (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +typedef void (APIENTRYP PFNGLDEBUGMESSAGEINSERTARBPROC) (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +typedef void (APIENTRYP PFNGLDEBUGMESSAGECALLBACKARBPROC) (GLDEBUGPROCARB callback, const void *userParam); +typedef GLuint (APIENTRYP PFNGLGETDEBUGMESSAGELOGARBPROC) (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDebugMessageControlARB (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +GLAPI void APIENTRY glDebugMessageInsertARB (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +GLAPI void APIENTRY glDebugMessageCallbackARB (GLDEBUGPROCARB callback, const void *userParam); +GLAPI GLuint APIENTRY glGetDebugMessageLogARB (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +#endif +#endif /* GL_ARB_debug_output */ + +#ifndef GL_ARB_depth_buffer_float +#define GL_ARB_depth_buffer_float 1 +#endif /* GL_ARB_depth_buffer_float */ + +#ifndef GL_ARB_depth_clamp +#define GL_ARB_depth_clamp 1 +#endif /* GL_ARB_depth_clamp */ + +#ifndef GL_ARB_depth_texture +#define GL_ARB_depth_texture 1 +#define GL_DEPTH_COMPONENT16_ARB 0x81A5 +#define GL_DEPTH_COMPONENT24_ARB 0x81A6 +#define GL_DEPTH_COMPONENT32_ARB 0x81A7 +#define GL_TEXTURE_DEPTH_SIZE_ARB 0x884A +#define GL_DEPTH_TEXTURE_MODE_ARB 0x884B +#endif /* GL_ARB_depth_texture */ + +#ifndef GL_ARB_derivative_control +#define GL_ARB_derivative_control 1 +#endif /* GL_ARB_derivative_control */ + +#ifndef GL_ARB_direct_state_access +#define GL_ARB_direct_state_access 1 +#endif /* GL_ARB_direct_state_access */ + +#ifndef GL_ARB_draw_buffers +#define GL_ARB_draw_buffers 1 +#define GL_MAX_DRAW_BUFFERS_ARB 0x8824 +#define GL_DRAW_BUFFER0_ARB 0x8825 +#define GL_DRAW_BUFFER1_ARB 0x8826 +#define GL_DRAW_BUFFER2_ARB 0x8827 +#define GL_DRAW_BUFFER3_ARB 0x8828 +#define GL_DRAW_BUFFER4_ARB 0x8829 +#define GL_DRAW_BUFFER5_ARB 0x882A +#define GL_DRAW_BUFFER6_ARB 0x882B +#define GL_DRAW_BUFFER7_ARB 0x882C +#define GL_DRAW_BUFFER8_ARB 0x882D +#define GL_DRAW_BUFFER9_ARB 0x882E +#define GL_DRAW_BUFFER10_ARB 0x882F +#define GL_DRAW_BUFFER11_ARB 0x8830 +#define GL_DRAW_BUFFER12_ARB 0x8831 +#define GL_DRAW_BUFFER13_ARB 0x8832 +#define GL_DRAW_BUFFER14_ARB 0x8833 +#define GL_DRAW_BUFFER15_ARB 
0x8834 +typedef void (APIENTRYP PFNGLDRAWBUFFERSARBPROC) (GLsizei n, const GLenum *bufs); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawBuffersARB (GLsizei n, const GLenum *bufs); +#endif +#endif /* GL_ARB_draw_buffers */ + +#ifndef GL_ARB_draw_buffers_blend +#define GL_ARB_draw_buffers_blend 1 +typedef void (APIENTRYP PFNGLBLENDEQUATIONIARBPROC) (GLuint buf, GLenum mode); +typedef void (APIENTRYP PFNGLBLENDEQUATIONSEPARATEIARBPROC) (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (APIENTRYP PFNGLBLENDFUNCIARBPROC) (GLuint buf, GLenum src, GLenum dst); +typedef void (APIENTRYP PFNGLBLENDFUNCSEPARATEIARBPROC) (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendEquationiARB (GLuint buf, GLenum mode); +GLAPI void APIENTRY glBlendEquationSeparateiARB (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GLAPI void APIENTRY glBlendFunciARB (GLuint buf, GLenum src, GLenum dst); +GLAPI void APIENTRY glBlendFuncSeparateiARB (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +#endif +#endif /* GL_ARB_draw_buffers_blend */ + +#ifndef GL_ARB_draw_elements_base_vertex +#define GL_ARB_draw_elements_base_vertex 1 +#endif /* GL_ARB_draw_elements_base_vertex */ + +#ifndef GL_ARB_draw_indirect +#define GL_ARB_draw_indirect 1 +#endif /* GL_ARB_draw_indirect */ + +#ifndef GL_ARB_draw_instanced +#define GL_ARB_draw_instanced 1 +typedef void (APIENTRYP PFNGLDRAWARRAYSINSTANCEDARBPROC) (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDARBPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawArraysInstancedARB (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +GLAPI void APIENTRY glDrawElementsInstancedARB (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#endif +#endif /* GL_ARB_draw_instanced */ + +#ifndef GL_ARB_enhanced_layouts +#define GL_ARB_enhanced_layouts 1 +#endif /* GL_ARB_enhanced_layouts */ + +#ifndef GL_ARB_explicit_attrib_location +#define GL_ARB_explicit_attrib_location 1 +#endif /* GL_ARB_explicit_attrib_location */ + +#ifndef GL_ARB_explicit_uniform_location +#define GL_ARB_explicit_uniform_location 1 +#endif /* GL_ARB_explicit_uniform_location */ + +#ifndef GL_ARB_fragment_coord_conventions +#define GL_ARB_fragment_coord_conventions 1 +#endif /* GL_ARB_fragment_coord_conventions */ + +#ifndef GL_ARB_fragment_layer_viewport +#define GL_ARB_fragment_layer_viewport 1 +#endif /* GL_ARB_fragment_layer_viewport */ + +#ifndef GL_ARB_fragment_program +#define GL_ARB_fragment_program 1 +#define GL_FRAGMENT_PROGRAM_ARB 0x8804 +#define GL_PROGRAM_FORMAT_ASCII_ARB 0x8875 +#define GL_PROGRAM_LENGTH_ARB 0x8627 +#define GL_PROGRAM_FORMAT_ARB 0x8876 +#define GL_PROGRAM_BINDING_ARB 0x8677 +#define GL_PROGRAM_INSTRUCTIONS_ARB 0x88A0 +#define GL_MAX_PROGRAM_INSTRUCTIONS_ARB 0x88A1 +#define GL_PROGRAM_NATIVE_INSTRUCTIONS_ARB 0x88A2 +#define GL_MAX_PROGRAM_NATIVE_INSTRUCTIONS_ARB 0x88A3 +#define GL_PROGRAM_TEMPORARIES_ARB 0x88A4 +#define GL_MAX_PROGRAM_TEMPORARIES_ARB 0x88A5 +#define GL_PROGRAM_NATIVE_TEMPORARIES_ARB 0x88A6 +#define GL_MAX_PROGRAM_NATIVE_TEMPORARIES_ARB 0x88A7 +#define GL_PROGRAM_PARAMETERS_ARB 0x88A8 +#define GL_MAX_PROGRAM_PARAMETERS_ARB 0x88A9 +#define GL_PROGRAM_NATIVE_PARAMETERS_ARB 0x88AA +#define GL_MAX_PROGRAM_NATIVE_PARAMETERS_ARB 0x88AB +#define GL_PROGRAM_ATTRIBS_ARB 
0x88AC +#define GL_MAX_PROGRAM_ATTRIBS_ARB 0x88AD +#define GL_PROGRAM_NATIVE_ATTRIBS_ARB 0x88AE +#define GL_MAX_PROGRAM_NATIVE_ATTRIBS_ARB 0x88AF +#define GL_MAX_PROGRAM_LOCAL_PARAMETERS_ARB 0x88B4 +#define GL_MAX_PROGRAM_ENV_PARAMETERS_ARB 0x88B5 +#define GL_PROGRAM_UNDER_NATIVE_LIMITS_ARB 0x88B6 +#define GL_PROGRAM_ALU_INSTRUCTIONS_ARB 0x8805 +#define GL_PROGRAM_TEX_INSTRUCTIONS_ARB 0x8806 +#define GL_PROGRAM_TEX_INDIRECTIONS_ARB 0x8807 +#define GL_PROGRAM_NATIVE_ALU_INSTRUCTIONS_ARB 0x8808 +#define GL_PROGRAM_NATIVE_TEX_INSTRUCTIONS_ARB 0x8809 +#define GL_PROGRAM_NATIVE_TEX_INDIRECTIONS_ARB 0x880A +#define GL_MAX_PROGRAM_ALU_INSTRUCTIONS_ARB 0x880B +#define GL_MAX_PROGRAM_TEX_INSTRUCTIONS_ARB 0x880C +#define GL_MAX_PROGRAM_TEX_INDIRECTIONS_ARB 0x880D +#define GL_MAX_PROGRAM_NATIVE_ALU_INSTRUCTIONS_ARB 0x880E +#define GL_MAX_PROGRAM_NATIVE_TEX_INSTRUCTIONS_ARB 0x880F +#define GL_MAX_PROGRAM_NATIVE_TEX_INDIRECTIONS_ARB 0x8810 +#define GL_PROGRAM_STRING_ARB 0x8628 +#define GL_PROGRAM_ERROR_POSITION_ARB 0x864B +#define GL_CURRENT_MATRIX_ARB 0x8641 +#define GL_TRANSPOSE_CURRENT_MATRIX_ARB 0x88B7 +#define GL_CURRENT_MATRIX_STACK_DEPTH_ARB 0x8640 +#define GL_MAX_PROGRAM_MATRICES_ARB 0x862F +#define GL_MAX_PROGRAM_MATRIX_STACK_DEPTH_ARB 0x862E +#define GL_MAX_TEXTURE_COORDS_ARB 0x8871 +#define GL_MAX_TEXTURE_IMAGE_UNITS_ARB 0x8872 +#define GL_PROGRAM_ERROR_STRING_ARB 0x8874 +#define GL_MATRIX0_ARB 0x88C0 +#define GL_MATRIX1_ARB 0x88C1 +#define GL_MATRIX2_ARB 0x88C2 +#define GL_MATRIX3_ARB 0x88C3 +#define GL_MATRIX4_ARB 0x88C4 +#define GL_MATRIX5_ARB 0x88C5 +#define GL_MATRIX6_ARB 0x88C6 +#define GL_MATRIX7_ARB 0x88C7 +#define GL_MATRIX8_ARB 0x88C8 +#define GL_MATRIX9_ARB 0x88C9 +#define GL_MATRIX10_ARB 0x88CA +#define GL_MATRIX11_ARB 0x88CB +#define GL_MATRIX12_ARB 0x88CC +#define GL_MATRIX13_ARB 0x88CD +#define GL_MATRIX14_ARB 0x88CE +#define GL_MATRIX15_ARB 0x88CF +#define GL_MATRIX16_ARB 0x88D0 +#define GL_MATRIX17_ARB 0x88D1 +#define GL_MATRIX18_ARB 0x88D2 +#define GL_MATRIX19_ARB 0x88D3 +#define GL_MATRIX20_ARB 0x88D4 +#define GL_MATRIX21_ARB 0x88D5 +#define GL_MATRIX22_ARB 0x88D6 +#define GL_MATRIX23_ARB 0x88D7 +#define GL_MATRIX24_ARB 0x88D8 +#define GL_MATRIX25_ARB 0x88D9 +#define GL_MATRIX26_ARB 0x88DA +#define GL_MATRIX27_ARB 0x88DB +#define GL_MATRIX28_ARB 0x88DC +#define GL_MATRIX29_ARB 0x88DD +#define GL_MATRIX30_ARB 0x88DE +#define GL_MATRIX31_ARB 0x88DF +typedef void (APIENTRYP PFNGLPROGRAMSTRINGARBPROC) (GLenum target, GLenum format, GLsizei len, const void *string); +typedef void (APIENTRYP PFNGLBINDPROGRAMARBPROC) (GLenum target, GLuint program); +typedef void (APIENTRYP PFNGLDELETEPROGRAMSARBPROC) (GLsizei n, const GLuint *programs); +typedef void (APIENTRYP PFNGLGENPROGRAMSARBPROC) (GLsizei n, GLuint *programs); +typedef void (APIENTRYP PFNGLPROGRAMENVPARAMETER4DARBPROC) (GLenum target, GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLPROGRAMENVPARAMETER4DVARBPROC) (GLenum target, GLuint index, const GLdouble *params); +typedef void (APIENTRYP PFNGLPROGRAMENVPARAMETER4FARBPROC) (GLenum target, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLPROGRAMENVPARAMETER4FVARBPROC) (GLenum target, GLuint index, const GLfloat *params); +typedef void (APIENTRYP PFNGLPROGRAMLOCALPARAMETER4DARBPROC) (GLenum target, GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLPROGRAMLOCALPARAMETER4DVARBPROC) (GLenum target, GLuint index, const GLdouble *params); +typedef void 
(APIENTRYP PFNGLPROGRAMLOCALPARAMETER4FARBPROC) (GLenum target, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLPROGRAMLOCALPARAMETER4FVARBPROC) (GLenum target, GLuint index, const GLfloat *params); +typedef void (APIENTRYP PFNGLGETPROGRAMENVPARAMETERDVARBPROC) (GLenum target, GLuint index, GLdouble *params); +typedef void (APIENTRYP PFNGLGETPROGRAMENVPARAMETERFVARBPROC) (GLenum target, GLuint index, GLfloat *params); +typedef void (APIENTRYP PFNGLGETPROGRAMLOCALPARAMETERDVARBPROC) (GLenum target, GLuint index, GLdouble *params); +typedef void (APIENTRYP PFNGLGETPROGRAMLOCALPARAMETERFVARBPROC) (GLenum target, GLuint index, GLfloat *params); +typedef void (APIENTRYP PFNGLGETPROGRAMIVARBPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETPROGRAMSTRINGARBPROC) (GLenum target, GLenum pname, void *string); +typedef GLboolean (APIENTRYP PFNGLISPROGRAMARBPROC) (GLuint program); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glProgramStringARB (GLenum target, GLenum format, GLsizei len, const void *string); +GLAPI void APIENTRY glBindProgramARB (GLenum target, GLuint program); +GLAPI void APIENTRY glDeleteProgramsARB (GLsizei n, const GLuint *programs); +GLAPI void APIENTRY glGenProgramsARB (GLsizei n, GLuint *programs); +GLAPI void APIENTRY glProgramEnvParameter4dARB (GLenum target, GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glProgramEnvParameter4dvARB (GLenum target, GLuint index, const GLdouble *params); +GLAPI void APIENTRY glProgramEnvParameter4fARB (GLenum target, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glProgramEnvParameter4fvARB (GLenum target, GLuint index, const GLfloat *params); +GLAPI void APIENTRY glProgramLocalParameter4dARB (GLenum target, GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glProgramLocalParameter4dvARB (GLenum target, GLuint index, const GLdouble *params); +GLAPI void APIENTRY glProgramLocalParameter4fARB (GLenum target, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glProgramLocalParameter4fvARB (GLenum target, GLuint index, const GLfloat *params); +GLAPI void APIENTRY glGetProgramEnvParameterdvARB (GLenum target, GLuint index, GLdouble *params); +GLAPI void APIENTRY glGetProgramEnvParameterfvARB (GLenum target, GLuint index, GLfloat *params); +GLAPI void APIENTRY glGetProgramLocalParameterdvARB (GLenum target, GLuint index, GLdouble *params); +GLAPI void APIENTRY glGetProgramLocalParameterfvARB (GLenum target, GLuint index, GLfloat *params); +GLAPI void APIENTRY glGetProgramivARB (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetProgramStringARB (GLenum target, GLenum pname, void *string); +GLAPI GLboolean APIENTRY glIsProgramARB (GLuint program); +#endif +#endif /* GL_ARB_fragment_program */ + +#ifndef GL_ARB_fragment_program_shadow +#define GL_ARB_fragment_program_shadow 1 +#endif /* GL_ARB_fragment_program_shadow */ + +#ifndef GL_ARB_fragment_shader +#define GL_ARB_fragment_shader 1 +#define GL_FRAGMENT_SHADER_ARB 0x8B30 +#define GL_MAX_FRAGMENT_UNIFORM_COMPONENTS_ARB 0x8B49 +#define GL_FRAGMENT_SHADER_DERIVATIVE_HINT_ARB 0x8B8B +#endif /* GL_ARB_fragment_shader */ + +#ifndef GL_ARB_fragment_shader_interlock +#define GL_ARB_fragment_shader_interlock 1 +#endif /* GL_ARB_fragment_shader_interlock */ + +#ifndef GL_ARB_framebuffer_no_attachments +#define GL_ARB_framebuffer_no_attachments 1 +#endif /* 
GL_ARB_framebuffer_no_attachments */ + +#ifndef GL_ARB_framebuffer_object +#define GL_ARB_framebuffer_object 1 +#endif /* GL_ARB_framebuffer_object */ + +#ifndef GL_ARB_framebuffer_sRGB +#define GL_ARB_framebuffer_sRGB 1 +#endif /* GL_ARB_framebuffer_sRGB */ + +#ifndef GL_ARB_geometry_shader4 +#define GL_ARB_geometry_shader4 1 +#define GL_LINES_ADJACENCY_ARB 0x000A +#define GL_LINE_STRIP_ADJACENCY_ARB 0x000B +#define GL_TRIANGLES_ADJACENCY_ARB 0x000C +#define GL_TRIANGLE_STRIP_ADJACENCY_ARB 0x000D +#define GL_PROGRAM_POINT_SIZE_ARB 0x8642 +#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_ARB 0x8C29 +#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED_ARB 0x8DA7 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_ARB 0x8DA8 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_COUNT_ARB 0x8DA9 +#define GL_GEOMETRY_SHADER_ARB 0x8DD9 +#define GL_GEOMETRY_VERTICES_OUT_ARB 0x8DDA +#define GL_GEOMETRY_INPUT_TYPE_ARB 0x8DDB +#define GL_GEOMETRY_OUTPUT_TYPE_ARB 0x8DDC +#define GL_MAX_GEOMETRY_VARYING_COMPONENTS_ARB 0x8DDD +#define GL_MAX_VERTEX_VARYING_COMPONENTS_ARB 0x8DDE +#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS_ARB 0x8DDF +#define GL_MAX_GEOMETRY_OUTPUT_VERTICES_ARB 0x8DE0 +#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS_ARB 0x8DE1 +typedef void (APIENTRYP PFNGLPROGRAMPARAMETERIARBPROC) (GLuint program, GLenum pname, GLint value); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTUREARBPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERARBPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTUREFACEARBPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLenum face); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glProgramParameteriARB (GLuint program, GLenum pname, GLint value); +GLAPI void APIENTRY glFramebufferTextureARB (GLenum target, GLenum attachment, GLuint texture, GLint level); +GLAPI void APIENTRY glFramebufferTextureLayerARB (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +GLAPI void APIENTRY glFramebufferTextureFaceARB (GLenum target, GLenum attachment, GLuint texture, GLint level, GLenum face); +#endif +#endif /* GL_ARB_geometry_shader4 */ + +#ifndef GL_ARB_get_program_binary +#define GL_ARB_get_program_binary 1 +#endif /* GL_ARB_get_program_binary */ + +#ifndef GL_ARB_get_texture_sub_image +#define GL_ARB_get_texture_sub_image 1 +#endif /* GL_ARB_get_texture_sub_image */ + +#ifndef GL_ARB_gl_spirv +#define GL_ARB_gl_spirv 1 +#define GL_SHADER_BINARY_FORMAT_SPIR_V_ARB 0x9551 +#define GL_SPIR_V_BINARY_ARB 0x9552 +typedef void (APIENTRYP PFNGLSPECIALIZESHADERARBPROC) (GLuint shader, const GLchar *pEntryPoint, GLuint numSpecializationConstants, const GLuint *pConstantIndex, const GLuint *pConstantValue); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSpecializeShaderARB (GLuint shader, const GLchar *pEntryPoint, GLuint numSpecializationConstants, const GLuint *pConstantIndex, const GLuint *pConstantValue); +#endif +#endif /* GL_ARB_gl_spirv */ + +#ifndef GL_ARB_gpu_shader5 +#define GL_ARB_gpu_shader5 1 +#endif /* GL_ARB_gpu_shader5 */ + +#ifndef GL_ARB_gpu_shader_fp64 +#define GL_ARB_gpu_shader_fp64 1 +#endif /* GL_ARB_gpu_shader_fp64 */ + +#ifndef GL_ARB_gpu_shader_int64 +#define GL_ARB_gpu_shader_int64 1 +#define GL_INT64_ARB 0x140E +#define GL_INT64_VEC2_ARB 0x8FE9 +#define GL_INT64_VEC3_ARB 0x8FEA +#define GL_INT64_VEC4_ARB 0x8FEB +#define GL_UNSIGNED_INT64_VEC2_ARB 0x8FF5 +#define GL_UNSIGNED_INT64_VEC3_ARB 0x8FF6 
+#define GL_UNSIGNED_INT64_VEC4_ARB 0x8FF7 +typedef void (APIENTRYP PFNGLUNIFORM1I64ARBPROC) (GLint location, GLint64 x); +typedef void (APIENTRYP PFNGLUNIFORM2I64ARBPROC) (GLint location, GLint64 x, GLint64 y); +typedef void (APIENTRYP PFNGLUNIFORM3I64ARBPROC) (GLint location, GLint64 x, GLint64 y, GLint64 z); +typedef void (APIENTRYP PFNGLUNIFORM4I64ARBPROC) (GLint location, GLint64 x, GLint64 y, GLint64 z, GLint64 w); +typedef void (APIENTRYP PFNGLUNIFORM1I64VARBPROC) (GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM2I64VARBPROC) (GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM3I64VARBPROC) (GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM4I64VARBPROC) (GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM1UI64ARBPROC) (GLint location, GLuint64 x); +typedef void (APIENTRYP PFNGLUNIFORM2UI64ARBPROC) (GLint location, GLuint64 x, GLuint64 y); +typedef void (APIENTRYP PFNGLUNIFORM3UI64ARBPROC) (GLint location, GLuint64 x, GLuint64 y, GLuint64 z); +typedef void (APIENTRYP PFNGLUNIFORM4UI64ARBPROC) (GLint location, GLuint64 x, GLuint64 y, GLuint64 z, GLuint64 w); +typedef void (APIENTRYP PFNGLUNIFORM1UI64VARBPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM2UI64VARBPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM3UI64VARBPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLUNIFORM4UI64VARBPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLGETUNIFORMI64VARBPROC) (GLuint program, GLint location, GLint64 *params); +typedef void (APIENTRYP PFNGLGETUNIFORMUI64VARBPROC) (GLuint program, GLint location, GLuint64 *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMI64VARBPROC) (GLuint program, GLint location, GLsizei bufSize, GLint64 *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMUI64VARBPROC) (GLuint program, GLint location, GLsizei bufSize, GLuint64 *params); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1I64ARBPROC) (GLuint program, GLint location, GLint64 x); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2I64ARBPROC) (GLuint program, GLint location, GLint64 x, GLint64 y); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3I64ARBPROC) (GLuint program, GLint location, GLint64 x, GLint64 y, GLint64 z); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4I64ARBPROC) (GLuint program, GLint location, GLint64 x, GLint64 y, GLint64 z, GLint64 w); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1I64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2I64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3I64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4I64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UI64ARBPROC) (GLuint program, GLint location, GLuint64 x); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UI64ARBPROC) (GLuint program, GLint location, GLuint64 x, GLuint64 y); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UI64ARBPROC) (GLuint program, GLint location, GLuint64 x, GLuint64 y, GLuint64 z); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UI64ARBPROC) (GLuint program, GLint 
location, GLuint64 x, GLuint64 y, GLuint64 z, GLuint64 w); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UI64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UI64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UI64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UI64VARBPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glUniform1i64ARB (GLint location, GLint64 x); +GLAPI void APIENTRY glUniform2i64ARB (GLint location, GLint64 x, GLint64 y); +GLAPI void APIENTRY glUniform3i64ARB (GLint location, GLint64 x, GLint64 y, GLint64 z); +GLAPI void APIENTRY glUniform4i64ARB (GLint location, GLint64 x, GLint64 y, GLint64 z, GLint64 w); +GLAPI void APIENTRY glUniform1i64vARB (GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glUniform2i64vARB (GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glUniform3i64vARB (GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glUniform4i64vARB (GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glUniform1ui64ARB (GLint location, GLuint64 x); +GLAPI void APIENTRY glUniform2ui64ARB (GLint location, GLuint64 x, GLuint64 y); +GLAPI void APIENTRY glUniform3ui64ARB (GLint location, GLuint64 x, GLuint64 y, GLuint64 z); +GLAPI void APIENTRY glUniform4ui64ARB (GLint location, GLuint64 x, GLuint64 y, GLuint64 z, GLuint64 w); +GLAPI void APIENTRY glUniform1ui64vARB (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glUniform2ui64vARB (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glUniform3ui64vARB (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glUniform4ui64vARB (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glGetUniformi64vARB (GLuint program, GLint location, GLint64 *params); +GLAPI void APIENTRY glGetUniformui64vARB (GLuint program, GLint location, GLuint64 *params); +GLAPI void APIENTRY glGetnUniformi64vARB (GLuint program, GLint location, GLsizei bufSize, GLint64 *params); +GLAPI void APIENTRY glGetnUniformui64vARB (GLuint program, GLint location, GLsizei bufSize, GLuint64 *params); +GLAPI void APIENTRY glProgramUniform1i64ARB (GLuint program, GLint location, GLint64 x); +GLAPI void APIENTRY glProgramUniform2i64ARB (GLuint program, GLint location, GLint64 x, GLint64 y); +GLAPI void APIENTRY glProgramUniform3i64ARB (GLuint program, GLint location, GLint64 x, GLint64 y, GLint64 z); +GLAPI void APIENTRY glProgramUniform4i64ARB (GLuint program, GLint location, GLint64 x, GLint64 y, GLint64 z, GLint64 w); +GLAPI void APIENTRY glProgramUniform1i64vARB (GLuint program, GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glProgramUniform2i64vARB (GLuint program, GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glProgramUniform3i64vARB (GLuint program, GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glProgramUniform4i64vARB (GLuint program, GLint location, GLsizei count, const GLint64 *value); +GLAPI void APIENTRY glProgramUniform1ui64ARB (GLuint program, GLint location, GLuint64 x); +GLAPI void APIENTRY glProgramUniform2ui64ARB (GLuint program, GLint location, GLuint64 x, GLuint64 y); 
+GLAPI void APIENTRY glProgramUniform3ui64ARB (GLuint program, GLint location, GLuint64 x, GLuint64 y, GLuint64 z); +GLAPI void APIENTRY glProgramUniform4ui64ARB (GLuint program, GLint location, GLuint64 x, GLuint64 y, GLuint64 z, GLuint64 w); +GLAPI void APIENTRY glProgramUniform1ui64vARB (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glProgramUniform2ui64vARB (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glProgramUniform3ui64vARB (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glProgramUniform4ui64vARB (GLuint program, GLint location, GLsizei count, const GLuint64 *value); +#endif +#endif /* GL_ARB_gpu_shader_int64 */ + +#ifndef GL_ARB_half_float_pixel +#define GL_ARB_half_float_pixel 1 +typedef unsigned short GLhalfARB; +#define GL_HALF_FLOAT_ARB 0x140B +#endif /* GL_ARB_half_float_pixel */ + +#ifndef GL_ARB_half_float_vertex +#define GL_ARB_half_float_vertex 1 +#endif /* GL_ARB_half_float_vertex */ + +#ifndef GL_ARB_imaging +#define GL_ARB_imaging 1 +#define GL_CONVOLUTION_1D 0x8010 +#define GL_CONVOLUTION_2D 0x8011 +#define GL_SEPARABLE_2D 0x8012 +#define GL_CONVOLUTION_BORDER_MODE 0x8013 +#define GL_CONVOLUTION_FILTER_SCALE 0x8014 +#define GL_CONVOLUTION_FILTER_BIAS 0x8015 +#define GL_REDUCE 0x8016 +#define GL_CONVOLUTION_FORMAT 0x8017 +#define GL_CONVOLUTION_WIDTH 0x8018 +#define GL_CONVOLUTION_HEIGHT 0x8019 +#define GL_MAX_CONVOLUTION_WIDTH 0x801A +#define GL_MAX_CONVOLUTION_HEIGHT 0x801B +#define GL_POST_CONVOLUTION_RED_SCALE 0x801C +#define GL_POST_CONVOLUTION_GREEN_SCALE 0x801D +#define GL_POST_CONVOLUTION_BLUE_SCALE 0x801E +#define GL_POST_CONVOLUTION_ALPHA_SCALE 0x801F +#define GL_POST_CONVOLUTION_RED_BIAS 0x8020 +#define GL_POST_CONVOLUTION_GREEN_BIAS 0x8021 +#define GL_POST_CONVOLUTION_BLUE_BIAS 0x8022 +#define GL_POST_CONVOLUTION_ALPHA_BIAS 0x8023 +#define GL_HISTOGRAM 0x8024 +#define GL_PROXY_HISTOGRAM 0x8025 +#define GL_HISTOGRAM_WIDTH 0x8026 +#define GL_HISTOGRAM_FORMAT 0x8027 +#define GL_HISTOGRAM_RED_SIZE 0x8028 +#define GL_HISTOGRAM_GREEN_SIZE 0x8029 +#define GL_HISTOGRAM_BLUE_SIZE 0x802A +#define GL_HISTOGRAM_ALPHA_SIZE 0x802B +#define GL_HISTOGRAM_LUMINANCE_SIZE 0x802C +#define GL_HISTOGRAM_SINK 0x802D +#define GL_MINMAX 0x802E +#define GL_MINMAX_FORMAT 0x802F +#define GL_MINMAX_SINK 0x8030 +#define GL_TABLE_TOO_LARGE 0x8031 +#define GL_COLOR_MATRIX 0x80B1 +#define GL_COLOR_MATRIX_STACK_DEPTH 0x80B2 +#define GL_MAX_COLOR_MATRIX_STACK_DEPTH 0x80B3 +#define GL_POST_COLOR_MATRIX_RED_SCALE 0x80B4 +#define GL_POST_COLOR_MATRIX_GREEN_SCALE 0x80B5 +#define GL_POST_COLOR_MATRIX_BLUE_SCALE 0x80B6 +#define GL_POST_COLOR_MATRIX_ALPHA_SCALE 0x80B7 +#define GL_POST_COLOR_MATRIX_RED_BIAS 0x80B8 +#define GL_POST_COLOR_MATRIX_GREEN_BIAS 0x80B9 +#define GL_POST_COLOR_MATRIX_BLUE_BIAS 0x80BA +#define GL_POST_COLOR_MATRIX_ALPHA_BIAS 0x80BB +#define GL_COLOR_TABLE 0x80D0 +#define GL_POST_CONVOLUTION_COLOR_TABLE 0x80D1 +#define GL_POST_COLOR_MATRIX_COLOR_TABLE 0x80D2 +#define GL_PROXY_COLOR_TABLE 0x80D3 +#define GL_PROXY_POST_CONVOLUTION_COLOR_TABLE 0x80D4 +#define GL_PROXY_POST_COLOR_MATRIX_COLOR_TABLE 0x80D5 +#define GL_COLOR_TABLE_SCALE 0x80D6 +#define GL_COLOR_TABLE_BIAS 0x80D7 +#define GL_COLOR_TABLE_FORMAT 0x80D8 +#define GL_COLOR_TABLE_WIDTH 0x80D9 +#define GL_COLOR_TABLE_RED_SIZE 0x80DA +#define GL_COLOR_TABLE_GREEN_SIZE 0x80DB +#define GL_COLOR_TABLE_BLUE_SIZE 0x80DC +#define GL_COLOR_TABLE_ALPHA_SIZE 0x80DD +#define 
GL_COLOR_TABLE_LUMINANCE_SIZE 0x80DE +#define GL_COLOR_TABLE_INTENSITY_SIZE 0x80DF +#define GL_CONSTANT_BORDER 0x8151 +#define GL_REPLICATE_BORDER 0x8153 +#define GL_CONVOLUTION_BORDER_COLOR 0x8154 +typedef void (APIENTRYP PFNGLCOLORTABLEPROC) (GLenum target, GLenum internalformat, GLsizei width, GLenum format, GLenum type, const void *table); +typedef void (APIENTRYP PFNGLCOLORTABLEPARAMETERFVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLCOLORTABLEPARAMETERIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLCOPYCOLORTABLEPROC) (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLGETCOLORTABLEPROC) (GLenum target, GLenum format, GLenum type, void *table); +typedef void (APIENTRYP PFNGLGETCOLORTABLEPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETCOLORTABLEPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLCOLORSUBTABLEPROC) (GLenum target, GLsizei start, GLsizei count, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLCOPYCOLORSUBTABLEPROC) (GLenum target, GLsizei start, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLCONVOLUTIONFILTER1DPROC) (GLenum target, GLenum internalformat, GLsizei width, GLenum format, GLenum type, const void *image); +typedef void (APIENTRYP PFNGLCONVOLUTIONFILTER2DPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *image); +typedef void (APIENTRYP PFNGLCONVOLUTIONPARAMETERFPROC) (GLenum target, GLenum pname, GLfloat params); +typedef void (APIENTRYP PFNGLCONVOLUTIONPARAMETERFVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLCONVOLUTIONPARAMETERIPROC) (GLenum target, GLenum pname, GLint params); +typedef void (APIENTRYP PFNGLCONVOLUTIONPARAMETERIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLCOPYCONVOLUTIONFILTER1DPROC) (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLCOPYCONVOLUTIONFILTER2DPROC) (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETCONVOLUTIONFILTERPROC) (GLenum target, GLenum format, GLenum type, void *image); +typedef void (APIENTRYP PFNGLGETCONVOLUTIONPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETCONVOLUTIONPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETSEPARABLEFILTERPROC) (GLenum target, GLenum format, GLenum type, void *row, void *column, void *span); +typedef void (APIENTRYP PFNGLSEPARABLEFILTER2DPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *row, const void *column); +typedef void (APIENTRYP PFNGLGETHISTOGRAMPROC) (GLenum target, GLboolean reset, GLenum format, GLenum type, void *values); +typedef void (APIENTRYP PFNGLGETHISTOGRAMPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETHISTOGRAMPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETMINMAXPROC) (GLenum target, GLboolean reset, GLenum format, GLenum type, void *values); +typedef void (APIENTRYP PFNGLGETMINMAXPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP 
PFNGLGETMINMAXPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLHISTOGRAMPROC) (GLenum target, GLsizei width, GLenum internalformat, GLboolean sink); +typedef void (APIENTRYP PFNGLMINMAXPROC) (GLenum target, GLenum internalformat, GLboolean sink); +typedef void (APIENTRYP PFNGLRESETHISTOGRAMPROC) (GLenum target); +typedef void (APIENTRYP PFNGLRESETMINMAXPROC) (GLenum target); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glColorTable (GLenum target, GLenum internalformat, GLsizei width, GLenum format, GLenum type, const void *table); +GLAPI void APIENTRY glColorTableParameterfv (GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glColorTableParameteriv (GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glCopyColorTable (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY glGetColorTable (GLenum target, GLenum format, GLenum type, void *table); +GLAPI void APIENTRY glGetColorTableParameterfv (GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetColorTableParameteriv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glColorSubTable (GLenum target, GLsizei start, GLsizei count, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glCopyColorSubTable (GLenum target, GLsizei start, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY glConvolutionFilter1D (GLenum target, GLenum internalformat, GLsizei width, GLenum format, GLenum type, const void *image); +GLAPI void APIENTRY glConvolutionFilter2D (GLenum target, GLenum internalformat, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *image); +GLAPI void APIENTRY glConvolutionParameterf (GLenum target, GLenum pname, GLfloat params); +GLAPI void APIENTRY glConvolutionParameterfv (GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glConvolutionParameteri (GLenum target, GLenum pname, GLint params); +GLAPI void APIENTRY glConvolutionParameteriv (GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glCopyConvolutionFilter1D (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY glCopyConvolutionFilter2D (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetConvolutionFilter (GLenum target, GLenum format, GLenum type, void *image); +GLAPI void APIENTRY glGetConvolutionParameterfv (GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetConvolutionParameteriv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetSeparableFilter (GLenum target, GLenum format, GLenum type, void *row, void *column, void *span); +GLAPI void APIENTRY glSeparableFilter2D (GLenum target, GLenum internalformat, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *row, const void *column); +GLAPI void APIENTRY glGetHistogram (GLenum target, GLboolean reset, GLenum format, GLenum type, void *values); +GLAPI void APIENTRY glGetHistogramParameterfv (GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetHistogramParameteriv (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetMinmax (GLenum target, GLboolean reset, GLenum format, GLenum type, void *values); +GLAPI void APIENTRY glGetMinmaxParameterfv (GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetMinmaxParameteriv (GLenum target, GLenum pname, GLint *params); +GLAPI void 
APIENTRY glHistogram (GLenum target, GLsizei width, GLenum internalformat, GLboolean sink); +GLAPI void APIENTRY glMinmax (GLenum target, GLenum internalformat, GLboolean sink); +GLAPI void APIENTRY glResetHistogram (GLenum target); +GLAPI void APIENTRY glResetMinmax (GLenum target); +#endif +#endif /* GL_ARB_imaging */ + +#ifndef GL_ARB_indirect_parameters +#define GL_ARB_indirect_parameters 1 +#define GL_PARAMETER_BUFFER_ARB 0x80EE +#define GL_PARAMETER_BUFFER_BINDING_ARB 0x80EF +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTCOUNTARBPROC) (GLenum mode, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTCOUNTARBPROC) (GLenum mode, GLenum type, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMultiDrawArraysIndirectCountARB (GLenum mode, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +GLAPI void APIENTRY glMultiDrawElementsIndirectCountARB (GLenum mode, GLenum type, const void *indirect, GLintptr drawcount, GLsizei maxdrawcount, GLsizei stride); +#endif +#endif /* GL_ARB_indirect_parameters */ + +#ifndef GL_ARB_instanced_arrays +#define GL_ARB_instanced_arrays 1 +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR_ARB 0x88FE +typedef void (APIENTRYP PFNGLVERTEXATTRIBDIVISORARBPROC) (GLuint index, GLuint divisor); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexAttribDivisorARB (GLuint index, GLuint divisor); +#endif +#endif /* GL_ARB_instanced_arrays */ + +#ifndef GL_ARB_internalformat_query +#define GL_ARB_internalformat_query 1 +#endif /* GL_ARB_internalformat_query */ + +#ifndef GL_ARB_internalformat_query2 +#define GL_ARB_internalformat_query2 1 +#define GL_SRGB_DECODE_ARB 0x8299 +#endif /* GL_ARB_internalformat_query2 */ + +#ifndef GL_ARB_invalidate_subdata +#define GL_ARB_invalidate_subdata 1 +#endif /* GL_ARB_invalidate_subdata */ + +#ifndef GL_ARB_map_buffer_alignment +#define GL_ARB_map_buffer_alignment 1 +#endif /* GL_ARB_map_buffer_alignment */ + +#ifndef GL_ARB_map_buffer_range +#define GL_ARB_map_buffer_range 1 +#endif /* GL_ARB_map_buffer_range */ + +#ifndef GL_ARB_matrix_palette +#define GL_ARB_matrix_palette 1 +#define GL_MATRIX_PALETTE_ARB 0x8840 +#define GL_MAX_MATRIX_PALETTE_STACK_DEPTH_ARB 0x8841 +#define GL_MAX_PALETTE_MATRICES_ARB 0x8842 +#define GL_CURRENT_PALETTE_MATRIX_ARB 0x8843 +#define GL_MATRIX_INDEX_ARRAY_ARB 0x8844 +#define GL_CURRENT_MATRIX_INDEX_ARB 0x8845 +#define GL_MATRIX_INDEX_ARRAY_SIZE_ARB 0x8846 +#define GL_MATRIX_INDEX_ARRAY_TYPE_ARB 0x8847 +#define GL_MATRIX_INDEX_ARRAY_STRIDE_ARB 0x8848 +#define GL_MATRIX_INDEX_ARRAY_POINTER_ARB 0x8849 +typedef void (APIENTRYP PFNGLCURRENTPALETTEMATRIXARBPROC) (GLint index); +typedef void (APIENTRYP PFNGLMATRIXINDEXUBVARBPROC) (GLint size, const GLubyte *indices); +typedef void (APIENTRYP PFNGLMATRIXINDEXUSVARBPROC) (GLint size, const GLushort *indices); +typedef void (APIENTRYP PFNGLMATRIXINDEXUIVARBPROC) (GLint size, const GLuint *indices); +typedef void (APIENTRYP PFNGLMATRIXINDEXPOINTERARBPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCurrentPaletteMatrixARB (GLint index); +GLAPI void APIENTRY glMatrixIndexubvARB (GLint size, const GLubyte *indices); +GLAPI void APIENTRY glMatrixIndexusvARB (GLint size, const GLushort *indices); +GLAPI void APIENTRY glMatrixIndexuivARB (GLint size, const GLuint *indices); +GLAPI void APIENTRY 
glMatrixIndexPointerARB (GLint size, GLenum type, GLsizei stride, const void *pointer); +#endif +#endif /* GL_ARB_matrix_palette */ + +#ifndef GL_ARB_multi_bind +#define GL_ARB_multi_bind 1 +#endif /* GL_ARB_multi_bind */ + +#ifndef GL_ARB_multi_draw_indirect +#define GL_ARB_multi_draw_indirect 1 +#endif /* GL_ARB_multi_draw_indirect */ + +#ifndef GL_ARB_multisample +#define GL_ARB_multisample 1 +#define GL_MULTISAMPLE_ARB 0x809D +#define GL_SAMPLE_ALPHA_TO_COVERAGE_ARB 0x809E +#define GL_SAMPLE_ALPHA_TO_ONE_ARB 0x809F +#define GL_SAMPLE_COVERAGE_ARB 0x80A0 +#define GL_SAMPLE_BUFFERS_ARB 0x80A8 +#define GL_SAMPLES_ARB 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE_ARB 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT_ARB 0x80AB +#define GL_MULTISAMPLE_BIT_ARB 0x20000000 +typedef void (APIENTRYP PFNGLSAMPLECOVERAGEARBPROC) (GLfloat value, GLboolean invert); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSampleCoverageARB (GLfloat value, GLboolean invert); +#endif +#endif /* GL_ARB_multisample */ + +#ifndef GL_ARB_multitexture +#define GL_ARB_multitexture 1 +#define GL_TEXTURE0_ARB 0x84C0 +#define GL_TEXTURE1_ARB 0x84C1 +#define GL_TEXTURE2_ARB 0x84C2 +#define GL_TEXTURE3_ARB 0x84C3 +#define GL_TEXTURE4_ARB 0x84C4 +#define GL_TEXTURE5_ARB 0x84C5 +#define GL_TEXTURE6_ARB 0x84C6 +#define GL_TEXTURE7_ARB 0x84C7 +#define GL_TEXTURE8_ARB 0x84C8 +#define GL_TEXTURE9_ARB 0x84C9 +#define GL_TEXTURE10_ARB 0x84CA +#define GL_TEXTURE11_ARB 0x84CB +#define GL_TEXTURE12_ARB 0x84CC +#define GL_TEXTURE13_ARB 0x84CD +#define GL_TEXTURE14_ARB 0x84CE +#define GL_TEXTURE15_ARB 0x84CF +#define GL_TEXTURE16_ARB 0x84D0 +#define GL_TEXTURE17_ARB 0x84D1 +#define GL_TEXTURE18_ARB 0x84D2 +#define GL_TEXTURE19_ARB 0x84D3 +#define GL_TEXTURE20_ARB 0x84D4 +#define GL_TEXTURE21_ARB 0x84D5 +#define GL_TEXTURE22_ARB 0x84D6 +#define GL_TEXTURE23_ARB 0x84D7 +#define GL_TEXTURE24_ARB 0x84D8 +#define GL_TEXTURE25_ARB 0x84D9 +#define GL_TEXTURE26_ARB 0x84DA +#define GL_TEXTURE27_ARB 0x84DB +#define GL_TEXTURE28_ARB 0x84DC +#define GL_TEXTURE29_ARB 0x84DD +#define GL_TEXTURE30_ARB 0x84DE +#define GL_TEXTURE31_ARB 0x84DF +#define GL_ACTIVE_TEXTURE_ARB 0x84E0 +#define GL_CLIENT_ACTIVE_TEXTURE_ARB 0x84E1 +#define GL_MAX_TEXTURE_UNITS_ARB 0x84E2 +typedef void (APIENTRYP PFNGLACTIVETEXTUREARBPROC) (GLenum texture); +typedef void (APIENTRYP PFNGLCLIENTACTIVETEXTUREARBPROC) (GLenum texture); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1DARBPROC) (GLenum target, GLdouble s); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1DVARBPROC) (GLenum target, const GLdouble *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1FARBPROC) (GLenum target, GLfloat s); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1FVARBPROC) (GLenum target, const GLfloat *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1IARBPROC) (GLenum target, GLint s); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1IVARBPROC) (GLenum target, const GLint *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1SARBPROC) (GLenum target, GLshort s); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1SVARBPROC) (GLenum target, const GLshort *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2DARBPROC) (GLenum target, GLdouble s, GLdouble t); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2DVARBPROC) (GLenum target, const GLdouble *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2FARBPROC) (GLenum target, GLfloat s, GLfloat t); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2FVARBPROC) (GLenum target, const GLfloat *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2IARBPROC) (GLenum target, GLint s, GLint t); +typedef void (APIENTRYP 
PFNGLMULTITEXCOORD2IVARBPROC) (GLenum target, const GLint *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2SARBPROC) (GLenum target, GLshort s, GLshort t); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2SVARBPROC) (GLenum target, const GLshort *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3DARBPROC) (GLenum target, GLdouble s, GLdouble t, GLdouble r); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3DVARBPROC) (GLenum target, const GLdouble *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3FARBPROC) (GLenum target, GLfloat s, GLfloat t, GLfloat r); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3FVARBPROC) (GLenum target, const GLfloat *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3IARBPROC) (GLenum target, GLint s, GLint t, GLint r); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3IVARBPROC) (GLenum target, const GLint *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3SARBPROC) (GLenum target, GLshort s, GLshort t, GLshort r); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3SVARBPROC) (GLenum target, const GLshort *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4DARBPROC) (GLenum target, GLdouble s, GLdouble t, GLdouble r, GLdouble q); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4DVARBPROC) (GLenum target, const GLdouble *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4FARBPROC) (GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4FVARBPROC) (GLenum target, const GLfloat *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4IARBPROC) (GLenum target, GLint s, GLint t, GLint r, GLint q); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4IVARBPROC) (GLenum target, const GLint *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4SARBPROC) (GLenum target, GLshort s, GLshort t, GLshort r, GLshort q); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4SVARBPROC) (GLenum target, const GLshort *v); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glActiveTextureARB (GLenum texture); +GLAPI void APIENTRY glClientActiveTextureARB (GLenum texture); +GLAPI void APIENTRY glMultiTexCoord1dARB (GLenum target, GLdouble s); +GLAPI void APIENTRY glMultiTexCoord1dvARB (GLenum target, const GLdouble *v); +GLAPI void APIENTRY glMultiTexCoord1fARB (GLenum target, GLfloat s); +GLAPI void APIENTRY glMultiTexCoord1fvARB (GLenum target, const GLfloat *v); +GLAPI void APIENTRY glMultiTexCoord1iARB (GLenum target, GLint s); +GLAPI void APIENTRY glMultiTexCoord1ivARB (GLenum target, const GLint *v); +GLAPI void APIENTRY glMultiTexCoord1sARB (GLenum target, GLshort s); +GLAPI void APIENTRY glMultiTexCoord1svARB (GLenum target, const GLshort *v); +GLAPI void APIENTRY glMultiTexCoord2dARB (GLenum target, GLdouble s, GLdouble t); +GLAPI void APIENTRY glMultiTexCoord2dvARB (GLenum target, const GLdouble *v); +GLAPI void APIENTRY glMultiTexCoord2fARB (GLenum target, GLfloat s, GLfloat t); +GLAPI void APIENTRY glMultiTexCoord2fvARB (GLenum target, const GLfloat *v); +GLAPI void APIENTRY glMultiTexCoord2iARB (GLenum target, GLint s, GLint t); +GLAPI void APIENTRY glMultiTexCoord2ivARB (GLenum target, const GLint *v); +GLAPI void APIENTRY glMultiTexCoord2sARB (GLenum target, GLshort s, GLshort t); +GLAPI void APIENTRY glMultiTexCoord2svARB (GLenum target, const GLshort *v); +GLAPI void APIENTRY glMultiTexCoord3dARB (GLenum target, GLdouble s, GLdouble t, GLdouble r); +GLAPI void APIENTRY glMultiTexCoord3dvARB (GLenum target, const GLdouble *v); +GLAPI void APIENTRY glMultiTexCoord3fARB (GLenum target, GLfloat s, GLfloat t, GLfloat r); +GLAPI void APIENTRY glMultiTexCoord3fvARB (GLenum target, const GLfloat *v); 
+GLAPI void APIENTRY glMultiTexCoord3iARB (GLenum target, GLint s, GLint t, GLint r); +GLAPI void APIENTRY glMultiTexCoord3ivARB (GLenum target, const GLint *v); +GLAPI void APIENTRY glMultiTexCoord3sARB (GLenum target, GLshort s, GLshort t, GLshort r); +GLAPI void APIENTRY glMultiTexCoord3svARB (GLenum target, const GLshort *v); +GLAPI void APIENTRY glMultiTexCoord4dARB (GLenum target, GLdouble s, GLdouble t, GLdouble r, GLdouble q); +GLAPI void APIENTRY glMultiTexCoord4dvARB (GLenum target, const GLdouble *v); +GLAPI void APIENTRY glMultiTexCoord4fARB (GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q); +GLAPI void APIENTRY glMultiTexCoord4fvARB (GLenum target, const GLfloat *v); +GLAPI void APIENTRY glMultiTexCoord4iARB (GLenum target, GLint s, GLint t, GLint r, GLint q); +GLAPI void APIENTRY glMultiTexCoord4ivARB (GLenum target, const GLint *v); +GLAPI void APIENTRY glMultiTexCoord4sARB (GLenum target, GLshort s, GLshort t, GLshort r, GLshort q); +GLAPI void APIENTRY glMultiTexCoord4svARB (GLenum target, const GLshort *v); +#endif +#endif /* GL_ARB_multitexture */ + +#ifndef GL_ARB_occlusion_query +#define GL_ARB_occlusion_query 1 +#define GL_QUERY_COUNTER_BITS_ARB 0x8864 +#define GL_CURRENT_QUERY_ARB 0x8865 +#define GL_QUERY_RESULT_ARB 0x8866 +#define GL_QUERY_RESULT_AVAILABLE_ARB 0x8867 +#define GL_SAMPLES_PASSED_ARB 0x8914 +typedef void (APIENTRYP PFNGLGENQUERIESARBPROC) (GLsizei n, GLuint *ids); +typedef void (APIENTRYP PFNGLDELETEQUERIESARBPROC) (GLsizei n, const GLuint *ids); +typedef GLboolean (APIENTRYP PFNGLISQUERYARBPROC) (GLuint id); +typedef void (APIENTRYP PFNGLBEGINQUERYARBPROC) (GLenum target, GLuint id); +typedef void (APIENTRYP PFNGLENDQUERYARBPROC) (GLenum target); +typedef void (APIENTRYP PFNGLGETQUERYIVARBPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETQUERYOBJECTIVARBPROC) (GLuint id, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETQUERYOBJECTUIVARBPROC) (GLuint id, GLenum pname, GLuint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGenQueriesARB (GLsizei n, GLuint *ids); +GLAPI void APIENTRY glDeleteQueriesARB (GLsizei n, const GLuint *ids); +GLAPI GLboolean APIENTRY glIsQueryARB (GLuint id); +GLAPI void APIENTRY glBeginQueryARB (GLenum target, GLuint id); +GLAPI void APIENTRY glEndQueryARB (GLenum target); +GLAPI void APIENTRY glGetQueryivARB (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetQueryObjectivARB (GLuint id, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetQueryObjectuivARB (GLuint id, GLenum pname, GLuint *params); +#endif +#endif /* GL_ARB_occlusion_query */ + +#ifndef GL_ARB_occlusion_query2 +#define GL_ARB_occlusion_query2 1 +#endif /* GL_ARB_occlusion_query2 */ + +#ifndef GL_ARB_parallel_shader_compile +#define GL_ARB_parallel_shader_compile 1 +#define GL_MAX_SHADER_COMPILER_THREADS_ARB 0x91B0 +#define GL_COMPLETION_STATUS_ARB 0x91B1 +typedef void (APIENTRYP PFNGLMAXSHADERCOMPILERTHREADSARBPROC) (GLuint count); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMaxShaderCompilerThreadsARB (GLuint count); +#endif +#endif /* GL_ARB_parallel_shader_compile */ + +#ifndef GL_ARB_pipeline_statistics_query +#define GL_ARB_pipeline_statistics_query 1 +#define GL_VERTICES_SUBMITTED_ARB 0x82EE +#define GL_PRIMITIVES_SUBMITTED_ARB 0x82EF +#define GL_VERTEX_SHADER_INVOCATIONS_ARB 0x82F0 +#define GL_TESS_CONTROL_SHADER_PATCHES_ARB 0x82F1 +#define GL_TESS_EVALUATION_SHADER_INVOCATIONS_ARB 0x82F2 +#define GL_GEOMETRY_SHADER_PRIMITIVES_EMITTED_ARB 0x82F3 
+#define GL_FRAGMENT_SHADER_INVOCATIONS_ARB 0x82F4 +#define GL_COMPUTE_SHADER_INVOCATIONS_ARB 0x82F5 +#define GL_CLIPPING_INPUT_PRIMITIVES_ARB 0x82F6 +#define GL_CLIPPING_OUTPUT_PRIMITIVES_ARB 0x82F7 +#endif /* GL_ARB_pipeline_statistics_query */ + +#ifndef GL_ARB_pixel_buffer_object +#define GL_ARB_pixel_buffer_object 1 +#define GL_PIXEL_PACK_BUFFER_ARB 0x88EB +#define GL_PIXEL_UNPACK_BUFFER_ARB 0x88EC +#define GL_PIXEL_PACK_BUFFER_BINDING_ARB 0x88ED +#define GL_PIXEL_UNPACK_BUFFER_BINDING_ARB 0x88EF +#endif /* GL_ARB_pixel_buffer_object */ + +#ifndef GL_ARB_point_parameters +#define GL_ARB_point_parameters 1 +#define GL_POINT_SIZE_MIN_ARB 0x8126 +#define GL_POINT_SIZE_MAX_ARB 0x8127 +#define GL_POINT_FADE_THRESHOLD_SIZE_ARB 0x8128 +#define GL_POINT_DISTANCE_ATTENUATION_ARB 0x8129 +typedef void (APIENTRYP PFNGLPOINTPARAMETERFARBPROC) (GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLPOINTPARAMETERFVARBPROC) (GLenum pname, const GLfloat *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPointParameterfARB (GLenum pname, GLfloat param); +GLAPI void APIENTRY glPointParameterfvARB (GLenum pname, const GLfloat *params); +#endif +#endif /* GL_ARB_point_parameters */ + +#ifndef GL_ARB_point_sprite +#define GL_ARB_point_sprite 1 +#define GL_POINT_SPRITE_ARB 0x8861 +#define GL_COORD_REPLACE_ARB 0x8862 +#endif /* GL_ARB_point_sprite */ + +#ifndef GL_ARB_polygon_offset_clamp +#define GL_ARB_polygon_offset_clamp 1 +#endif /* GL_ARB_polygon_offset_clamp */ + +#ifndef GL_ARB_post_depth_coverage +#define GL_ARB_post_depth_coverage 1 +#endif /* GL_ARB_post_depth_coverage */ + +#ifndef GL_ARB_program_interface_query +#define GL_ARB_program_interface_query 1 +#endif /* GL_ARB_program_interface_query */ + +#ifndef GL_ARB_provoking_vertex +#define GL_ARB_provoking_vertex 1 +#endif /* GL_ARB_provoking_vertex */ + +#ifndef GL_ARB_query_buffer_object +#define GL_ARB_query_buffer_object 1 +#endif /* GL_ARB_query_buffer_object */ + +#ifndef GL_ARB_robust_buffer_access_behavior +#define GL_ARB_robust_buffer_access_behavior 1 +#endif /* GL_ARB_robust_buffer_access_behavior */ + +#ifndef GL_ARB_robustness +#define GL_ARB_robustness 1 +#define GL_CONTEXT_FLAG_ROBUST_ACCESS_BIT_ARB 0x00000004 +#define GL_LOSE_CONTEXT_ON_RESET_ARB 0x8252 +#define GL_GUILTY_CONTEXT_RESET_ARB 0x8253 +#define GL_INNOCENT_CONTEXT_RESET_ARB 0x8254 +#define GL_UNKNOWN_CONTEXT_RESET_ARB 0x8255 +#define GL_RESET_NOTIFICATION_STRATEGY_ARB 0x8256 +#define GL_NO_RESET_NOTIFICATION_ARB 0x8261 +typedef GLenum (APIENTRYP PFNGLGETGRAPHICSRESETSTATUSARBPROC) (void); +typedef void (APIENTRYP PFNGLGETNTEXIMAGEARBPROC) (GLenum target, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *img); +typedef void (APIENTRYP PFNGLREADNPIXELSARBPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (APIENTRYP PFNGLGETNCOMPRESSEDTEXIMAGEARBPROC) (GLenum target, GLint lod, GLsizei bufSize, void *img); +typedef void (APIENTRYP PFNGLGETNUNIFORMFVARBPROC) (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMIVARBPROC) (GLuint program, GLint location, GLsizei bufSize, GLint *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMUIVARBPROC) (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +typedef void (APIENTRYP PFNGLGETNUNIFORMDVARBPROC) (GLuint program, GLint location, GLsizei bufSize, GLdouble *params); +typedef void (APIENTRYP PFNGLGETNMAPDVARBPROC) (GLenum target, GLenum query, GLsizei 
bufSize, GLdouble *v); +typedef void (APIENTRYP PFNGLGETNMAPFVARBPROC) (GLenum target, GLenum query, GLsizei bufSize, GLfloat *v); +typedef void (APIENTRYP PFNGLGETNMAPIVARBPROC) (GLenum target, GLenum query, GLsizei bufSize, GLint *v); +typedef void (APIENTRYP PFNGLGETNPIXELMAPFVARBPROC) (GLenum map, GLsizei bufSize, GLfloat *values); +typedef void (APIENTRYP PFNGLGETNPIXELMAPUIVARBPROC) (GLenum map, GLsizei bufSize, GLuint *values); +typedef void (APIENTRYP PFNGLGETNPIXELMAPUSVARBPROC) (GLenum map, GLsizei bufSize, GLushort *values); +typedef void (APIENTRYP PFNGLGETNPOLYGONSTIPPLEARBPROC) (GLsizei bufSize, GLubyte *pattern); +typedef void (APIENTRYP PFNGLGETNCOLORTABLEARBPROC) (GLenum target, GLenum format, GLenum type, GLsizei bufSize, void *table); +typedef void (APIENTRYP PFNGLGETNCONVOLUTIONFILTERARBPROC) (GLenum target, GLenum format, GLenum type, GLsizei bufSize, void *image); +typedef void (APIENTRYP PFNGLGETNSEPARABLEFILTERARBPROC) (GLenum target, GLenum format, GLenum type, GLsizei rowBufSize, void *row, GLsizei columnBufSize, void *column, void *span); +typedef void (APIENTRYP PFNGLGETNHISTOGRAMARBPROC) (GLenum target, GLboolean reset, GLenum format, GLenum type, GLsizei bufSize, void *values); +typedef void (APIENTRYP PFNGLGETNMINMAXARBPROC) (GLenum target, GLboolean reset, GLenum format, GLenum type, GLsizei bufSize, void *values); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLenum APIENTRY glGetGraphicsResetStatusARB (void); +GLAPI void APIENTRY glGetnTexImageARB (GLenum target, GLint level, GLenum format, GLenum type, GLsizei bufSize, void *img); +GLAPI void APIENTRY glReadnPixelsARB (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GLAPI void APIENTRY glGetnCompressedTexImageARB (GLenum target, GLint lod, GLsizei bufSize, void *img); +GLAPI void APIENTRY glGetnUniformfvARB (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GLAPI void APIENTRY glGetnUniformivARB (GLuint program, GLint location, GLsizei bufSize, GLint *params); +GLAPI void APIENTRY glGetnUniformuivARB (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +GLAPI void APIENTRY glGetnUniformdvARB (GLuint program, GLint location, GLsizei bufSize, GLdouble *params); +GLAPI void APIENTRY glGetnMapdvARB (GLenum target, GLenum query, GLsizei bufSize, GLdouble *v); +GLAPI void APIENTRY glGetnMapfvARB (GLenum target, GLenum query, GLsizei bufSize, GLfloat *v); +GLAPI void APIENTRY glGetnMapivARB (GLenum target, GLenum query, GLsizei bufSize, GLint *v); +GLAPI void APIENTRY glGetnPixelMapfvARB (GLenum map, GLsizei bufSize, GLfloat *values); +GLAPI void APIENTRY glGetnPixelMapuivARB (GLenum map, GLsizei bufSize, GLuint *values); +GLAPI void APIENTRY glGetnPixelMapusvARB (GLenum map, GLsizei bufSize, GLushort *values); +GLAPI void APIENTRY glGetnPolygonStippleARB (GLsizei bufSize, GLubyte *pattern); +GLAPI void APIENTRY glGetnColorTableARB (GLenum target, GLenum format, GLenum type, GLsizei bufSize, void *table); +GLAPI void APIENTRY glGetnConvolutionFilterARB (GLenum target, GLenum format, GLenum type, GLsizei bufSize, void *image); +GLAPI void APIENTRY glGetnSeparableFilterARB (GLenum target, GLenum format, GLenum type, GLsizei rowBufSize, void *row, GLsizei columnBufSize, void *column, void *span); +GLAPI void APIENTRY glGetnHistogramARB (GLenum target, GLboolean reset, GLenum format, GLenum type, GLsizei bufSize, void *values); +GLAPI void APIENTRY glGetnMinmaxARB (GLenum target, GLboolean reset, GLenum format, GLenum type, GLsizei 
bufSize, void *values); +#endif +#endif /* GL_ARB_robustness */ + +#ifndef GL_ARB_robustness_isolation +#define GL_ARB_robustness_isolation 1 +#endif /* GL_ARB_robustness_isolation */ + +#ifndef GL_ARB_sample_locations +#define GL_ARB_sample_locations 1 +#define GL_SAMPLE_LOCATION_SUBPIXEL_BITS_ARB 0x933D +#define GL_SAMPLE_LOCATION_PIXEL_GRID_WIDTH_ARB 0x933E +#define GL_SAMPLE_LOCATION_PIXEL_GRID_HEIGHT_ARB 0x933F +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_TABLE_SIZE_ARB 0x9340 +#define GL_SAMPLE_LOCATION_ARB 0x8E50 +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_ARB 0x9341 +#define GL_FRAMEBUFFER_PROGRAMMABLE_SAMPLE_LOCATIONS_ARB 0x9342 +#define GL_FRAMEBUFFER_SAMPLE_LOCATION_PIXEL_GRID_ARB 0x9343 +typedef void (APIENTRYP PFNGLFRAMEBUFFERSAMPLELOCATIONSFVARBPROC) (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERSAMPLELOCATIONSFVARBPROC) (GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLEVALUATEDEPTHVALUESARBPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFramebufferSampleLocationsfvARB (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glNamedFramebufferSampleLocationsfvARB (GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glEvaluateDepthValuesARB (void); +#endif +#endif /* GL_ARB_sample_locations */ + +#ifndef GL_ARB_sample_shading +#define GL_ARB_sample_shading 1 +#define GL_SAMPLE_SHADING_ARB 0x8C36 +#define GL_MIN_SAMPLE_SHADING_VALUE_ARB 0x8C37 +typedef void (APIENTRYP PFNGLMINSAMPLESHADINGARBPROC) (GLfloat value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMinSampleShadingARB (GLfloat value); +#endif +#endif /* GL_ARB_sample_shading */ + +#ifndef GL_ARB_sampler_objects +#define GL_ARB_sampler_objects 1 +#endif /* GL_ARB_sampler_objects */ + +#ifndef GL_ARB_seamless_cube_map +#define GL_ARB_seamless_cube_map 1 +#endif /* GL_ARB_seamless_cube_map */ + +#ifndef GL_ARB_seamless_cubemap_per_texture +#define GL_ARB_seamless_cubemap_per_texture 1 +#endif /* GL_ARB_seamless_cubemap_per_texture */ + +#ifndef GL_ARB_separate_shader_objects +#define GL_ARB_separate_shader_objects 1 +#endif /* GL_ARB_separate_shader_objects */ + +#ifndef GL_ARB_shader_atomic_counter_ops +#define GL_ARB_shader_atomic_counter_ops 1 +#endif /* GL_ARB_shader_atomic_counter_ops */ + +#ifndef GL_ARB_shader_atomic_counters +#define GL_ARB_shader_atomic_counters 1 +#endif /* GL_ARB_shader_atomic_counters */ + +#ifndef GL_ARB_shader_ballot +#define GL_ARB_shader_ballot 1 +#endif /* GL_ARB_shader_ballot */ + +#ifndef GL_ARB_shader_bit_encoding +#define GL_ARB_shader_bit_encoding 1 +#endif /* GL_ARB_shader_bit_encoding */ + +#ifndef GL_ARB_shader_clock +#define GL_ARB_shader_clock 1 +#endif /* GL_ARB_shader_clock */ + +#ifndef GL_ARB_shader_draw_parameters +#define GL_ARB_shader_draw_parameters 1 +#endif /* GL_ARB_shader_draw_parameters */ + +#ifndef GL_ARB_shader_group_vote +#define GL_ARB_shader_group_vote 1 +#endif /* GL_ARB_shader_group_vote */ + +#ifndef GL_ARB_shader_image_load_store +#define GL_ARB_shader_image_load_store 1 +#endif /* GL_ARB_shader_image_load_store */ + +#ifndef GL_ARB_shader_image_size +#define GL_ARB_shader_image_size 1 +#endif /* GL_ARB_shader_image_size */ + +#ifndef GL_ARB_shader_objects +#define GL_ARB_shader_objects 1 +#ifdef __APPLE__ +typedef void *GLhandleARB; +#else +typedef unsigned int GLhandleARB; +#endif +typedef char GLcharARB; +#define GL_PROGRAM_OBJECT_ARB 0x8B40 +#define 
GL_SHADER_OBJECT_ARB 0x8B48 +#define GL_OBJECT_TYPE_ARB 0x8B4E +#define GL_OBJECT_SUBTYPE_ARB 0x8B4F +#define GL_FLOAT_VEC2_ARB 0x8B50 +#define GL_FLOAT_VEC3_ARB 0x8B51 +#define GL_FLOAT_VEC4_ARB 0x8B52 +#define GL_INT_VEC2_ARB 0x8B53 +#define GL_INT_VEC3_ARB 0x8B54 +#define GL_INT_VEC4_ARB 0x8B55 +#define GL_BOOL_ARB 0x8B56 +#define GL_BOOL_VEC2_ARB 0x8B57 +#define GL_BOOL_VEC3_ARB 0x8B58 +#define GL_BOOL_VEC4_ARB 0x8B59 +#define GL_FLOAT_MAT2_ARB 0x8B5A +#define GL_FLOAT_MAT3_ARB 0x8B5B +#define GL_FLOAT_MAT4_ARB 0x8B5C +#define GL_SAMPLER_1D_ARB 0x8B5D +#define GL_SAMPLER_2D_ARB 0x8B5E +#define GL_SAMPLER_3D_ARB 0x8B5F +#define GL_SAMPLER_CUBE_ARB 0x8B60 +#define GL_SAMPLER_1D_SHADOW_ARB 0x8B61 +#define GL_SAMPLER_2D_SHADOW_ARB 0x8B62 +#define GL_SAMPLER_2D_RECT_ARB 0x8B63 +#define GL_SAMPLER_2D_RECT_SHADOW_ARB 0x8B64 +#define GL_OBJECT_DELETE_STATUS_ARB 0x8B80 +#define GL_OBJECT_COMPILE_STATUS_ARB 0x8B81 +#define GL_OBJECT_LINK_STATUS_ARB 0x8B82 +#define GL_OBJECT_VALIDATE_STATUS_ARB 0x8B83 +#define GL_OBJECT_INFO_LOG_LENGTH_ARB 0x8B84 +#define GL_OBJECT_ATTACHED_OBJECTS_ARB 0x8B85 +#define GL_OBJECT_ACTIVE_UNIFORMS_ARB 0x8B86 +#define GL_OBJECT_ACTIVE_UNIFORM_MAX_LENGTH_ARB 0x8B87 +#define GL_OBJECT_SHADER_SOURCE_LENGTH_ARB 0x8B88 +typedef void (APIENTRYP PFNGLDELETEOBJECTARBPROC) (GLhandleARB obj); +typedef GLhandleARB (APIENTRYP PFNGLGETHANDLEARBPROC) (GLenum pname); +typedef void (APIENTRYP PFNGLDETACHOBJECTARBPROC) (GLhandleARB containerObj, GLhandleARB attachedObj); +typedef GLhandleARB (APIENTRYP PFNGLCREATESHADEROBJECTARBPROC) (GLenum shaderType); +typedef void (APIENTRYP PFNGLSHADERSOURCEARBPROC) (GLhandleARB shaderObj, GLsizei count, const GLcharARB **string, const GLint *length); +typedef void (APIENTRYP PFNGLCOMPILESHADERARBPROC) (GLhandleARB shaderObj); +typedef GLhandleARB (APIENTRYP PFNGLCREATEPROGRAMOBJECTARBPROC) (void); +typedef void (APIENTRYP PFNGLATTACHOBJECTARBPROC) (GLhandleARB containerObj, GLhandleARB obj); +typedef void (APIENTRYP PFNGLLINKPROGRAMARBPROC) (GLhandleARB programObj); +typedef void (APIENTRYP PFNGLUSEPROGRAMOBJECTARBPROC) (GLhandleARB programObj); +typedef void (APIENTRYP PFNGLVALIDATEPROGRAMARBPROC) (GLhandleARB programObj); +typedef void (APIENTRYP PFNGLUNIFORM1FARBPROC) (GLint location, GLfloat v0); +typedef void (APIENTRYP PFNGLUNIFORM2FARBPROC) (GLint location, GLfloat v0, GLfloat v1); +typedef void (APIENTRYP PFNGLUNIFORM3FARBPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (APIENTRYP PFNGLUNIFORM4FARBPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (APIENTRYP PFNGLUNIFORM1IARBPROC) (GLint location, GLint v0); +typedef void (APIENTRYP PFNGLUNIFORM2IARBPROC) (GLint location, GLint v0, GLint v1); +typedef void (APIENTRYP PFNGLUNIFORM3IARBPROC) (GLint location, GLint v0, GLint v1, GLint v2); +typedef void (APIENTRYP PFNGLUNIFORM4IARBPROC) (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (APIENTRYP PFNGLUNIFORM1FVARBPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM2FVARBPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM3FVARBPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM4FVARBPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORM1IVARBPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORM2IVARBPROC) (GLint location, GLsizei count, 
const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORM3IVARBPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORM4IVARBPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX2FVARBPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX3FVARBPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLUNIFORMMATRIX4FVARBPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLGETOBJECTPARAMETERFVARBPROC) (GLhandleARB obj, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETOBJECTPARAMETERIVARBPROC) (GLhandleARB obj, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETINFOLOGARBPROC) (GLhandleARB obj, GLsizei maxLength, GLsizei *length, GLcharARB *infoLog); +typedef void (APIENTRYP PFNGLGETATTACHEDOBJECTSARBPROC) (GLhandleARB containerObj, GLsizei maxCount, GLsizei *count, GLhandleARB *obj); +typedef GLint (APIENTRYP PFNGLGETUNIFORMLOCATIONARBPROC) (GLhandleARB programObj, const GLcharARB *name); +typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMARBPROC) (GLhandleARB programObj, GLuint index, GLsizei maxLength, GLsizei *length, GLint *size, GLenum *type, GLcharARB *name); +typedef void (APIENTRYP PFNGLGETUNIFORMFVARBPROC) (GLhandleARB programObj, GLint location, GLfloat *params); +typedef void (APIENTRYP PFNGLGETUNIFORMIVARBPROC) (GLhandleARB programObj, GLint location, GLint *params); +typedef void (APIENTRYP PFNGLGETSHADERSOURCEARBPROC) (GLhandleARB obj, GLsizei maxLength, GLsizei *length, GLcharARB *source); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDeleteObjectARB (GLhandleARB obj); +GLAPI GLhandleARB APIENTRY glGetHandleARB (GLenum pname); +GLAPI void APIENTRY glDetachObjectARB (GLhandleARB containerObj, GLhandleARB attachedObj); +GLAPI GLhandleARB APIENTRY glCreateShaderObjectARB (GLenum shaderType); +GLAPI void APIENTRY glShaderSourceARB (GLhandleARB shaderObj, GLsizei count, const GLcharARB **string, const GLint *length); +GLAPI void APIENTRY glCompileShaderARB (GLhandleARB shaderObj); +GLAPI GLhandleARB APIENTRY glCreateProgramObjectARB (void); +GLAPI void APIENTRY glAttachObjectARB (GLhandleARB containerObj, GLhandleARB obj); +GLAPI void APIENTRY glLinkProgramARB (GLhandleARB programObj); +GLAPI void APIENTRY glUseProgramObjectARB (GLhandleARB programObj); +GLAPI void APIENTRY glValidateProgramARB (GLhandleARB programObj); +GLAPI void APIENTRY glUniform1fARB (GLint location, GLfloat v0); +GLAPI void APIENTRY glUniform2fARB (GLint location, GLfloat v0, GLfloat v1); +GLAPI void APIENTRY glUniform3fARB (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GLAPI void APIENTRY glUniform4fARB (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GLAPI void APIENTRY glUniform1iARB (GLint location, GLint v0); +GLAPI void APIENTRY glUniform2iARB (GLint location, GLint v0, GLint v1); +GLAPI void APIENTRY glUniform3iARB (GLint location, GLint v0, GLint v1, GLint v2); +GLAPI void APIENTRY glUniform4iARB (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GLAPI void APIENTRY glUniform1fvARB (GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glUniform2fvARB (GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glUniform3fvARB (GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glUniform4fvARB (GLint location, 
GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glUniform1ivARB (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniform2ivARB (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniform3ivARB (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniform4ivARB (GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glUniformMatrix2fvARB (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix3fvARB (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glUniformMatrix4fvARB (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glGetObjectParameterfvARB (GLhandleARB obj, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetObjectParameterivARB (GLhandleARB obj, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetInfoLogARB (GLhandleARB obj, GLsizei maxLength, GLsizei *length, GLcharARB *infoLog); +GLAPI void APIENTRY glGetAttachedObjectsARB (GLhandleARB containerObj, GLsizei maxCount, GLsizei *count, GLhandleARB *obj); +GLAPI GLint APIENTRY glGetUniformLocationARB (GLhandleARB programObj, const GLcharARB *name); +GLAPI void APIENTRY glGetActiveUniformARB (GLhandleARB programObj, GLuint index, GLsizei maxLength, GLsizei *length, GLint *size, GLenum *type, GLcharARB *name); +GLAPI void APIENTRY glGetUniformfvARB (GLhandleARB programObj, GLint location, GLfloat *params); +GLAPI void APIENTRY glGetUniformivARB (GLhandleARB programObj, GLint location, GLint *params); +GLAPI void APIENTRY glGetShaderSourceARB (GLhandleARB obj, GLsizei maxLength, GLsizei *length, GLcharARB *source); +#endif +#endif /* GL_ARB_shader_objects */ + +#ifndef GL_ARB_shader_precision +#define GL_ARB_shader_precision 1 +#endif /* GL_ARB_shader_precision */ + +#ifndef GL_ARB_shader_stencil_export +#define GL_ARB_shader_stencil_export 1 +#endif /* GL_ARB_shader_stencil_export */ + +#ifndef GL_ARB_shader_storage_buffer_object +#define GL_ARB_shader_storage_buffer_object 1 +#endif /* GL_ARB_shader_storage_buffer_object */ + +#ifndef GL_ARB_shader_subroutine +#define GL_ARB_shader_subroutine 1 +#endif /* GL_ARB_shader_subroutine */ + +#ifndef GL_ARB_shader_texture_image_samples +#define GL_ARB_shader_texture_image_samples 1 +#endif /* GL_ARB_shader_texture_image_samples */ + +#ifndef GL_ARB_shader_texture_lod +#define GL_ARB_shader_texture_lod 1 +#endif /* GL_ARB_shader_texture_lod */ + +#ifndef GL_ARB_shader_viewport_layer_array +#define GL_ARB_shader_viewport_layer_array 1 +#endif /* GL_ARB_shader_viewport_layer_array */ + +#ifndef GL_ARB_shading_language_100 +#define GL_ARB_shading_language_100 1 +#define GL_SHADING_LANGUAGE_VERSION_ARB 0x8B8C +#endif /* GL_ARB_shading_language_100 */ + +#ifndef GL_ARB_shading_language_420pack +#define GL_ARB_shading_language_420pack 1 +#endif /* GL_ARB_shading_language_420pack */ + +#ifndef GL_ARB_shading_language_include +#define GL_ARB_shading_language_include 1 +#define GL_SHADER_INCLUDE_ARB 0x8DAE +#define GL_NAMED_STRING_LENGTH_ARB 0x8DE9 +#define GL_NAMED_STRING_TYPE_ARB 0x8DEA +typedef void (APIENTRYP PFNGLNAMEDSTRINGARBPROC) (GLenum type, GLint namelen, const GLchar *name, GLint stringlen, const GLchar *string); +typedef void (APIENTRYP PFNGLDELETENAMEDSTRINGARBPROC) (GLint namelen, const GLchar *name); +typedef void (APIENTRYP PFNGLCOMPILESHADERINCLUDEARBPROC) (GLuint shader, GLsizei count, const GLchar *const*path, const GLint 
*length); +typedef GLboolean (APIENTRYP PFNGLISNAMEDSTRINGARBPROC) (GLint namelen, const GLchar *name); +typedef void (APIENTRYP PFNGLGETNAMEDSTRINGARBPROC) (GLint namelen, const GLchar *name, GLsizei bufSize, GLint *stringlen, GLchar *string); +typedef void (APIENTRYP PFNGLGETNAMEDSTRINGIVARBPROC) (GLint namelen, const GLchar *name, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glNamedStringARB (GLenum type, GLint namelen, const GLchar *name, GLint stringlen, const GLchar *string); +GLAPI void APIENTRY glDeleteNamedStringARB (GLint namelen, const GLchar *name); +GLAPI void APIENTRY glCompileShaderIncludeARB (GLuint shader, GLsizei count, const GLchar *const*path, const GLint *length); +GLAPI GLboolean APIENTRY glIsNamedStringARB (GLint namelen, const GLchar *name); +GLAPI void APIENTRY glGetNamedStringARB (GLint namelen, const GLchar *name, GLsizei bufSize, GLint *stringlen, GLchar *string); +GLAPI void APIENTRY glGetNamedStringivARB (GLint namelen, const GLchar *name, GLenum pname, GLint *params); +#endif +#endif /* GL_ARB_shading_language_include */ + +#ifndef GL_ARB_shading_language_packing +#define GL_ARB_shading_language_packing 1 +#endif /* GL_ARB_shading_language_packing */ + +#ifndef GL_ARB_shadow +#define GL_ARB_shadow 1 +#define GL_TEXTURE_COMPARE_MODE_ARB 0x884C +#define GL_TEXTURE_COMPARE_FUNC_ARB 0x884D +#define GL_COMPARE_R_TO_TEXTURE_ARB 0x884E +#endif /* GL_ARB_shadow */ + +#ifndef GL_ARB_shadow_ambient +#define GL_ARB_shadow_ambient 1 +#define GL_TEXTURE_COMPARE_FAIL_VALUE_ARB 0x80BF +#endif /* GL_ARB_shadow_ambient */ + +#ifndef GL_ARB_sparse_buffer +#define GL_ARB_sparse_buffer 1 +#define GL_SPARSE_STORAGE_BIT_ARB 0x0400 +#define GL_SPARSE_BUFFER_PAGE_SIZE_ARB 0x82F8 +typedef void (APIENTRYP PFNGLBUFFERPAGECOMMITMENTARBPROC) (GLenum target, GLintptr offset, GLsizeiptr size, GLboolean commit); +typedef void (APIENTRYP PFNGLNAMEDBUFFERPAGECOMMITMENTEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, GLboolean commit); +typedef void (APIENTRYP PFNGLNAMEDBUFFERPAGECOMMITMENTARBPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, GLboolean commit); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBufferPageCommitmentARB (GLenum target, GLintptr offset, GLsizeiptr size, GLboolean commit); +GLAPI void APIENTRY glNamedBufferPageCommitmentEXT (GLuint buffer, GLintptr offset, GLsizeiptr size, GLboolean commit); +GLAPI void APIENTRY glNamedBufferPageCommitmentARB (GLuint buffer, GLintptr offset, GLsizeiptr size, GLboolean commit); +#endif +#endif /* GL_ARB_sparse_buffer */ + +#ifndef GL_ARB_sparse_texture +#define GL_ARB_sparse_texture 1 +#define GL_TEXTURE_SPARSE_ARB 0x91A6 +#define GL_VIRTUAL_PAGE_SIZE_INDEX_ARB 0x91A7 +#define GL_NUM_SPARSE_LEVELS_ARB 0x91AA +#define GL_NUM_VIRTUAL_PAGE_SIZES_ARB 0x91A8 +#define GL_VIRTUAL_PAGE_SIZE_X_ARB 0x9195 +#define GL_VIRTUAL_PAGE_SIZE_Y_ARB 0x9196 +#define GL_VIRTUAL_PAGE_SIZE_Z_ARB 0x9197 +#define GL_MAX_SPARSE_TEXTURE_SIZE_ARB 0x9198 +#define GL_MAX_SPARSE_3D_TEXTURE_SIZE_ARB 0x9199 +#define GL_MAX_SPARSE_ARRAY_TEXTURE_LAYERS_ARB 0x919A +#define GL_SPARSE_TEXTURE_FULL_ARRAY_CUBE_MIPMAPS_ARB 0x91A9 +typedef void (APIENTRYP PFNGLTEXPAGECOMMITMENTARBPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean commit); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexPageCommitmentARB (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean 
commit); +#endif +#endif /* GL_ARB_sparse_texture */ + +#ifndef GL_ARB_sparse_texture2 +#define GL_ARB_sparse_texture2 1 +#endif /* GL_ARB_sparse_texture2 */ + +#ifndef GL_ARB_sparse_texture_clamp +#define GL_ARB_sparse_texture_clamp 1 +#endif /* GL_ARB_sparse_texture_clamp */ + +#ifndef GL_ARB_spirv_extensions +#define GL_ARB_spirv_extensions 1 +#endif /* GL_ARB_spirv_extensions */ + +#ifndef GL_ARB_stencil_texturing +#define GL_ARB_stencil_texturing 1 +#endif /* GL_ARB_stencil_texturing */ + +#ifndef GL_ARB_sync +#define GL_ARB_sync 1 +#endif /* GL_ARB_sync */ + +#ifndef GL_ARB_tessellation_shader +#define GL_ARB_tessellation_shader 1 +#endif /* GL_ARB_tessellation_shader */ + +#ifndef GL_ARB_texture_barrier +#define GL_ARB_texture_barrier 1 +#endif /* GL_ARB_texture_barrier */ + +#ifndef GL_ARB_texture_border_clamp +#define GL_ARB_texture_border_clamp 1 +#define GL_CLAMP_TO_BORDER_ARB 0x812D +#endif /* GL_ARB_texture_border_clamp */ + +#ifndef GL_ARB_texture_buffer_object +#define GL_ARB_texture_buffer_object 1 +#define GL_TEXTURE_BUFFER_ARB 0x8C2A +#define GL_MAX_TEXTURE_BUFFER_SIZE_ARB 0x8C2B +#define GL_TEXTURE_BINDING_BUFFER_ARB 0x8C2C +#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING_ARB 0x8C2D +#define GL_TEXTURE_BUFFER_FORMAT_ARB 0x8C2E +typedef void (APIENTRYP PFNGLTEXBUFFERARBPROC) (GLenum target, GLenum internalformat, GLuint buffer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexBufferARB (GLenum target, GLenum internalformat, GLuint buffer); +#endif +#endif /* GL_ARB_texture_buffer_object */ + +#ifndef GL_ARB_texture_buffer_object_rgb32 +#define GL_ARB_texture_buffer_object_rgb32 1 +#endif /* GL_ARB_texture_buffer_object_rgb32 */ + +#ifndef GL_ARB_texture_buffer_range +#define GL_ARB_texture_buffer_range 1 +#endif /* GL_ARB_texture_buffer_range */ + +#ifndef GL_ARB_texture_compression +#define GL_ARB_texture_compression 1 +#define GL_COMPRESSED_ALPHA_ARB 0x84E9 +#define GL_COMPRESSED_LUMINANCE_ARB 0x84EA +#define GL_COMPRESSED_LUMINANCE_ALPHA_ARB 0x84EB +#define GL_COMPRESSED_INTENSITY_ARB 0x84EC +#define GL_COMPRESSED_RGB_ARB 0x84ED +#define GL_COMPRESSED_RGBA_ARB 0x84EE +#define GL_TEXTURE_COMPRESSION_HINT_ARB 0x84EF +#define GL_TEXTURE_COMPRESSED_IMAGE_SIZE_ARB 0x86A0 +#define GL_TEXTURE_COMPRESSED_ARB 0x86A1 +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS_ARB 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS_ARB 0x86A3 +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DARBPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DARBPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE1DARBPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DARBPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DARBPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE1DARBPROC) (GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, 
const void *data); +typedef void (APIENTRYP PFNGLGETCOMPRESSEDTEXIMAGEARBPROC) (GLenum target, GLint level, void *img); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCompressedTexImage3DARB (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexImage2DARB (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexImage1DARB (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexSubImage3DARB (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexSubImage2DARB (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glCompressedTexSubImage1DARB (GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *data); +GLAPI void APIENTRY glGetCompressedTexImageARB (GLenum target, GLint level, void *img); +#endif +#endif /* GL_ARB_texture_compression */ + +#ifndef GL_ARB_texture_compression_bptc +#define GL_ARB_texture_compression_bptc 1 +#define GL_COMPRESSED_RGBA_BPTC_UNORM_ARB 0x8E8C +#define GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM_ARB 0x8E8D +#define GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT_ARB 0x8E8E +#define GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_ARB 0x8E8F +#endif /* GL_ARB_texture_compression_bptc */ + +#ifndef GL_ARB_texture_compression_rgtc +#define GL_ARB_texture_compression_rgtc 1 +#endif /* GL_ARB_texture_compression_rgtc */ + +#ifndef GL_ARB_texture_cube_map +#define GL_ARB_texture_cube_map 1 +#define GL_NORMAL_MAP_ARB 0x8511 +#define GL_REFLECTION_MAP_ARB 0x8512 +#define GL_TEXTURE_CUBE_MAP_ARB 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP_ARB 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X_ARB 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X_ARB 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y_ARB 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_ARB 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z_ARB 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_ARB 0x851A +#define GL_PROXY_TEXTURE_CUBE_MAP_ARB 0x851B +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE_ARB 0x851C +#endif /* GL_ARB_texture_cube_map */ + +#ifndef GL_ARB_texture_cube_map_array +#define GL_ARB_texture_cube_map_array 1 +#define GL_TEXTURE_CUBE_MAP_ARRAY_ARB 0x9009 +#define GL_TEXTURE_BINDING_CUBE_MAP_ARRAY_ARB 0x900A +#define GL_PROXY_TEXTURE_CUBE_MAP_ARRAY_ARB 0x900B +#define GL_SAMPLER_CUBE_MAP_ARRAY_ARB 0x900C +#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW_ARB 0x900D +#define GL_INT_SAMPLER_CUBE_MAP_ARRAY_ARB 0x900E +#define GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY_ARB 0x900F +#endif /* GL_ARB_texture_cube_map_array */ + +#ifndef GL_ARB_texture_env_add +#define GL_ARB_texture_env_add 1 +#endif /* GL_ARB_texture_env_add */ + +#ifndef GL_ARB_texture_env_combine +#define GL_ARB_texture_env_combine 1 +#define GL_COMBINE_ARB 0x8570 +#define GL_COMBINE_RGB_ARB 0x8571 +#define GL_COMBINE_ALPHA_ARB 0x8572 +#define GL_SOURCE0_RGB_ARB 0x8580 +#define GL_SOURCE1_RGB_ARB 0x8581 +#define GL_SOURCE2_RGB_ARB 0x8582 +#define GL_SOURCE0_ALPHA_ARB 0x8588 +#define GL_SOURCE1_ALPHA_ARB 0x8589 +#define GL_SOURCE2_ALPHA_ARB 0x858A 
+#define GL_OPERAND0_RGB_ARB 0x8590 +#define GL_OPERAND1_RGB_ARB 0x8591 +#define GL_OPERAND2_RGB_ARB 0x8592 +#define GL_OPERAND0_ALPHA_ARB 0x8598 +#define GL_OPERAND1_ALPHA_ARB 0x8599 +#define GL_OPERAND2_ALPHA_ARB 0x859A +#define GL_RGB_SCALE_ARB 0x8573 +#define GL_ADD_SIGNED_ARB 0x8574 +#define GL_INTERPOLATE_ARB 0x8575 +#define GL_SUBTRACT_ARB 0x84E7 +#define GL_CONSTANT_ARB 0x8576 +#define GL_PRIMARY_COLOR_ARB 0x8577 +#define GL_PREVIOUS_ARB 0x8578 +#endif /* GL_ARB_texture_env_combine */ + +#ifndef GL_ARB_texture_env_crossbar +#define GL_ARB_texture_env_crossbar 1 +#endif /* GL_ARB_texture_env_crossbar */ + +#ifndef GL_ARB_texture_env_dot3 +#define GL_ARB_texture_env_dot3 1 +#define GL_DOT3_RGB_ARB 0x86AE +#define GL_DOT3_RGBA_ARB 0x86AF +#endif /* GL_ARB_texture_env_dot3 */ + +#ifndef GL_ARB_texture_filter_anisotropic +#define GL_ARB_texture_filter_anisotropic 1 +#endif /* GL_ARB_texture_filter_anisotropic */ + +#ifndef GL_ARB_texture_filter_minmax +#define GL_ARB_texture_filter_minmax 1 +#define GL_TEXTURE_REDUCTION_MODE_ARB 0x9366 +#define GL_WEIGHTED_AVERAGE_ARB 0x9367 +#endif /* GL_ARB_texture_filter_minmax */ + +#ifndef GL_ARB_texture_float +#define GL_ARB_texture_float 1 +#define GL_TEXTURE_RED_TYPE_ARB 0x8C10 +#define GL_TEXTURE_GREEN_TYPE_ARB 0x8C11 +#define GL_TEXTURE_BLUE_TYPE_ARB 0x8C12 +#define GL_TEXTURE_ALPHA_TYPE_ARB 0x8C13 +#define GL_TEXTURE_LUMINANCE_TYPE_ARB 0x8C14 +#define GL_TEXTURE_INTENSITY_TYPE_ARB 0x8C15 +#define GL_TEXTURE_DEPTH_TYPE_ARB 0x8C16 +#define GL_UNSIGNED_NORMALIZED_ARB 0x8C17 +#define GL_RGBA32F_ARB 0x8814 +#define GL_RGB32F_ARB 0x8815 +#define GL_ALPHA32F_ARB 0x8816 +#define GL_INTENSITY32F_ARB 0x8817 +#define GL_LUMINANCE32F_ARB 0x8818 +#define GL_LUMINANCE_ALPHA32F_ARB 0x8819 +#define GL_RGBA16F_ARB 0x881A +#define GL_RGB16F_ARB 0x881B +#define GL_ALPHA16F_ARB 0x881C +#define GL_INTENSITY16F_ARB 0x881D +#define GL_LUMINANCE16F_ARB 0x881E +#define GL_LUMINANCE_ALPHA16F_ARB 0x881F +#endif /* GL_ARB_texture_float */ + +#ifndef GL_ARB_texture_gather +#define GL_ARB_texture_gather 1 +#define GL_MIN_PROGRAM_TEXTURE_GATHER_OFFSET_ARB 0x8E5E +#define GL_MAX_PROGRAM_TEXTURE_GATHER_OFFSET_ARB 0x8E5F +#define GL_MAX_PROGRAM_TEXTURE_GATHER_COMPONENTS_ARB 0x8F9F +#endif /* GL_ARB_texture_gather */ + +#ifndef GL_ARB_texture_mirror_clamp_to_edge +#define GL_ARB_texture_mirror_clamp_to_edge 1 +#endif /* GL_ARB_texture_mirror_clamp_to_edge */ + +#ifndef GL_ARB_texture_mirrored_repeat +#define GL_ARB_texture_mirrored_repeat 1 +#define GL_MIRRORED_REPEAT_ARB 0x8370 +#endif /* GL_ARB_texture_mirrored_repeat */ + +#ifndef GL_ARB_texture_multisample +#define GL_ARB_texture_multisample 1 +#endif /* GL_ARB_texture_multisample */ + +#ifndef GL_ARB_texture_non_power_of_two +#define GL_ARB_texture_non_power_of_two 1 +#endif /* GL_ARB_texture_non_power_of_two */ + +#ifndef GL_ARB_texture_query_levels +#define GL_ARB_texture_query_levels 1 +#endif /* GL_ARB_texture_query_levels */ + +#ifndef GL_ARB_texture_query_lod +#define GL_ARB_texture_query_lod 1 +#endif /* GL_ARB_texture_query_lod */ + +#ifndef GL_ARB_texture_rectangle +#define GL_ARB_texture_rectangle 1 +#define GL_TEXTURE_RECTANGLE_ARB 0x84F5 +#define GL_TEXTURE_BINDING_RECTANGLE_ARB 0x84F6 +#define GL_PROXY_TEXTURE_RECTANGLE_ARB 0x84F7 +#define GL_MAX_RECTANGLE_TEXTURE_SIZE_ARB 0x84F8 +#endif /* GL_ARB_texture_rectangle */ + +#ifndef GL_ARB_texture_rg +#define GL_ARB_texture_rg 1 +#endif /* GL_ARB_texture_rg */ + +#ifndef GL_ARB_texture_rgb10_a2ui +#define GL_ARB_texture_rgb10_a2ui 1 +#endif /* 
GL_ARB_texture_rgb10_a2ui */ + +#ifndef GL_ARB_texture_stencil8 +#define GL_ARB_texture_stencil8 1 +#endif /* GL_ARB_texture_stencil8 */ + +#ifndef GL_ARB_texture_storage +#define GL_ARB_texture_storage 1 +#endif /* GL_ARB_texture_storage */ + +#ifndef GL_ARB_texture_storage_multisample +#define GL_ARB_texture_storage_multisample 1 +#endif /* GL_ARB_texture_storage_multisample */ + +#ifndef GL_ARB_texture_swizzle +#define GL_ARB_texture_swizzle 1 +#endif /* GL_ARB_texture_swizzle */ + +#ifndef GL_ARB_texture_view +#define GL_ARB_texture_view 1 +#endif /* GL_ARB_texture_view */ + +#ifndef GL_ARB_timer_query +#define GL_ARB_timer_query 1 +#endif /* GL_ARB_timer_query */ + +#ifndef GL_ARB_transform_feedback2 +#define GL_ARB_transform_feedback2 1 +#endif /* GL_ARB_transform_feedback2 */ + +#ifndef GL_ARB_transform_feedback3 +#define GL_ARB_transform_feedback3 1 +#endif /* GL_ARB_transform_feedback3 */ + +#ifndef GL_ARB_transform_feedback_instanced +#define GL_ARB_transform_feedback_instanced 1 +#endif /* GL_ARB_transform_feedback_instanced */ + +#ifndef GL_ARB_transform_feedback_overflow_query +#define GL_ARB_transform_feedback_overflow_query 1 +#define GL_TRANSFORM_FEEDBACK_OVERFLOW_ARB 0x82EC +#define GL_TRANSFORM_FEEDBACK_STREAM_OVERFLOW_ARB 0x82ED +#endif /* GL_ARB_transform_feedback_overflow_query */ + +#ifndef GL_ARB_transpose_matrix +#define GL_ARB_transpose_matrix 1 +#define GL_TRANSPOSE_MODELVIEW_MATRIX_ARB 0x84E3 +#define GL_TRANSPOSE_PROJECTION_MATRIX_ARB 0x84E4 +#define GL_TRANSPOSE_TEXTURE_MATRIX_ARB 0x84E5 +#define GL_TRANSPOSE_COLOR_MATRIX_ARB 0x84E6 +typedef void (APIENTRYP PFNGLLOADTRANSPOSEMATRIXFARBPROC) (const GLfloat *m); +typedef void (APIENTRYP PFNGLLOADTRANSPOSEMATRIXDARBPROC) (const GLdouble *m); +typedef void (APIENTRYP PFNGLMULTTRANSPOSEMATRIXFARBPROC) (const GLfloat *m); +typedef void (APIENTRYP PFNGLMULTTRANSPOSEMATRIXDARBPROC) (const GLdouble *m); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glLoadTransposeMatrixfARB (const GLfloat *m); +GLAPI void APIENTRY glLoadTransposeMatrixdARB (const GLdouble *m); +GLAPI void APIENTRY glMultTransposeMatrixfARB (const GLfloat *m); +GLAPI void APIENTRY glMultTransposeMatrixdARB (const GLdouble *m); +#endif +#endif /* GL_ARB_transpose_matrix */ + +#ifndef GL_ARB_uniform_buffer_object +#define GL_ARB_uniform_buffer_object 1 +#endif /* GL_ARB_uniform_buffer_object */ + +#ifndef GL_ARB_vertex_array_bgra +#define GL_ARB_vertex_array_bgra 1 +#endif /* GL_ARB_vertex_array_bgra */ + +#ifndef GL_ARB_vertex_array_object +#define GL_ARB_vertex_array_object 1 +#endif /* GL_ARB_vertex_array_object */ + +#ifndef GL_ARB_vertex_attrib_64bit +#define GL_ARB_vertex_attrib_64bit 1 +#endif /* GL_ARB_vertex_attrib_64bit */ + +#ifndef GL_ARB_vertex_attrib_binding +#define GL_ARB_vertex_attrib_binding 1 +#endif /* GL_ARB_vertex_attrib_binding */ + +#ifndef GL_ARB_vertex_blend +#define GL_ARB_vertex_blend 1 +#define GL_MAX_VERTEX_UNITS_ARB 0x86A4 +#define GL_ACTIVE_VERTEX_UNITS_ARB 0x86A5 +#define GL_WEIGHT_SUM_UNITY_ARB 0x86A6 +#define GL_VERTEX_BLEND_ARB 0x86A7 +#define GL_CURRENT_WEIGHT_ARB 0x86A8 +#define GL_WEIGHT_ARRAY_TYPE_ARB 0x86A9 +#define GL_WEIGHT_ARRAY_STRIDE_ARB 0x86AA +#define GL_WEIGHT_ARRAY_SIZE_ARB 0x86AB +#define GL_WEIGHT_ARRAY_POINTER_ARB 0x86AC +#define GL_WEIGHT_ARRAY_ARB 0x86AD +#define GL_MODELVIEW0_ARB 0x1700 +#define GL_MODELVIEW1_ARB 0x850A +#define GL_MODELVIEW2_ARB 0x8722 +#define GL_MODELVIEW3_ARB 0x8723 +#define GL_MODELVIEW4_ARB 0x8724 +#define GL_MODELVIEW5_ARB 0x8725 +#define GL_MODELVIEW6_ARB 0x8726 +#define 
GL_MODELVIEW7_ARB 0x8727 +#define GL_MODELVIEW8_ARB 0x8728 +#define GL_MODELVIEW9_ARB 0x8729 +#define GL_MODELVIEW10_ARB 0x872A +#define GL_MODELVIEW11_ARB 0x872B +#define GL_MODELVIEW12_ARB 0x872C +#define GL_MODELVIEW13_ARB 0x872D +#define GL_MODELVIEW14_ARB 0x872E +#define GL_MODELVIEW15_ARB 0x872F +#define GL_MODELVIEW16_ARB 0x8730 +#define GL_MODELVIEW17_ARB 0x8731 +#define GL_MODELVIEW18_ARB 0x8732 +#define GL_MODELVIEW19_ARB 0x8733 +#define GL_MODELVIEW20_ARB 0x8734 +#define GL_MODELVIEW21_ARB 0x8735 +#define GL_MODELVIEW22_ARB 0x8736 +#define GL_MODELVIEW23_ARB 0x8737 +#define GL_MODELVIEW24_ARB 0x8738 +#define GL_MODELVIEW25_ARB 0x8739 +#define GL_MODELVIEW26_ARB 0x873A +#define GL_MODELVIEW27_ARB 0x873B +#define GL_MODELVIEW28_ARB 0x873C +#define GL_MODELVIEW29_ARB 0x873D +#define GL_MODELVIEW30_ARB 0x873E +#define GL_MODELVIEW31_ARB 0x873F +typedef void (APIENTRYP PFNGLWEIGHTBVARBPROC) (GLint size, const GLbyte *weights); +typedef void (APIENTRYP PFNGLWEIGHTSVARBPROC) (GLint size, const GLshort *weights); +typedef void (APIENTRYP PFNGLWEIGHTIVARBPROC) (GLint size, const GLint *weights); +typedef void (APIENTRYP PFNGLWEIGHTFVARBPROC) (GLint size, const GLfloat *weights); +typedef void (APIENTRYP PFNGLWEIGHTDVARBPROC) (GLint size, const GLdouble *weights); +typedef void (APIENTRYP PFNGLWEIGHTUBVARBPROC) (GLint size, const GLubyte *weights); +typedef void (APIENTRYP PFNGLWEIGHTUSVARBPROC) (GLint size, const GLushort *weights); +typedef void (APIENTRYP PFNGLWEIGHTUIVARBPROC) (GLint size, const GLuint *weights); +typedef void (APIENTRYP PFNGLWEIGHTPOINTERARBPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLVERTEXBLENDARBPROC) (GLint count); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glWeightbvARB (GLint size, const GLbyte *weights); +GLAPI void APIENTRY glWeightsvARB (GLint size, const GLshort *weights); +GLAPI void APIENTRY glWeightivARB (GLint size, const GLint *weights); +GLAPI void APIENTRY glWeightfvARB (GLint size, const GLfloat *weights); +GLAPI void APIENTRY glWeightdvARB (GLint size, const GLdouble *weights); +GLAPI void APIENTRY glWeightubvARB (GLint size, const GLubyte *weights); +GLAPI void APIENTRY glWeightusvARB (GLint size, const GLushort *weights); +GLAPI void APIENTRY glWeightuivARB (GLint size, const GLuint *weights); +GLAPI void APIENTRY glWeightPointerARB (GLint size, GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glVertexBlendARB (GLint count); +#endif +#endif /* GL_ARB_vertex_blend */ + +#ifndef GL_ARB_vertex_buffer_object +#define GL_ARB_vertex_buffer_object 1 +#include <stddef.h> +typedef ptrdiff_t GLsizeiptrARB; +typedef ptrdiff_t GLintptrARB; +#define GL_BUFFER_SIZE_ARB 0x8764 +#define GL_BUFFER_USAGE_ARB 0x8765 +#define GL_ARRAY_BUFFER_ARB 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER_ARB 0x8893 +#define GL_ARRAY_BUFFER_BINDING_ARB 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING_ARB 0x8895 +#define GL_VERTEX_ARRAY_BUFFER_BINDING_ARB 0x8896 +#define GL_NORMAL_ARRAY_BUFFER_BINDING_ARB 0x8897 +#define GL_COLOR_ARRAY_BUFFER_BINDING_ARB 0x8898 +#define GL_INDEX_ARRAY_BUFFER_BINDING_ARB 0x8899 +#define GL_TEXTURE_COORD_ARRAY_BUFFER_BINDING_ARB 0x889A +#define GL_EDGE_FLAG_ARRAY_BUFFER_BINDING_ARB 0x889B +#define GL_SECONDARY_COLOR_ARRAY_BUFFER_BINDING_ARB 0x889C +#define GL_FOG_COORDINATE_ARRAY_BUFFER_BINDING_ARB 0x889D +#define GL_WEIGHT_ARRAY_BUFFER_BINDING_ARB 0x889E +#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING_ARB 0x889F +#define GL_READ_ONLY_ARB 0x88B8 +#define GL_WRITE_ONLY_ARB 0x88B9 
+#define GL_READ_WRITE_ARB 0x88BA +#define GL_BUFFER_ACCESS_ARB 0x88BB +#define GL_BUFFER_MAPPED_ARB 0x88BC +#define GL_BUFFER_MAP_POINTER_ARB 0x88BD +#define GL_STREAM_DRAW_ARB 0x88E0 +#define GL_STREAM_READ_ARB 0x88E1 +#define GL_STREAM_COPY_ARB 0x88E2 +#define GL_STATIC_DRAW_ARB 0x88E4 +#define GL_STATIC_READ_ARB 0x88E5 +#define GL_STATIC_COPY_ARB 0x88E6 +#define GL_DYNAMIC_DRAW_ARB 0x88E8 +#define GL_DYNAMIC_READ_ARB 0x88E9 +#define GL_DYNAMIC_COPY_ARB 0x88EA +typedef void (APIENTRYP PFNGLBINDBUFFERARBPROC) (GLenum target, GLuint buffer); +typedef void (APIENTRYP PFNGLDELETEBUFFERSARBPROC) (GLsizei n, const GLuint *buffers); +typedef void (APIENTRYP PFNGLGENBUFFERSARBPROC) (GLsizei n, GLuint *buffers); +typedef GLboolean (APIENTRYP PFNGLISBUFFERARBPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLBUFFERDATAARBPROC) (GLenum target, GLsizeiptrARB size, const void *data, GLenum usage); +typedef void (APIENTRYP PFNGLBUFFERSUBDATAARBPROC) (GLenum target, GLintptrARB offset, GLsizeiptrARB size, const void *data); +typedef void (APIENTRYP PFNGLGETBUFFERSUBDATAARBPROC) (GLenum target, GLintptrARB offset, GLsizeiptrARB size, void *data); +typedef void *(APIENTRYP PFNGLMAPBUFFERARBPROC) (GLenum target, GLenum access); +typedef GLboolean (APIENTRYP PFNGLUNMAPBUFFERARBPROC) (GLenum target); +typedef void (APIENTRYP PFNGLGETBUFFERPARAMETERIVARBPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETBUFFERPOINTERVARBPROC) (GLenum target, GLenum pname, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBindBufferARB (GLenum target, GLuint buffer); +GLAPI void APIENTRY glDeleteBuffersARB (GLsizei n, const GLuint *buffers); +GLAPI void APIENTRY glGenBuffersARB (GLsizei n, GLuint *buffers); +GLAPI GLboolean APIENTRY glIsBufferARB (GLuint buffer); +GLAPI void APIENTRY glBufferDataARB (GLenum target, GLsizeiptrARB size, const void *data, GLenum usage); +GLAPI void APIENTRY glBufferSubDataARB (GLenum target, GLintptrARB offset, GLsizeiptrARB size, const void *data); +GLAPI void APIENTRY glGetBufferSubDataARB (GLenum target, GLintptrARB offset, GLsizeiptrARB size, void *data); +GLAPI void *APIENTRY glMapBufferARB (GLenum target, GLenum access); +GLAPI GLboolean APIENTRY glUnmapBufferARB (GLenum target); +GLAPI void APIENTRY glGetBufferParameterivARB (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetBufferPointervARB (GLenum target, GLenum pname, void **params); +#endif +#endif /* GL_ARB_vertex_buffer_object */ + +#ifndef GL_ARB_vertex_program +#define GL_ARB_vertex_program 1 +#define GL_COLOR_SUM_ARB 0x8458 +#define GL_VERTEX_PROGRAM_ARB 0x8620 +#define GL_VERTEX_ATTRIB_ARRAY_ENABLED_ARB 0x8622 +#define GL_VERTEX_ATTRIB_ARRAY_SIZE_ARB 0x8623 +#define GL_VERTEX_ATTRIB_ARRAY_STRIDE_ARB 0x8624 +#define GL_VERTEX_ATTRIB_ARRAY_TYPE_ARB 0x8625 +#define GL_CURRENT_VERTEX_ATTRIB_ARB 0x8626 +#define GL_VERTEX_PROGRAM_POINT_SIZE_ARB 0x8642 +#define GL_VERTEX_PROGRAM_TWO_SIDE_ARB 0x8643 +#define GL_VERTEX_ATTRIB_ARRAY_POINTER_ARB 0x8645 +#define GL_MAX_VERTEX_ATTRIBS_ARB 0x8869 +#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED_ARB 0x886A +#define GL_PROGRAM_ADDRESS_REGISTERS_ARB 0x88B0 +#define GL_MAX_PROGRAM_ADDRESS_REGISTERS_ARB 0x88B1 +#define GL_PROGRAM_NATIVE_ADDRESS_REGISTERS_ARB 0x88B2 +#define GL_MAX_PROGRAM_NATIVE_ADDRESS_REGISTERS_ARB 0x88B3 +typedef void (APIENTRYP PFNGLVERTEXATTRIB1DARBPROC) (GLuint index, GLdouble x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1DVARBPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP 
PFNGLVERTEXATTRIB1FARBPROC) (GLuint index, GLfloat x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1FVARBPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1SARBPROC) (GLuint index, GLshort x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1SVARBPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2DARBPROC) (GLuint index, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2DVARBPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2FARBPROC) (GLuint index, GLfloat x, GLfloat y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2FVARBPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2SARBPROC) (GLuint index, GLshort x, GLshort y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2SVARBPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3DARBPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3DVARBPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3FARBPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3FVARBPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3SARBPROC) (GLuint index, GLshort x, GLshort y, GLshort z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3SVARBPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NBVARBPROC) (GLuint index, const GLbyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NIVARBPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NSVARBPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUBARBPROC) (GLuint index, GLubyte x, GLubyte y, GLubyte z, GLubyte w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUBVARBPROC) (GLuint index, const GLubyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUIVARBPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUSVARBPROC) (GLuint index, const GLushort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4BVARBPROC) (GLuint index, const GLbyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4DARBPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4DVARBPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4FARBPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4FVARBPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4IVARBPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4SARBPROC) (GLuint index, GLshort x, GLshort y, GLshort z, GLshort w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4SVARBPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4UBVARBPROC) (GLuint index, const GLubyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4UIVARBPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4USVARBPROC) (GLuint index, const GLushort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBPOINTERARBPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLENABLEVERTEXATTRIBARRAYARBPROC) (GLuint index); +typedef void (APIENTRYP PFNGLDISABLEVERTEXATTRIBARRAYARBPROC) (GLuint index); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBDVARBPROC) (GLuint index, GLenum pname, GLdouble *params); +typedef void 
(APIENTRYP PFNGLGETVERTEXATTRIBFVARBPROC) (GLuint index, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIVARBPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVARBPROC) (GLuint index, GLenum pname, void **pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexAttrib1dARB (GLuint index, GLdouble x); +GLAPI void APIENTRY glVertexAttrib1dvARB (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib1fARB (GLuint index, GLfloat x); +GLAPI void APIENTRY glVertexAttrib1fvARB (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib1sARB (GLuint index, GLshort x); +GLAPI void APIENTRY glVertexAttrib1svARB (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib2dARB (GLuint index, GLdouble x, GLdouble y); +GLAPI void APIENTRY glVertexAttrib2dvARB (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib2fARB (GLuint index, GLfloat x, GLfloat y); +GLAPI void APIENTRY glVertexAttrib2fvARB (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib2sARB (GLuint index, GLshort x, GLshort y); +GLAPI void APIENTRY glVertexAttrib2svARB (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib3dARB (GLuint index, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glVertexAttrib3dvARB (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib3fARB (GLuint index, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glVertexAttrib3fvARB (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib3sARB (GLuint index, GLshort x, GLshort y, GLshort z); +GLAPI void APIENTRY glVertexAttrib3svARB (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib4NbvARB (GLuint index, const GLbyte *v); +GLAPI void APIENTRY glVertexAttrib4NivARB (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttrib4NsvARB (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib4NubARB (GLuint index, GLubyte x, GLubyte y, GLubyte z, GLubyte w); +GLAPI void APIENTRY glVertexAttrib4NubvARB (GLuint index, const GLubyte *v); +GLAPI void APIENTRY glVertexAttrib4NuivARB (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttrib4NusvARB (GLuint index, const GLushort *v); +GLAPI void APIENTRY glVertexAttrib4bvARB (GLuint index, const GLbyte *v); +GLAPI void APIENTRY glVertexAttrib4dARB (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glVertexAttrib4dvARB (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib4fARB (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glVertexAttrib4fvARB (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib4ivARB (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttrib4sARB (GLuint index, GLshort x, GLshort y, GLshort z, GLshort w); +GLAPI void APIENTRY glVertexAttrib4svARB (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib4ubvARB (GLuint index, const GLubyte *v); +GLAPI void APIENTRY glVertexAttrib4uivARB (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttrib4usvARB (GLuint index, const GLushort *v); +GLAPI void APIENTRY glVertexAttribPointerARB (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glEnableVertexAttribArrayARB (GLuint index); +GLAPI void APIENTRY glDisableVertexAttribArrayARB (GLuint index); +GLAPI void APIENTRY glGetVertexAttribdvARB (GLuint index, 
GLenum pname, GLdouble *params); +GLAPI void APIENTRY glGetVertexAttribfvARB (GLuint index, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetVertexAttribivARB (GLuint index, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetVertexAttribPointervARB (GLuint index, GLenum pname, void **pointer); +#endif +#endif /* GL_ARB_vertex_program */ + +#ifndef GL_ARB_vertex_shader +#define GL_ARB_vertex_shader 1 +#define GL_VERTEX_SHADER_ARB 0x8B31 +#define GL_MAX_VERTEX_UNIFORM_COMPONENTS_ARB 0x8B4A +#define GL_MAX_VARYING_FLOATS_ARB 0x8B4B +#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS_ARB 0x8B4C +#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS_ARB 0x8B4D +#define GL_OBJECT_ACTIVE_ATTRIBUTES_ARB 0x8B89 +#define GL_OBJECT_ACTIVE_ATTRIBUTE_MAX_LENGTH_ARB 0x8B8A +typedef void (APIENTRYP PFNGLBINDATTRIBLOCATIONARBPROC) (GLhandleARB programObj, GLuint index, const GLcharARB *name); +typedef void (APIENTRYP PFNGLGETACTIVEATTRIBARBPROC) (GLhandleARB programObj, GLuint index, GLsizei maxLength, GLsizei *length, GLint *size, GLenum *type, GLcharARB *name); +typedef GLint (APIENTRYP PFNGLGETATTRIBLOCATIONARBPROC) (GLhandleARB programObj, const GLcharARB *name); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBindAttribLocationARB (GLhandleARB programObj, GLuint index, const GLcharARB *name); +GLAPI void APIENTRY glGetActiveAttribARB (GLhandleARB programObj, GLuint index, GLsizei maxLength, GLsizei *length, GLint *size, GLenum *type, GLcharARB *name); +GLAPI GLint APIENTRY glGetAttribLocationARB (GLhandleARB programObj, const GLcharARB *name); +#endif +#endif /* GL_ARB_vertex_shader */ + +#ifndef GL_ARB_vertex_type_10f_11f_11f_rev +#define GL_ARB_vertex_type_10f_11f_11f_rev 1 +#endif /* GL_ARB_vertex_type_10f_11f_11f_rev */ + +#ifndef GL_ARB_vertex_type_2_10_10_10_rev +#define GL_ARB_vertex_type_2_10_10_10_rev 1 +#endif /* GL_ARB_vertex_type_2_10_10_10_rev */ + +#ifndef GL_ARB_viewport_array +#define GL_ARB_viewport_array 1 +#endif /* GL_ARB_viewport_array */ + +#ifndef GL_ARB_window_pos +#define GL_ARB_window_pos 1 +typedef void (APIENTRYP PFNGLWINDOWPOS2DARBPROC) (GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLWINDOWPOS2DVARBPROC) (const GLdouble *v); +typedef void (APIENTRYP PFNGLWINDOWPOS2FARBPROC) (GLfloat x, GLfloat y); +typedef void (APIENTRYP PFNGLWINDOWPOS2FVARBPROC) (const GLfloat *v); +typedef void (APIENTRYP PFNGLWINDOWPOS2IARBPROC) (GLint x, GLint y); +typedef void (APIENTRYP PFNGLWINDOWPOS2IVARBPROC) (const GLint *v); +typedef void (APIENTRYP PFNGLWINDOWPOS2SARBPROC) (GLshort x, GLshort y); +typedef void (APIENTRYP PFNGLWINDOWPOS2SVARBPROC) (const GLshort *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3DARBPROC) (GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLWINDOWPOS3DVARBPROC) (const GLdouble *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3FARBPROC) (GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLWINDOWPOS3FVARBPROC) (const GLfloat *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3IARBPROC) (GLint x, GLint y, GLint z); +typedef void (APIENTRYP PFNGLWINDOWPOS3IVARBPROC) (const GLint *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3SARBPROC) (GLshort x, GLshort y, GLshort z); +typedef void (APIENTRYP PFNGLWINDOWPOS3SVARBPROC) (const GLshort *v); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glWindowPos2dARB (GLdouble x, GLdouble y); +GLAPI void APIENTRY glWindowPos2dvARB (const GLdouble *v); +GLAPI void APIENTRY glWindowPos2fARB (GLfloat x, GLfloat y); +GLAPI void APIENTRY glWindowPos2fvARB (const GLfloat *v); +GLAPI void APIENTRY glWindowPos2iARB (GLint 
x, GLint y); +GLAPI void APIENTRY glWindowPos2ivARB (const GLint *v); +GLAPI void APIENTRY glWindowPos2sARB (GLshort x, GLshort y); +GLAPI void APIENTRY glWindowPos2svARB (const GLshort *v); +GLAPI void APIENTRY glWindowPos3dARB (GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glWindowPos3dvARB (const GLdouble *v); +GLAPI void APIENTRY glWindowPos3fARB (GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glWindowPos3fvARB (const GLfloat *v); +GLAPI void APIENTRY glWindowPos3iARB (GLint x, GLint y, GLint z); +GLAPI void APIENTRY glWindowPos3ivARB (const GLint *v); +GLAPI void APIENTRY glWindowPos3sARB (GLshort x, GLshort y, GLshort z); +GLAPI void APIENTRY glWindowPos3svARB (const GLshort *v); +#endif +#endif /* GL_ARB_window_pos */ + +#ifndef GL_KHR_blend_equation_advanced +#define GL_KHR_blend_equation_advanced 1 +#define GL_MULTIPLY_KHR 0x9294 +#define GL_SCREEN_KHR 0x9295 +#define GL_OVERLAY_KHR 0x9296 +#define GL_DARKEN_KHR 0x9297 +#define GL_LIGHTEN_KHR 0x9298 +#define GL_COLORDODGE_KHR 0x9299 +#define GL_COLORBURN_KHR 0x929A +#define GL_HARDLIGHT_KHR 0x929B +#define GL_SOFTLIGHT_KHR 0x929C +#define GL_DIFFERENCE_KHR 0x929E +#define GL_EXCLUSION_KHR 0x92A0 +#define GL_HSL_HUE_KHR 0x92AD +#define GL_HSL_SATURATION_KHR 0x92AE +#define GL_HSL_COLOR_KHR 0x92AF +#define GL_HSL_LUMINOSITY_KHR 0x92B0 +typedef void (APIENTRYP PFNGLBLENDBARRIERKHRPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendBarrierKHR (void); +#endif +#endif /* GL_KHR_blend_equation_advanced */ + +#ifndef GL_KHR_blend_equation_advanced_coherent +#define GL_KHR_blend_equation_advanced_coherent 1 +#define GL_BLEND_ADVANCED_COHERENT_KHR 0x9285 +#endif /* GL_KHR_blend_equation_advanced_coherent */ + +#ifndef GL_KHR_context_flush_control +#define GL_KHR_context_flush_control 1 +#endif /* GL_KHR_context_flush_control */ + +#ifndef GL_KHR_debug +#define GL_KHR_debug 1 +#endif /* GL_KHR_debug */ + +#ifndef GL_KHR_no_error +#define GL_KHR_no_error 1 +#define GL_CONTEXT_FLAG_NO_ERROR_BIT_KHR 0x00000008 +#endif /* GL_KHR_no_error */ + +#ifndef GL_KHR_parallel_shader_compile +#define GL_KHR_parallel_shader_compile 1 +#define GL_MAX_SHADER_COMPILER_THREADS_KHR 0x91B0 +#define GL_COMPLETION_STATUS_KHR 0x91B1 +typedef void (APIENTRYP PFNGLMAXSHADERCOMPILERTHREADSKHRPROC) (GLuint count); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMaxShaderCompilerThreadsKHR (GLuint count); +#endif +#endif /* GL_KHR_parallel_shader_compile */ + +#ifndef GL_KHR_robust_buffer_access_behavior +#define GL_KHR_robust_buffer_access_behavior 1 +#endif /* GL_KHR_robust_buffer_access_behavior */ + +#ifndef GL_KHR_robustness +#define GL_KHR_robustness 1 +#define GL_CONTEXT_ROBUST_ACCESS 0x90F3 +#endif /* GL_KHR_robustness */ + +#ifndef GL_KHR_texture_compression_astc_hdr +#define GL_KHR_texture_compression_astc_hdr 1 +#define GL_COMPRESSED_RGBA_ASTC_4x4_KHR 0x93B0 +#define GL_COMPRESSED_RGBA_ASTC_5x4_KHR 0x93B1 +#define GL_COMPRESSED_RGBA_ASTC_5x5_KHR 0x93B2 +#define GL_COMPRESSED_RGBA_ASTC_6x5_KHR 0x93B3 +#define GL_COMPRESSED_RGBA_ASTC_6x6_KHR 0x93B4 +#define GL_COMPRESSED_RGBA_ASTC_8x5_KHR 0x93B5 +#define GL_COMPRESSED_RGBA_ASTC_8x6_KHR 0x93B6 +#define GL_COMPRESSED_RGBA_ASTC_8x8_KHR 0x93B7 +#define GL_COMPRESSED_RGBA_ASTC_10x5_KHR 0x93B8 +#define GL_COMPRESSED_RGBA_ASTC_10x6_KHR 0x93B9 +#define GL_COMPRESSED_RGBA_ASTC_10x8_KHR 0x93BA +#define GL_COMPRESSED_RGBA_ASTC_10x10_KHR 0x93BB +#define GL_COMPRESSED_RGBA_ASTC_12x10_KHR 0x93BC +#define GL_COMPRESSED_RGBA_ASTC_12x12_KHR 0x93BD +#define 
GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR 0x93D0 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR 0x93D1 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR 0x93D2 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR 0x93D3 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR 0x93D4 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR 0x93D5 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR 0x93D6 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR 0x93D7 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR 0x93D8 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR 0x93D9 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR 0x93DA +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR 0x93DB +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR 0x93DC +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR 0x93DD +#endif /* GL_KHR_texture_compression_astc_hdr */ + +#ifndef GL_KHR_texture_compression_astc_ldr +#define GL_KHR_texture_compression_astc_ldr 1 +#endif /* GL_KHR_texture_compression_astc_ldr */ + +#ifndef GL_KHR_texture_compression_astc_sliced_3d +#define GL_KHR_texture_compression_astc_sliced_3d 1 +#endif /* GL_KHR_texture_compression_astc_sliced_3d */ + +#ifndef GL_OES_byte_coordinates +#define GL_OES_byte_coordinates 1 +typedef void (APIENTRYP PFNGLMULTITEXCOORD1BOESPROC) (GLenum texture, GLbyte s); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1BVOESPROC) (GLenum texture, const GLbyte *coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2BOESPROC) (GLenum texture, GLbyte s, GLbyte t); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2BVOESPROC) (GLenum texture, const GLbyte *coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3BOESPROC) (GLenum texture, GLbyte s, GLbyte t, GLbyte r); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3BVOESPROC) (GLenum texture, const GLbyte *coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4BOESPROC) (GLenum texture, GLbyte s, GLbyte t, GLbyte r, GLbyte q); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4BVOESPROC) (GLenum texture, const GLbyte *coords); +typedef void (APIENTRYP PFNGLTEXCOORD1BOESPROC) (GLbyte s); +typedef void (APIENTRYP PFNGLTEXCOORD1BVOESPROC) (const GLbyte *coords); +typedef void (APIENTRYP PFNGLTEXCOORD2BOESPROC) (GLbyte s, GLbyte t); +typedef void (APIENTRYP PFNGLTEXCOORD2BVOESPROC) (const GLbyte *coords); +typedef void (APIENTRYP PFNGLTEXCOORD3BOESPROC) (GLbyte s, GLbyte t, GLbyte r); +typedef void (APIENTRYP PFNGLTEXCOORD3BVOESPROC) (const GLbyte *coords); +typedef void (APIENTRYP PFNGLTEXCOORD4BOESPROC) (GLbyte s, GLbyte t, GLbyte r, GLbyte q); +typedef void (APIENTRYP PFNGLTEXCOORD4BVOESPROC) (const GLbyte *coords); +typedef void (APIENTRYP PFNGLVERTEX2BOESPROC) (GLbyte x, GLbyte y); +typedef void (APIENTRYP PFNGLVERTEX2BVOESPROC) (const GLbyte *coords); +typedef void (APIENTRYP PFNGLVERTEX3BOESPROC) (GLbyte x, GLbyte y, GLbyte z); +typedef void (APIENTRYP PFNGLVERTEX3BVOESPROC) (const GLbyte *coords); +typedef void (APIENTRYP PFNGLVERTEX4BOESPROC) (GLbyte x, GLbyte y, GLbyte z, GLbyte w); +typedef void (APIENTRYP PFNGLVERTEX4BVOESPROC) (const GLbyte *coords); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMultiTexCoord1bOES (GLenum texture, GLbyte s); +GLAPI void APIENTRY glMultiTexCoord1bvOES (GLenum texture, const GLbyte *coords); +GLAPI void APIENTRY glMultiTexCoord2bOES (GLenum texture, GLbyte s, GLbyte t); +GLAPI void APIENTRY glMultiTexCoord2bvOES (GLenum texture, const GLbyte *coords); +GLAPI void APIENTRY glMultiTexCoord3bOES (GLenum texture, GLbyte s, GLbyte t, GLbyte r); +GLAPI void APIENTRY glMultiTexCoord3bvOES (GLenum texture, const GLbyte *coords); +GLAPI 
void APIENTRY glMultiTexCoord4bOES (GLenum texture, GLbyte s, GLbyte t, GLbyte r, GLbyte q); +GLAPI void APIENTRY glMultiTexCoord4bvOES (GLenum texture, const GLbyte *coords); +GLAPI void APIENTRY glTexCoord1bOES (GLbyte s); +GLAPI void APIENTRY glTexCoord1bvOES (const GLbyte *coords); +GLAPI void APIENTRY glTexCoord2bOES (GLbyte s, GLbyte t); +GLAPI void APIENTRY glTexCoord2bvOES (const GLbyte *coords); +GLAPI void APIENTRY glTexCoord3bOES (GLbyte s, GLbyte t, GLbyte r); +GLAPI void APIENTRY glTexCoord3bvOES (const GLbyte *coords); +GLAPI void APIENTRY glTexCoord4bOES (GLbyte s, GLbyte t, GLbyte r, GLbyte q); +GLAPI void APIENTRY glTexCoord4bvOES (const GLbyte *coords); +GLAPI void APIENTRY glVertex2bOES (GLbyte x, GLbyte y); +GLAPI void APIENTRY glVertex2bvOES (const GLbyte *coords); +GLAPI void APIENTRY glVertex3bOES (GLbyte x, GLbyte y, GLbyte z); +GLAPI void APIENTRY glVertex3bvOES (const GLbyte *coords); +GLAPI void APIENTRY glVertex4bOES (GLbyte x, GLbyte y, GLbyte z, GLbyte w); +GLAPI void APIENTRY glVertex4bvOES (const GLbyte *coords); +#endif +#endif /* GL_OES_byte_coordinates */ + +#ifndef GL_OES_compressed_paletted_texture +#define GL_OES_compressed_paletted_texture 1 +#define GL_PALETTE4_RGB8_OES 0x8B90 +#define GL_PALETTE4_RGBA8_OES 0x8B91 +#define GL_PALETTE4_R5_G6_B5_OES 0x8B92 +#define GL_PALETTE4_RGBA4_OES 0x8B93 +#define GL_PALETTE4_RGB5_A1_OES 0x8B94 +#define GL_PALETTE8_RGB8_OES 0x8B95 +#define GL_PALETTE8_RGBA8_OES 0x8B96 +#define GL_PALETTE8_R5_G6_B5_OES 0x8B97 +#define GL_PALETTE8_RGBA4_OES 0x8B98 +#define GL_PALETTE8_RGB5_A1_OES 0x8B99 +#endif /* GL_OES_compressed_paletted_texture */ + +#ifndef GL_OES_fixed_point +#define GL_OES_fixed_point 1 +typedef GLint GLfixed; +#define GL_FIXED_OES 0x140C +typedef void (APIENTRYP PFNGLALPHAFUNCXOESPROC) (GLenum func, GLfixed ref); +typedef void (APIENTRYP PFNGLCLEARCOLORXOESPROC) (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (APIENTRYP PFNGLCLEARDEPTHXOESPROC) (GLfixed depth); +typedef void (APIENTRYP PFNGLCLIPPLANEXOESPROC) (GLenum plane, const GLfixed *equation); +typedef void (APIENTRYP PFNGLCOLOR4XOESPROC) (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (APIENTRYP PFNGLDEPTHRANGEXOESPROC) (GLfixed n, GLfixed f); +typedef void (APIENTRYP PFNGLFOGXOESPROC) (GLenum pname, GLfixed param); +typedef void (APIENTRYP PFNGLFOGXVOESPROC) (GLenum pname, const GLfixed *param); +typedef void (APIENTRYP PFNGLFRUSTUMXOESPROC) (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +typedef void (APIENTRYP PFNGLGETCLIPPLANEXOESPROC) (GLenum plane, GLfixed *equation); +typedef void (APIENTRYP PFNGLGETFIXEDVOESPROC) (GLenum pname, GLfixed *params); +typedef void (APIENTRYP PFNGLGETTEXENVXVOESPROC) (GLenum target, GLenum pname, GLfixed *params); +typedef void (APIENTRYP PFNGLGETTEXPARAMETERXVOESPROC) (GLenum target, GLenum pname, GLfixed *params); +typedef void (APIENTRYP PFNGLLIGHTMODELXOESPROC) (GLenum pname, GLfixed param); +typedef void (APIENTRYP PFNGLLIGHTMODELXVOESPROC) (GLenum pname, const GLfixed *param); +typedef void (APIENTRYP PFNGLLIGHTXOESPROC) (GLenum light, GLenum pname, GLfixed param); +typedef void (APIENTRYP PFNGLLIGHTXVOESPROC) (GLenum light, GLenum pname, const GLfixed *params); +typedef void (APIENTRYP PFNGLLINEWIDTHXOESPROC) (GLfixed width); +typedef void (APIENTRYP PFNGLLOADMATRIXXOESPROC) (const GLfixed *m); +typedef void (APIENTRYP PFNGLMATERIALXOESPROC) (GLenum face, GLenum pname, GLfixed param); +typedef void (APIENTRYP PFNGLMATERIALXVOESPROC) 
(GLenum face, GLenum pname, const GLfixed *param); +typedef void (APIENTRYP PFNGLMULTMATRIXXOESPROC) (const GLfixed *m); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4XOESPROC) (GLenum texture, GLfixed s, GLfixed t, GLfixed r, GLfixed q); +typedef void (APIENTRYP PFNGLNORMAL3XOESPROC) (GLfixed nx, GLfixed ny, GLfixed nz); +typedef void (APIENTRYP PFNGLORTHOXOESPROC) (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +typedef void (APIENTRYP PFNGLPOINTPARAMETERXVOESPROC) (GLenum pname, const GLfixed *params); +typedef void (APIENTRYP PFNGLPOINTSIZEXOESPROC) (GLfixed size); +typedef void (APIENTRYP PFNGLPOLYGONOFFSETXOESPROC) (GLfixed factor, GLfixed units); +typedef void (APIENTRYP PFNGLROTATEXOESPROC) (GLfixed angle, GLfixed x, GLfixed y, GLfixed z); +typedef void (APIENTRYP PFNGLSCALEXOESPROC) (GLfixed x, GLfixed y, GLfixed z); +typedef void (APIENTRYP PFNGLTEXENVXOESPROC) (GLenum target, GLenum pname, GLfixed param); +typedef void (APIENTRYP PFNGLTEXENVXVOESPROC) (GLenum target, GLenum pname, const GLfixed *params); +typedef void (APIENTRYP PFNGLTEXPARAMETERXOESPROC) (GLenum target, GLenum pname, GLfixed param); +typedef void (APIENTRYP PFNGLTEXPARAMETERXVOESPROC) (GLenum target, GLenum pname, const GLfixed *params); +typedef void (APIENTRYP PFNGLTRANSLATEXOESPROC) (GLfixed x, GLfixed y, GLfixed z); +typedef void (APIENTRYP PFNGLACCUMXOESPROC) (GLenum op, GLfixed value); +typedef void (APIENTRYP PFNGLBITMAPXOESPROC) (GLsizei width, GLsizei height, GLfixed xorig, GLfixed yorig, GLfixed xmove, GLfixed ymove, const GLubyte *bitmap); +typedef void (APIENTRYP PFNGLBLENDCOLORXOESPROC) (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (APIENTRYP PFNGLCLEARACCUMXOESPROC) (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (APIENTRYP PFNGLCOLOR3XOESPROC) (GLfixed red, GLfixed green, GLfixed blue); +typedef void (APIENTRYP PFNGLCOLOR3XVOESPROC) (const GLfixed *components); +typedef void (APIENTRYP PFNGLCOLOR4XVOESPROC) (const GLfixed *components); +typedef void (APIENTRYP PFNGLCONVOLUTIONPARAMETERXOESPROC) (GLenum target, GLenum pname, GLfixed param); +typedef void (APIENTRYP PFNGLCONVOLUTIONPARAMETERXVOESPROC) (GLenum target, GLenum pname, const GLfixed *params); +typedef void (APIENTRYP PFNGLEVALCOORD1XOESPROC) (GLfixed u); +typedef void (APIENTRYP PFNGLEVALCOORD1XVOESPROC) (const GLfixed *coords); +typedef void (APIENTRYP PFNGLEVALCOORD2XOESPROC) (GLfixed u, GLfixed v); +typedef void (APIENTRYP PFNGLEVALCOORD2XVOESPROC) (const GLfixed *coords); +typedef void (APIENTRYP PFNGLFEEDBACKBUFFERXOESPROC) (GLsizei n, GLenum type, const GLfixed *buffer); +typedef void (APIENTRYP PFNGLGETCONVOLUTIONPARAMETERXVOESPROC) (GLenum target, GLenum pname, GLfixed *params); +typedef void (APIENTRYP PFNGLGETHISTOGRAMPARAMETERXVOESPROC) (GLenum target, GLenum pname, GLfixed *params); +typedef void (APIENTRYP PFNGLGETLIGHTXOESPROC) (GLenum light, GLenum pname, GLfixed *params); +typedef void (APIENTRYP PFNGLGETMAPXVOESPROC) (GLenum target, GLenum query, GLfixed *v); +typedef void (APIENTRYP PFNGLGETMATERIALXOESPROC) (GLenum face, GLenum pname, GLfixed param); +typedef void (APIENTRYP PFNGLGETPIXELMAPXVPROC) (GLenum map, GLint size, GLfixed *values); +typedef void (APIENTRYP PFNGLGETTEXGENXVOESPROC) (GLenum coord, GLenum pname, GLfixed *params); +typedef void (APIENTRYP PFNGLGETTEXLEVELPARAMETERXVOESPROC) (GLenum target, GLint level, GLenum pname, GLfixed *params); +typedef void (APIENTRYP PFNGLINDEXXOESPROC) (GLfixed component); +typedef void (APIENTRYP 
PFNGLINDEXXVOESPROC) (const GLfixed *component); +typedef void (APIENTRYP PFNGLLOADTRANSPOSEMATRIXXOESPROC) (const GLfixed *m); +typedef void (APIENTRYP PFNGLMAP1XOESPROC) (GLenum target, GLfixed u1, GLfixed u2, GLint stride, GLint order, GLfixed points); +typedef void (APIENTRYP PFNGLMAP2XOESPROC) (GLenum target, GLfixed u1, GLfixed u2, GLint ustride, GLint uorder, GLfixed v1, GLfixed v2, GLint vstride, GLint vorder, GLfixed points); +typedef void (APIENTRYP PFNGLMAPGRID1XOESPROC) (GLint n, GLfixed u1, GLfixed u2); +typedef void (APIENTRYP PFNGLMAPGRID2XOESPROC) (GLint n, GLfixed u1, GLfixed u2, GLfixed v1, GLfixed v2); +typedef void (APIENTRYP PFNGLMULTTRANSPOSEMATRIXXOESPROC) (const GLfixed *m); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1XOESPROC) (GLenum texture, GLfixed s); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1XVOESPROC) (GLenum texture, const GLfixed *coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2XOESPROC) (GLenum texture, GLfixed s, GLfixed t); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2XVOESPROC) (GLenum texture, const GLfixed *coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3XOESPROC) (GLenum texture, GLfixed s, GLfixed t, GLfixed r); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3XVOESPROC) (GLenum texture, const GLfixed *coords); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4XVOESPROC) (GLenum texture, const GLfixed *coords); +typedef void (APIENTRYP PFNGLNORMAL3XVOESPROC) (const GLfixed *coords); +typedef void (APIENTRYP PFNGLPASSTHROUGHXOESPROC) (GLfixed token); +typedef void (APIENTRYP PFNGLPIXELMAPXPROC) (GLenum map, GLint size, const GLfixed *values); +typedef void (APIENTRYP PFNGLPIXELSTOREXPROC) (GLenum pname, GLfixed param); +typedef void (APIENTRYP PFNGLPIXELTRANSFERXOESPROC) (GLenum pname, GLfixed param); +typedef void (APIENTRYP PFNGLPIXELZOOMXOESPROC) (GLfixed xfactor, GLfixed yfactor); +typedef void (APIENTRYP PFNGLPRIORITIZETEXTURESXOESPROC) (GLsizei n, const GLuint *textures, const GLfixed *priorities); +typedef void (APIENTRYP PFNGLRASTERPOS2XOESPROC) (GLfixed x, GLfixed y); +typedef void (APIENTRYP PFNGLRASTERPOS2XVOESPROC) (const GLfixed *coords); +typedef void (APIENTRYP PFNGLRASTERPOS3XOESPROC) (GLfixed x, GLfixed y, GLfixed z); +typedef void (APIENTRYP PFNGLRASTERPOS3XVOESPROC) (const GLfixed *coords); +typedef void (APIENTRYP PFNGLRASTERPOS4XOESPROC) (GLfixed x, GLfixed y, GLfixed z, GLfixed w); +typedef void (APIENTRYP PFNGLRASTERPOS4XVOESPROC) (const GLfixed *coords); +typedef void (APIENTRYP PFNGLRECTXOESPROC) (GLfixed x1, GLfixed y1, GLfixed x2, GLfixed y2); +typedef void (APIENTRYP PFNGLRECTXVOESPROC) (const GLfixed *v1, const GLfixed *v2); +typedef void (APIENTRYP PFNGLTEXCOORD1XOESPROC) (GLfixed s); +typedef void (APIENTRYP PFNGLTEXCOORD1XVOESPROC) (const GLfixed *coords); +typedef void (APIENTRYP PFNGLTEXCOORD2XOESPROC) (GLfixed s, GLfixed t); +typedef void (APIENTRYP PFNGLTEXCOORD2XVOESPROC) (const GLfixed *coords); +typedef void (APIENTRYP PFNGLTEXCOORD3XOESPROC) (GLfixed s, GLfixed t, GLfixed r); +typedef void (APIENTRYP PFNGLTEXCOORD3XVOESPROC) (const GLfixed *coords); +typedef void (APIENTRYP PFNGLTEXCOORD4XOESPROC) (GLfixed s, GLfixed t, GLfixed r, GLfixed q); +typedef void (APIENTRYP PFNGLTEXCOORD4XVOESPROC) (const GLfixed *coords); +typedef void (APIENTRYP PFNGLTEXGENXOESPROC) (GLenum coord, GLenum pname, GLfixed param); +typedef void (APIENTRYP PFNGLTEXGENXVOESPROC) (GLenum coord, GLenum pname, const GLfixed *params); +typedef void (APIENTRYP PFNGLVERTEX2XOESPROC) (GLfixed x); +typedef void (APIENTRYP PFNGLVERTEX2XVOESPROC) 
(const GLfixed *coords); +typedef void (APIENTRYP PFNGLVERTEX3XOESPROC) (GLfixed x, GLfixed y); +typedef void (APIENTRYP PFNGLVERTEX3XVOESPROC) (const GLfixed *coords); +typedef void (APIENTRYP PFNGLVERTEX4XOESPROC) (GLfixed x, GLfixed y, GLfixed z); +typedef void (APIENTRYP PFNGLVERTEX4XVOESPROC) (const GLfixed *coords); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glAlphaFuncxOES (GLenum func, GLfixed ref); +GLAPI void APIENTRY glClearColorxOES (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GLAPI void APIENTRY glClearDepthxOES (GLfixed depth); +GLAPI void APIENTRY glClipPlanexOES (GLenum plane, const GLfixed *equation); +GLAPI void APIENTRY glColor4xOES (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GLAPI void APIENTRY glDepthRangexOES (GLfixed n, GLfixed f); +GLAPI void APIENTRY glFogxOES (GLenum pname, GLfixed param); +GLAPI void APIENTRY glFogxvOES (GLenum pname, const GLfixed *param); +GLAPI void APIENTRY glFrustumxOES (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +GLAPI void APIENTRY glGetClipPlanexOES (GLenum plane, GLfixed *equation); +GLAPI void APIENTRY glGetFixedvOES (GLenum pname, GLfixed *params); +GLAPI void APIENTRY glGetTexEnvxvOES (GLenum target, GLenum pname, GLfixed *params); +GLAPI void APIENTRY glGetTexParameterxvOES (GLenum target, GLenum pname, GLfixed *params); +GLAPI void APIENTRY glLightModelxOES (GLenum pname, GLfixed param); +GLAPI void APIENTRY glLightModelxvOES (GLenum pname, const GLfixed *param); +GLAPI void APIENTRY glLightxOES (GLenum light, GLenum pname, GLfixed param); +GLAPI void APIENTRY glLightxvOES (GLenum light, GLenum pname, const GLfixed *params); +GLAPI void APIENTRY glLineWidthxOES (GLfixed width); +GLAPI void APIENTRY glLoadMatrixxOES (const GLfixed *m); +GLAPI void APIENTRY glMaterialxOES (GLenum face, GLenum pname, GLfixed param); +GLAPI void APIENTRY glMaterialxvOES (GLenum face, GLenum pname, const GLfixed *param); +GLAPI void APIENTRY glMultMatrixxOES (const GLfixed *m); +GLAPI void APIENTRY glMultiTexCoord4xOES (GLenum texture, GLfixed s, GLfixed t, GLfixed r, GLfixed q); +GLAPI void APIENTRY glNormal3xOES (GLfixed nx, GLfixed ny, GLfixed nz); +GLAPI void APIENTRY glOrthoxOES (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +GLAPI void APIENTRY glPointParameterxvOES (GLenum pname, const GLfixed *params); +GLAPI void APIENTRY glPointSizexOES (GLfixed size); +GLAPI void APIENTRY glPolygonOffsetxOES (GLfixed factor, GLfixed units); +GLAPI void APIENTRY glRotatexOES (GLfixed angle, GLfixed x, GLfixed y, GLfixed z); +GLAPI void APIENTRY glScalexOES (GLfixed x, GLfixed y, GLfixed z); +GLAPI void APIENTRY glTexEnvxOES (GLenum target, GLenum pname, GLfixed param); +GLAPI void APIENTRY glTexEnvxvOES (GLenum target, GLenum pname, const GLfixed *params); +GLAPI void APIENTRY glTexParameterxOES (GLenum target, GLenum pname, GLfixed param); +GLAPI void APIENTRY glTexParameterxvOES (GLenum target, GLenum pname, const GLfixed *params); +GLAPI void APIENTRY glTranslatexOES (GLfixed x, GLfixed y, GLfixed z); +GLAPI void APIENTRY glAccumxOES (GLenum op, GLfixed value); +GLAPI void APIENTRY glBitmapxOES (GLsizei width, GLsizei height, GLfixed xorig, GLfixed yorig, GLfixed xmove, GLfixed ymove, const GLubyte *bitmap); +GLAPI void APIENTRY glBlendColorxOES (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GLAPI void APIENTRY glClearAccumxOES (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GLAPI void APIENTRY glColor3xOES (GLfixed red, GLfixed green, GLfixed blue); 
+GLAPI void APIENTRY glColor3xvOES (const GLfixed *components); +GLAPI void APIENTRY glColor4xvOES (const GLfixed *components); +GLAPI void APIENTRY glConvolutionParameterxOES (GLenum target, GLenum pname, GLfixed param); +GLAPI void APIENTRY glConvolutionParameterxvOES (GLenum target, GLenum pname, const GLfixed *params); +GLAPI void APIENTRY glEvalCoord1xOES (GLfixed u); +GLAPI void APIENTRY glEvalCoord1xvOES (const GLfixed *coords); +GLAPI void APIENTRY glEvalCoord2xOES (GLfixed u, GLfixed v); +GLAPI void APIENTRY glEvalCoord2xvOES (const GLfixed *coords); +GLAPI void APIENTRY glFeedbackBufferxOES (GLsizei n, GLenum type, const GLfixed *buffer); +GLAPI void APIENTRY glGetConvolutionParameterxvOES (GLenum target, GLenum pname, GLfixed *params); +GLAPI void APIENTRY glGetHistogramParameterxvOES (GLenum target, GLenum pname, GLfixed *params); +GLAPI void APIENTRY glGetLightxOES (GLenum light, GLenum pname, GLfixed *params); +GLAPI void APIENTRY glGetMapxvOES (GLenum target, GLenum query, GLfixed *v); +GLAPI void APIENTRY glGetMaterialxOES (GLenum face, GLenum pname, GLfixed param); +GLAPI void APIENTRY glGetPixelMapxv (GLenum map, GLint size, GLfixed *values); +GLAPI void APIENTRY glGetTexGenxvOES (GLenum coord, GLenum pname, GLfixed *params); +GLAPI void APIENTRY glGetTexLevelParameterxvOES (GLenum target, GLint level, GLenum pname, GLfixed *params); +GLAPI void APIENTRY glIndexxOES (GLfixed component); +GLAPI void APIENTRY glIndexxvOES (const GLfixed *component); +GLAPI void APIENTRY glLoadTransposeMatrixxOES (const GLfixed *m); +GLAPI void APIENTRY glMap1xOES (GLenum target, GLfixed u1, GLfixed u2, GLint stride, GLint order, GLfixed points); +GLAPI void APIENTRY glMap2xOES (GLenum target, GLfixed u1, GLfixed u2, GLint ustride, GLint uorder, GLfixed v1, GLfixed v2, GLint vstride, GLint vorder, GLfixed points); +GLAPI void APIENTRY glMapGrid1xOES (GLint n, GLfixed u1, GLfixed u2); +GLAPI void APIENTRY glMapGrid2xOES (GLint n, GLfixed u1, GLfixed u2, GLfixed v1, GLfixed v2); +GLAPI void APIENTRY glMultTransposeMatrixxOES (const GLfixed *m); +GLAPI void APIENTRY glMultiTexCoord1xOES (GLenum texture, GLfixed s); +GLAPI void APIENTRY glMultiTexCoord1xvOES (GLenum texture, const GLfixed *coords); +GLAPI void APIENTRY glMultiTexCoord2xOES (GLenum texture, GLfixed s, GLfixed t); +GLAPI void APIENTRY glMultiTexCoord2xvOES (GLenum texture, const GLfixed *coords); +GLAPI void APIENTRY glMultiTexCoord3xOES (GLenum texture, GLfixed s, GLfixed t, GLfixed r); +GLAPI void APIENTRY glMultiTexCoord3xvOES (GLenum texture, const GLfixed *coords); +GLAPI void APIENTRY glMultiTexCoord4xvOES (GLenum texture, const GLfixed *coords); +GLAPI void APIENTRY glNormal3xvOES (const GLfixed *coords); +GLAPI void APIENTRY glPassThroughxOES (GLfixed token); +GLAPI void APIENTRY glPixelMapx (GLenum map, GLint size, const GLfixed *values); +GLAPI void APIENTRY glPixelStorex (GLenum pname, GLfixed param); +GLAPI void APIENTRY glPixelTransferxOES (GLenum pname, GLfixed param); +GLAPI void APIENTRY glPixelZoomxOES (GLfixed xfactor, GLfixed yfactor); +GLAPI void APIENTRY glPrioritizeTexturesxOES (GLsizei n, const GLuint *textures, const GLfixed *priorities); +GLAPI void APIENTRY glRasterPos2xOES (GLfixed x, GLfixed y); +GLAPI void APIENTRY glRasterPos2xvOES (const GLfixed *coords); +GLAPI void APIENTRY glRasterPos3xOES (GLfixed x, GLfixed y, GLfixed z); +GLAPI void APIENTRY glRasterPos3xvOES (const GLfixed *coords); +GLAPI void APIENTRY glRasterPos4xOES (GLfixed x, GLfixed y, GLfixed z, GLfixed w); +GLAPI void APIENTRY 
glRasterPos4xvOES (const GLfixed *coords); +GLAPI void APIENTRY glRectxOES (GLfixed x1, GLfixed y1, GLfixed x2, GLfixed y2); +GLAPI void APIENTRY glRectxvOES (const GLfixed *v1, const GLfixed *v2); +GLAPI void APIENTRY glTexCoord1xOES (GLfixed s); +GLAPI void APIENTRY glTexCoord1xvOES (const GLfixed *coords); +GLAPI void APIENTRY glTexCoord2xOES (GLfixed s, GLfixed t); +GLAPI void APIENTRY glTexCoord2xvOES (const GLfixed *coords); +GLAPI void APIENTRY glTexCoord3xOES (GLfixed s, GLfixed t, GLfixed r); +GLAPI void APIENTRY glTexCoord3xvOES (const GLfixed *coords); +GLAPI void APIENTRY glTexCoord4xOES (GLfixed s, GLfixed t, GLfixed r, GLfixed q); +GLAPI void APIENTRY glTexCoord4xvOES (const GLfixed *coords); +GLAPI void APIENTRY glTexGenxOES (GLenum coord, GLenum pname, GLfixed param); +GLAPI void APIENTRY glTexGenxvOES (GLenum coord, GLenum pname, const GLfixed *params); +GLAPI void APIENTRY glVertex2xOES (GLfixed x); +GLAPI void APIENTRY glVertex2xvOES (const GLfixed *coords); +GLAPI void APIENTRY glVertex3xOES (GLfixed x, GLfixed y); +GLAPI void APIENTRY glVertex3xvOES (const GLfixed *coords); +GLAPI void APIENTRY glVertex4xOES (GLfixed x, GLfixed y, GLfixed z); +GLAPI void APIENTRY glVertex4xvOES (const GLfixed *coords); +#endif +#endif /* GL_OES_fixed_point */ + +#ifndef GL_OES_query_matrix +#define GL_OES_query_matrix 1 +typedef GLbitfield (APIENTRYP PFNGLQUERYMATRIXXOESPROC) (GLfixed *mantissa, GLint *exponent); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLbitfield APIENTRY glQueryMatrixxOES (GLfixed *mantissa, GLint *exponent); +#endif +#endif /* GL_OES_query_matrix */ + +#ifndef GL_OES_read_format +#define GL_OES_read_format 1 +#define GL_IMPLEMENTATION_COLOR_READ_TYPE_OES 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT_OES 0x8B9B +#endif /* GL_OES_read_format */ + +#ifndef GL_OES_single_precision +#define GL_OES_single_precision 1 +typedef void (APIENTRYP PFNGLCLEARDEPTHFOESPROC) (GLclampf depth); +typedef void (APIENTRYP PFNGLCLIPPLANEFOESPROC) (GLenum plane, const GLfloat *equation); +typedef void (APIENTRYP PFNGLDEPTHRANGEFOESPROC) (GLclampf n, GLclampf f); +typedef void (APIENTRYP PFNGLFRUSTUMFOESPROC) (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +typedef void (APIENTRYP PFNGLGETCLIPPLANEFOESPROC) (GLenum plane, GLfloat *equation); +typedef void (APIENTRYP PFNGLORTHOFOESPROC) (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glClearDepthfOES (GLclampf depth); +GLAPI void APIENTRY glClipPlanefOES (GLenum plane, const GLfloat *equation); +GLAPI void APIENTRY glDepthRangefOES (GLclampf n, GLclampf f); +GLAPI void APIENTRY glFrustumfOES (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +GLAPI void APIENTRY glGetClipPlanefOES (GLenum plane, GLfloat *equation); +GLAPI void APIENTRY glOrthofOES (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +#endif +#endif /* GL_OES_single_precision */ + +#ifndef GL_3DFX_multisample +#define GL_3DFX_multisample 1 +#define GL_MULTISAMPLE_3DFX 0x86B2 +#define GL_SAMPLE_BUFFERS_3DFX 0x86B3 +#define GL_SAMPLES_3DFX 0x86B4 +#define GL_MULTISAMPLE_BIT_3DFX 0x20000000 +#endif /* GL_3DFX_multisample */ + +#ifndef GL_3DFX_tbuffer +#define GL_3DFX_tbuffer 1 +typedef void (APIENTRYP PFNGLTBUFFERMASK3DFXPROC) (GLuint mask); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTbufferMask3DFX (GLuint mask); +#endif +#endif /* GL_3DFX_tbuffer */ + +#ifndef GL_3DFX_texture_compression_FXT1 +#define GL_3DFX_texture_compression_FXT1 1 +#define 
GL_COMPRESSED_RGB_FXT1_3DFX 0x86B0 +#define GL_COMPRESSED_RGBA_FXT1_3DFX 0x86B1 +#endif /* GL_3DFX_texture_compression_FXT1 */ + +#ifndef GL_AMD_blend_minmax_factor +#define GL_AMD_blend_minmax_factor 1 +#define GL_FACTOR_MIN_AMD 0x901C +#define GL_FACTOR_MAX_AMD 0x901D +#endif /* GL_AMD_blend_minmax_factor */ + +#ifndef GL_AMD_conservative_depth +#define GL_AMD_conservative_depth 1 +#endif /* GL_AMD_conservative_depth */ + +#ifndef GL_AMD_debug_output +#define GL_AMD_debug_output 1 +typedef void (APIENTRY *GLDEBUGPROCAMD)(GLuint id,GLenum category,GLenum severity,GLsizei length,const GLchar *message,void *userParam); +#define GL_MAX_DEBUG_MESSAGE_LENGTH_AMD 0x9143 +#define GL_MAX_DEBUG_LOGGED_MESSAGES_AMD 0x9144 +#define GL_DEBUG_LOGGED_MESSAGES_AMD 0x9145 +#define GL_DEBUG_SEVERITY_HIGH_AMD 0x9146 +#define GL_DEBUG_SEVERITY_MEDIUM_AMD 0x9147 +#define GL_DEBUG_SEVERITY_LOW_AMD 0x9148 +#define GL_DEBUG_CATEGORY_API_ERROR_AMD 0x9149 +#define GL_DEBUG_CATEGORY_WINDOW_SYSTEM_AMD 0x914A +#define GL_DEBUG_CATEGORY_DEPRECATION_AMD 0x914B +#define GL_DEBUG_CATEGORY_UNDEFINED_BEHAVIOR_AMD 0x914C +#define GL_DEBUG_CATEGORY_PERFORMANCE_AMD 0x914D +#define GL_DEBUG_CATEGORY_SHADER_COMPILER_AMD 0x914E +#define GL_DEBUG_CATEGORY_APPLICATION_AMD 0x914F +#define GL_DEBUG_CATEGORY_OTHER_AMD 0x9150 +typedef void (APIENTRYP PFNGLDEBUGMESSAGEENABLEAMDPROC) (GLenum category, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +typedef void (APIENTRYP PFNGLDEBUGMESSAGEINSERTAMDPROC) (GLenum category, GLenum severity, GLuint id, GLsizei length, const GLchar *buf); +typedef void (APIENTRYP PFNGLDEBUGMESSAGECALLBACKAMDPROC) (GLDEBUGPROCAMD callback, void *userParam); +typedef GLuint (APIENTRYP PFNGLGETDEBUGMESSAGELOGAMDPROC) (GLuint count, GLsizei bufsize, GLenum *categories, GLuint *severities, GLuint *ids, GLsizei *lengths, GLchar *message); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDebugMessageEnableAMD (GLenum category, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +GLAPI void APIENTRY glDebugMessageInsertAMD (GLenum category, GLenum severity, GLuint id, GLsizei length, const GLchar *buf); +GLAPI void APIENTRY glDebugMessageCallbackAMD (GLDEBUGPROCAMD callback, void *userParam); +GLAPI GLuint APIENTRY glGetDebugMessageLogAMD (GLuint count, GLsizei bufsize, GLenum *categories, GLuint *severities, GLuint *ids, GLsizei *lengths, GLchar *message); +#endif +#endif /* GL_AMD_debug_output */ + +#ifndef GL_AMD_depth_clamp_separate +#define GL_AMD_depth_clamp_separate 1 +#define GL_DEPTH_CLAMP_NEAR_AMD 0x901E +#define GL_DEPTH_CLAMP_FAR_AMD 0x901F +#endif /* GL_AMD_depth_clamp_separate */ + +#ifndef GL_AMD_draw_buffers_blend +#define GL_AMD_draw_buffers_blend 1 +typedef void (APIENTRYP PFNGLBLENDFUNCINDEXEDAMDPROC) (GLuint buf, GLenum src, GLenum dst); +typedef void (APIENTRYP PFNGLBLENDFUNCSEPARATEINDEXEDAMDPROC) (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +typedef void (APIENTRYP PFNGLBLENDEQUATIONINDEXEDAMDPROC) (GLuint buf, GLenum mode); +typedef void (APIENTRYP PFNGLBLENDEQUATIONSEPARATEINDEXEDAMDPROC) (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendFuncIndexedAMD (GLuint buf, GLenum src, GLenum dst); +GLAPI void APIENTRY glBlendFuncSeparateIndexedAMD (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +GLAPI void APIENTRY glBlendEquationIndexedAMD (GLuint buf, GLenum mode); +GLAPI void APIENTRY glBlendEquationSeparateIndexedAMD (GLuint buf, 
GLenum modeRGB, GLenum modeAlpha); +#endif +#endif /* GL_AMD_draw_buffers_blend */ + +#ifndef GL_AMD_framebuffer_sample_positions +#define GL_AMD_framebuffer_sample_positions 1 +#define GL_SUBSAMPLE_DISTANCE_AMD 0x883F +#define GL_PIXELS_PER_SAMPLE_PATTERN_X_AMD 0x91AE +#define GL_PIXELS_PER_SAMPLE_PATTERN_Y_AMD 0x91AF +#define GL_ALL_PIXELS_AMD 0xFFFFFFFF +typedef void (APIENTRYP PFNGLFRAMEBUFFERSAMPLEPOSITIONSFVAMDPROC) (GLenum target, GLuint numsamples, GLuint pixelindex, const GLfloat *values); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERSAMPLEPOSITIONSFVAMDPROC) (GLuint framebuffer, GLuint numsamples, GLuint pixelindex, const GLfloat *values); +typedef void (APIENTRYP PFNGLGETFRAMEBUFFERPARAMETERFVAMDPROC) (GLenum target, GLenum pname, GLuint numsamples, GLuint pixelindex, GLsizei size, GLfloat *values); +typedef void (APIENTRYP PFNGLGETNAMEDFRAMEBUFFERPARAMETERFVAMDPROC) (GLuint framebuffer, GLenum pname, GLuint numsamples, GLuint pixelindex, GLsizei size, GLfloat *values); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFramebufferSamplePositionsfvAMD (GLenum target, GLuint numsamples, GLuint pixelindex, const GLfloat *values); +GLAPI void APIENTRY glNamedFramebufferSamplePositionsfvAMD (GLuint framebuffer, GLuint numsamples, GLuint pixelindex, const GLfloat *values); +GLAPI void APIENTRY glGetFramebufferParameterfvAMD (GLenum target, GLenum pname, GLuint numsamples, GLuint pixelindex, GLsizei size, GLfloat *values); +GLAPI void APIENTRY glGetNamedFramebufferParameterfvAMD (GLuint framebuffer, GLenum pname, GLuint numsamples, GLuint pixelindex, GLsizei size, GLfloat *values); +#endif +#endif /* GL_AMD_framebuffer_sample_positions */ + +#ifndef GL_AMD_gcn_shader +#define GL_AMD_gcn_shader 1 +#endif /* GL_AMD_gcn_shader */ + +#ifndef GL_AMD_gpu_shader_half_float +#define GL_AMD_gpu_shader_half_float 1 +#define GL_FLOAT16_NV 0x8FF8 +#define GL_FLOAT16_VEC2_NV 0x8FF9 +#define GL_FLOAT16_VEC3_NV 0x8FFA +#define GL_FLOAT16_VEC4_NV 0x8FFB +#define GL_FLOAT16_MAT2_AMD 0x91C5 +#define GL_FLOAT16_MAT3_AMD 0x91C6 +#define GL_FLOAT16_MAT4_AMD 0x91C7 +#define GL_FLOAT16_MAT2x3_AMD 0x91C8 +#define GL_FLOAT16_MAT2x4_AMD 0x91C9 +#define GL_FLOAT16_MAT3x2_AMD 0x91CA +#define GL_FLOAT16_MAT3x4_AMD 0x91CB +#define GL_FLOAT16_MAT4x2_AMD 0x91CC +#define GL_FLOAT16_MAT4x3_AMD 0x91CD +#endif /* GL_AMD_gpu_shader_half_float */ + +#ifndef GL_AMD_gpu_shader_int16 +#define GL_AMD_gpu_shader_int16 1 +#endif /* GL_AMD_gpu_shader_int16 */ + +#ifndef GL_AMD_gpu_shader_int64 +#define GL_AMD_gpu_shader_int64 1 +typedef int64_t GLint64EXT; +#define GL_INT64_NV 0x140E +#define GL_UNSIGNED_INT64_NV 0x140F +#define GL_INT8_NV 0x8FE0 +#define GL_INT8_VEC2_NV 0x8FE1 +#define GL_INT8_VEC3_NV 0x8FE2 +#define GL_INT8_VEC4_NV 0x8FE3 +#define GL_INT16_NV 0x8FE4 +#define GL_INT16_VEC2_NV 0x8FE5 +#define GL_INT16_VEC3_NV 0x8FE6 +#define GL_INT16_VEC4_NV 0x8FE7 +#define GL_INT64_VEC2_NV 0x8FE9 +#define GL_INT64_VEC3_NV 0x8FEA +#define GL_INT64_VEC4_NV 0x8FEB +#define GL_UNSIGNED_INT8_NV 0x8FEC +#define GL_UNSIGNED_INT8_VEC2_NV 0x8FED +#define GL_UNSIGNED_INT8_VEC3_NV 0x8FEE +#define GL_UNSIGNED_INT8_VEC4_NV 0x8FEF +#define GL_UNSIGNED_INT16_NV 0x8FF0 +#define GL_UNSIGNED_INT16_VEC2_NV 0x8FF1 +#define GL_UNSIGNED_INT16_VEC3_NV 0x8FF2 +#define GL_UNSIGNED_INT16_VEC4_NV 0x8FF3 +#define GL_UNSIGNED_INT64_VEC2_NV 0x8FF5 +#define GL_UNSIGNED_INT64_VEC3_NV 0x8FF6 +#define GL_UNSIGNED_INT64_VEC4_NV 0x8FF7 +typedef void (APIENTRYP PFNGLUNIFORM1I64NVPROC) (GLint location, GLint64EXT x); +typedef void (APIENTRYP 
PFNGLUNIFORM2I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y); +typedef void (APIENTRYP PFNGLUNIFORM3I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +typedef void (APIENTRYP PFNGLUNIFORM4I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +typedef void (APIENTRYP PFNGLUNIFORM1I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM2I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM3I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM4I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM1UI64NVPROC) (GLint location, GLuint64EXT x); +typedef void (APIENTRYP PFNGLUNIFORM2UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y); +typedef void (APIENTRYP PFNGLUNIFORM3UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +typedef void (APIENTRYP PFNGLUNIFORM4UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +typedef void (APIENTRYP PFNGLUNIFORM1UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM2UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM3UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLUNIFORM4UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLGETUNIFORMI64VNVPROC) (GLuint program, GLint location, GLint64EXT *params); +typedef void (APIENTRYP PFNGLGETUNIFORMUI64VNVPROC) (GLuint program, GLint location, GLuint64EXT *params); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1I64NVPROC) (GLuint program, GLint location, GLint64EXT x); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UI64VNVPROC) (GLuint 
program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glUniform1i64NV (GLint location, GLint64EXT x); +GLAPI void APIENTRY glUniform2i64NV (GLint location, GLint64EXT x, GLint64EXT y); +GLAPI void APIENTRY glUniform3i64NV (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +GLAPI void APIENTRY glUniform4i64NV (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +GLAPI void APIENTRY glUniform1i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glUniform2i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glUniform3i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glUniform4i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glUniform1ui64NV (GLint location, GLuint64EXT x); +GLAPI void APIENTRY glUniform2ui64NV (GLint location, GLuint64EXT x, GLuint64EXT y); +GLAPI void APIENTRY glUniform3ui64NV (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +GLAPI void APIENTRY glUniform4ui64NV (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +GLAPI void APIENTRY glUniform1ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glUniform2ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glUniform3ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glUniform4ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glGetUniformi64vNV (GLuint program, GLint location, GLint64EXT *params); +GLAPI void APIENTRY glGetUniformui64vNV (GLuint program, GLint location, GLuint64EXT *params); +GLAPI void APIENTRY glProgramUniform1i64NV (GLuint program, GLint location, GLint64EXT x); +GLAPI void APIENTRY glProgramUniform2i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y); +GLAPI void APIENTRY glProgramUniform3i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +GLAPI void APIENTRY glProgramUniform4i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +GLAPI void APIENTRY glProgramUniform1i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glProgramUniform2i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glProgramUniform3i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glProgramUniform4i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GLAPI void APIENTRY glProgramUniform1ui64NV (GLuint program, GLint location, GLuint64EXT x); +GLAPI void APIENTRY glProgramUniform2ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y); +GLAPI void APIENTRY glProgramUniform3ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +GLAPI void APIENTRY glProgramUniform4ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +GLAPI void APIENTRY glProgramUniform1ui64vNV (GLuint program, GLint location, GLsizei count, const 
GLuint64EXT *value); +GLAPI void APIENTRY glProgramUniform2ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glProgramUniform3ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glProgramUniform4ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#endif +#endif /* GL_AMD_gpu_shader_int64 */ + +#ifndef GL_AMD_interleaved_elements +#define GL_AMD_interleaved_elements 1 +#define GL_VERTEX_ELEMENT_SWIZZLE_AMD 0x91A4 +#define GL_VERTEX_ID_SWIZZLE_AMD 0x91A5 +typedef void (APIENTRYP PFNGLVERTEXATTRIBPARAMETERIAMDPROC) (GLuint index, GLenum pname, GLint param); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexAttribParameteriAMD (GLuint index, GLenum pname, GLint param); +#endif +#endif /* GL_AMD_interleaved_elements */ + +#ifndef GL_AMD_multi_draw_indirect +#define GL_AMD_multi_draw_indirect 1 +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTAMDPROC) (GLenum mode, const void *indirect, GLsizei primcount, GLsizei stride); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTAMDPROC) (GLenum mode, GLenum type, const void *indirect, GLsizei primcount, GLsizei stride); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMultiDrawArraysIndirectAMD (GLenum mode, const void *indirect, GLsizei primcount, GLsizei stride); +GLAPI void APIENTRY glMultiDrawElementsIndirectAMD (GLenum mode, GLenum type, const void *indirect, GLsizei primcount, GLsizei stride); +#endif +#endif /* GL_AMD_multi_draw_indirect */ + +#ifndef GL_AMD_name_gen_delete +#define GL_AMD_name_gen_delete 1 +#define GL_DATA_BUFFER_AMD 0x9151 +#define GL_PERFORMANCE_MONITOR_AMD 0x9152 +#define GL_QUERY_OBJECT_AMD 0x9153 +#define GL_VERTEX_ARRAY_OBJECT_AMD 0x9154 +#define GL_SAMPLER_OBJECT_AMD 0x9155 +typedef void (APIENTRYP PFNGLGENNAMESAMDPROC) (GLenum identifier, GLuint num, GLuint *names); +typedef void (APIENTRYP PFNGLDELETENAMESAMDPROC) (GLenum identifier, GLuint num, const GLuint *names); +typedef GLboolean (APIENTRYP PFNGLISNAMEAMDPROC) (GLenum identifier, GLuint name); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGenNamesAMD (GLenum identifier, GLuint num, GLuint *names); +GLAPI void APIENTRY glDeleteNamesAMD (GLenum identifier, GLuint num, const GLuint *names); +GLAPI GLboolean APIENTRY glIsNameAMD (GLenum identifier, GLuint name); +#endif +#endif /* GL_AMD_name_gen_delete */ + +#ifndef GL_AMD_occlusion_query_event +#define GL_AMD_occlusion_query_event 1 +#define GL_OCCLUSION_QUERY_EVENT_MASK_AMD 0x874F +#define GL_QUERY_DEPTH_PASS_EVENT_BIT_AMD 0x00000001 +#define GL_QUERY_DEPTH_FAIL_EVENT_BIT_AMD 0x00000002 +#define GL_QUERY_STENCIL_FAIL_EVENT_BIT_AMD 0x00000004 +#define GL_QUERY_DEPTH_BOUNDS_FAIL_EVENT_BIT_AMD 0x00000008 +#define GL_QUERY_ALL_EVENT_BITS_AMD 0xFFFFFFFF +typedef void (APIENTRYP PFNGLQUERYOBJECTPARAMETERUIAMDPROC) (GLenum target, GLuint id, GLenum pname, GLuint param); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glQueryObjectParameteruiAMD (GLenum target, GLuint id, GLenum pname, GLuint param); +#endif +#endif /* GL_AMD_occlusion_query_event */ + +#ifndef GL_AMD_performance_monitor +#define GL_AMD_performance_monitor 1 +#define GL_COUNTER_TYPE_AMD 0x8BC0 +#define GL_COUNTER_RANGE_AMD 0x8BC1 +#define GL_UNSIGNED_INT64_AMD 0x8BC2 +#define GL_PERCENTAGE_AMD 0x8BC3 +#define GL_PERFMON_RESULT_AVAILABLE_AMD 0x8BC4 +#define GL_PERFMON_RESULT_SIZE_AMD 0x8BC5 +#define GL_PERFMON_RESULT_AMD 0x8BC6 +typedef void (APIENTRYP PFNGLGETPERFMONITORGROUPSAMDPROC) (GLint 
*numGroups, GLsizei groupsSize, GLuint *groups); +typedef void (APIENTRYP PFNGLGETPERFMONITORCOUNTERSAMDPROC) (GLuint group, GLint *numCounters, GLint *maxActiveCounters, GLsizei counterSize, GLuint *counters); +typedef void (APIENTRYP PFNGLGETPERFMONITORGROUPSTRINGAMDPROC) (GLuint group, GLsizei bufSize, GLsizei *length, GLchar *groupString); +typedef void (APIENTRYP PFNGLGETPERFMONITORCOUNTERSTRINGAMDPROC) (GLuint group, GLuint counter, GLsizei bufSize, GLsizei *length, GLchar *counterString); +typedef void (APIENTRYP PFNGLGETPERFMONITORCOUNTERINFOAMDPROC) (GLuint group, GLuint counter, GLenum pname, void *data); +typedef void (APIENTRYP PFNGLGENPERFMONITORSAMDPROC) (GLsizei n, GLuint *monitors); +typedef void (APIENTRYP PFNGLDELETEPERFMONITORSAMDPROC) (GLsizei n, GLuint *monitors); +typedef void (APIENTRYP PFNGLSELECTPERFMONITORCOUNTERSAMDPROC) (GLuint monitor, GLboolean enable, GLuint group, GLint numCounters, GLuint *counterList); +typedef void (APIENTRYP PFNGLBEGINPERFMONITORAMDPROC) (GLuint monitor); +typedef void (APIENTRYP PFNGLENDPERFMONITORAMDPROC) (GLuint monitor); +typedef void (APIENTRYP PFNGLGETPERFMONITORCOUNTERDATAAMDPROC) (GLuint monitor, GLenum pname, GLsizei dataSize, GLuint *data, GLint *bytesWritten); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGetPerfMonitorGroupsAMD (GLint *numGroups, GLsizei groupsSize, GLuint *groups); +GLAPI void APIENTRY glGetPerfMonitorCountersAMD (GLuint group, GLint *numCounters, GLint *maxActiveCounters, GLsizei counterSize, GLuint *counters); +GLAPI void APIENTRY glGetPerfMonitorGroupStringAMD (GLuint group, GLsizei bufSize, GLsizei *length, GLchar *groupString); +GLAPI void APIENTRY glGetPerfMonitorCounterStringAMD (GLuint group, GLuint counter, GLsizei bufSize, GLsizei *length, GLchar *counterString); +GLAPI void APIENTRY glGetPerfMonitorCounterInfoAMD (GLuint group, GLuint counter, GLenum pname, void *data); +GLAPI void APIENTRY glGenPerfMonitorsAMD (GLsizei n, GLuint *monitors); +GLAPI void APIENTRY glDeletePerfMonitorsAMD (GLsizei n, GLuint *monitors); +GLAPI void APIENTRY glSelectPerfMonitorCountersAMD (GLuint monitor, GLboolean enable, GLuint group, GLint numCounters, GLuint *counterList); +GLAPI void APIENTRY glBeginPerfMonitorAMD (GLuint monitor); +GLAPI void APIENTRY glEndPerfMonitorAMD (GLuint monitor); +GLAPI void APIENTRY glGetPerfMonitorCounterDataAMD (GLuint monitor, GLenum pname, GLsizei dataSize, GLuint *data, GLint *bytesWritten); +#endif +#endif /* GL_AMD_performance_monitor */ + +#ifndef GL_AMD_pinned_memory +#define GL_AMD_pinned_memory 1 +#define GL_EXTERNAL_VIRTUAL_MEMORY_BUFFER_AMD 0x9160 +#endif /* GL_AMD_pinned_memory */ + +#ifndef GL_AMD_query_buffer_object +#define GL_AMD_query_buffer_object 1 +#define GL_QUERY_BUFFER_AMD 0x9192 +#define GL_QUERY_BUFFER_BINDING_AMD 0x9193 +#define GL_QUERY_RESULT_NO_WAIT_AMD 0x9194 +#endif /* GL_AMD_query_buffer_object */ + +#ifndef GL_AMD_sample_positions +#define GL_AMD_sample_positions 1 +typedef void (APIENTRYP PFNGLSETMULTISAMPLEFVAMDPROC) (GLenum pname, GLuint index, const GLfloat *val); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSetMultisamplefvAMD (GLenum pname, GLuint index, const GLfloat *val); +#endif +#endif /* GL_AMD_sample_positions */ + +#ifndef GL_AMD_seamless_cubemap_per_texture +#define GL_AMD_seamless_cubemap_per_texture 1 +#endif /* GL_AMD_seamless_cubemap_per_texture */ + +#ifndef GL_AMD_shader_atomic_counter_ops +#define GL_AMD_shader_atomic_counter_ops 1 +#endif /* GL_AMD_shader_atomic_counter_ops */ + +#ifndef GL_AMD_shader_ballot +#define 
GL_AMD_shader_ballot 1 +#endif /* GL_AMD_shader_ballot */ + +#ifndef GL_AMD_shader_explicit_vertex_parameter +#define GL_AMD_shader_explicit_vertex_parameter 1 +#endif /* GL_AMD_shader_explicit_vertex_parameter */ + +#ifndef GL_AMD_shader_gpu_shader_half_float_fetch +#define GL_AMD_shader_gpu_shader_half_float_fetch 1 +#endif /* GL_AMD_shader_gpu_shader_half_float_fetch */ + +#ifndef GL_AMD_shader_image_load_store_lod +#define GL_AMD_shader_image_load_store_lod 1 +#endif /* GL_AMD_shader_image_load_store_lod */ + +#ifndef GL_AMD_shader_stencil_export +#define GL_AMD_shader_stencil_export 1 +#endif /* GL_AMD_shader_stencil_export */ + +#ifndef GL_AMD_shader_trinary_minmax +#define GL_AMD_shader_trinary_minmax 1 +#endif /* GL_AMD_shader_trinary_minmax */ + +#ifndef GL_AMD_sparse_texture +#define GL_AMD_sparse_texture 1 +#define GL_VIRTUAL_PAGE_SIZE_X_AMD 0x9195 +#define GL_VIRTUAL_PAGE_SIZE_Y_AMD 0x9196 +#define GL_VIRTUAL_PAGE_SIZE_Z_AMD 0x9197 +#define GL_MAX_SPARSE_TEXTURE_SIZE_AMD 0x9198 +#define GL_MAX_SPARSE_3D_TEXTURE_SIZE_AMD 0x9199 +#define GL_MAX_SPARSE_ARRAY_TEXTURE_LAYERS 0x919A +#define GL_MIN_SPARSE_LEVEL_AMD 0x919B +#define GL_MIN_LOD_WARNING_AMD 0x919C +#define GL_TEXTURE_STORAGE_SPARSE_BIT_AMD 0x00000001 +typedef void (APIENTRYP PFNGLTEXSTORAGESPARSEAMDPROC) (GLenum target, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLsizei layers, GLbitfield flags); +typedef void (APIENTRYP PFNGLTEXTURESTORAGESPARSEAMDPROC) (GLuint texture, GLenum target, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLsizei layers, GLbitfield flags); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexStorageSparseAMD (GLenum target, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLsizei layers, GLbitfield flags); +GLAPI void APIENTRY glTextureStorageSparseAMD (GLuint texture, GLenum target, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLsizei layers, GLbitfield flags); +#endif +#endif /* GL_AMD_sparse_texture */ + +#ifndef GL_AMD_stencil_operation_extended +#define GL_AMD_stencil_operation_extended 1 +#define GL_SET_AMD 0x874A +#define GL_REPLACE_VALUE_AMD 0x874B +#define GL_STENCIL_OP_VALUE_AMD 0x874C +#define GL_STENCIL_BACK_OP_VALUE_AMD 0x874D +typedef void (APIENTRYP PFNGLSTENCILOPVALUEAMDPROC) (GLenum face, GLuint value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glStencilOpValueAMD (GLenum face, GLuint value); +#endif +#endif /* GL_AMD_stencil_operation_extended */ + +#ifndef GL_AMD_texture_gather_bias_lod +#define GL_AMD_texture_gather_bias_lod 1 +#endif /* GL_AMD_texture_gather_bias_lod */ + +#ifndef GL_AMD_texture_texture4 +#define GL_AMD_texture_texture4 1 +#endif /* GL_AMD_texture_texture4 */ + +#ifndef GL_AMD_transform_feedback3_lines_triangles +#define GL_AMD_transform_feedback3_lines_triangles 1 +#endif /* GL_AMD_transform_feedback3_lines_triangles */ + +#ifndef GL_AMD_transform_feedback4 +#define GL_AMD_transform_feedback4 1 +#define GL_STREAM_RASTERIZATION_AMD 0x91A0 +#endif /* GL_AMD_transform_feedback4 */ + +#ifndef GL_AMD_vertex_shader_layer +#define GL_AMD_vertex_shader_layer 1 +#endif /* GL_AMD_vertex_shader_layer */ + +#ifndef GL_AMD_vertex_shader_tessellator +#define GL_AMD_vertex_shader_tessellator 1 +#define GL_SAMPLER_BUFFER_AMD 0x9001 +#define GL_INT_SAMPLER_BUFFER_AMD 0x9002 +#define GL_UNSIGNED_INT_SAMPLER_BUFFER_AMD 0x9003 +#define GL_TESSELLATION_MODE_AMD 0x9004 +#define GL_TESSELLATION_FACTOR_AMD 0x9005 +#define GL_DISCRETE_AMD 0x9006 +#define GL_CONTINUOUS_AMD 0x9007 
+typedef void (APIENTRYP PFNGLTESSELLATIONFACTORAMDPROC) (GLfloat factor); +typedef void (APIENTRYP PFNGLTESSELLATIONMODEAMDPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTessellationFactorAMD (GLfloat factor); +GLAPI void APIENTRY glTessellationModeAMD (GLenum mode); +#endif +#endif /* GL_AMD_vertex_shader_tessellator */ + +#ifndef GL_AMD_vertex_shader_viewport_index +#define GL_AMD_vertex_shader_viewport_index 1 +#endif /* GL_AMD_vertex_shader_viewport_index */ + +#ifndef GL_APPLE_aux_depth_stencil +#define GL_APPLE_aux_depth_stencil 1 +#define GL_AUX_DEPTH_STENCIL_APPLE 0x8A14 +#endif /* GL_APPLE_aux_depth_stencil */ + +#ifndef GL_APPLE_client_storage +#define GL_APPLE_client_storage 1 +#define GL_UNPACK_CLIENT_STORAGE_APPLE 0x85B2 +#endif /* GL_APPLE_client_storage */ + +#ifndef GL_APPLE_element_array +#define GL_APPLE_element_array 1 +#define GL_ELEMENT_ARRAY_APPLE 0x8A0C +#define GL_ELEMENT_ARRAY_TYPE_APPLE 0x8A0D +#define GL_ELEMENT_ARRAY_POINTER_APPLE 0x8A0E +typedef void (APIENTRYP PFNGLELEMENTPOINTERAPPLEPROC) (GLenum type, const void *pointer); +typedef void (APIENTRYP PFNGLDRAWELEMENTARRAYAPPLEPROC) (GLenum mode, GLint first, GLsizei count); +typedef void (APIENTRYP PFNGLDRAWRANGEELEMENTARRAYAPPLEPROC) (GLenum mode, GLuint start, GLuint end, GLint first, GLsizei count); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTARRAYAPPLEPROC) (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +typedef void (APIENTRYP PFNGLMULTIDRAWRANGEELEMENTARRAYAPPLEPROC) (GLenum mode, GLuint start, GLuint end, const GLint *first, const GLsizei *count, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glElementPointerAPPLE (GLenum type, const void *pointer); +GLAPI void APIENTRY glDrawElementArrayAPPLE (GLenum mode, GLint first, GLsizei count); +GLAPI void APIENTRY glDrawRangeElementArrayAPPLE (GLenum mode, GLuint start, GLuint end, GLint first, GLsizei count); +GLAPI void APIENTRY glMultiDrawElementArrayAPPLE (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +GLAPI void APIENTRY glMultiDrawRangeElementArrayAPPLE (GLenum mode, GLuint start, GLuint end, const GLint *first, const GLsizei *count, GLsizei primcount); +#endif +#endif /* GL_APPLE_element_array */ + +#ifndef GL_APPLE_fence +#define GL_APPLE_fence 1 +#define GL_DRAW_PIXELS_APPLE 0x8A0A +#define GL_FENCE_APPLE 0x8A0B +typedef void (APIENTRYP PFNGLGENFENCESAPPLEPROC) (GLsizei n, GLuint *fences); +typedef void (APIENTRYP PFNGLDELETEFENCESAPPLEPROC) (GLsizei n, const GLuint *fences); +typedef void (APIENTRYP PFNGLSETFENCEAPPLEPROC) (GLuint fence); +typedef GLboolean (APIENTRYP PFNGLISFENCEAPPLEPROC) (GLuint fence); +typedef GLboolean (APIENTRYP PFNGLTESTFENCEAPPLEPROC) (GLuint fence); +typedef void (APIENTRYP PFNGLFINISHFENCEAPPLEPROC) (GLuint fence); +typedef GLboolean (APIENTRYP PFNGLTESTOBJECTAPPLEPROC) (GLenum object, GLuint name); +typedef void (APIENTRYP PFNGLFINISHOBJECTAPPLEPROC) (GLenum object, GLint name); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGenFencesAPPLE (GLsizei n, GLuint *fences); +GLAPI void APIENTRY glDeleteFencesAPPLE (GLsizei n, const GLuint *fences); +GLAPI void APIENTRY glSetFenceAPPLE (GLuint fence); +GLAPI GLboolean APIENTRY glIsFenceAPPLE (GLuint fence); +GLAPI GLboolean APIENTRY glTestFenceAPPLE (GLuint fence); +GLAPI void APIENTRY glFinishFenceAPPLE (GLuint fence); +GLAPI GLboolean APIENTRY glTestObjectAPPLE (GLenum object, GLuint name); +GLAPI void APIENTRY glFinishObjectAPPLE (GLenum object, GLint name); +#endif 
+#endif /* GL_APPLE_fence */ + +#ifndef GL_APPLE_float_pixels +#define GL_APPLE_float_pixels 1 +#define GL_HALF_APPLE 0x140B +#define GL_RGBA_FLOAT32_APPLE 0x8814 +#define GL_RGB_FLOAT32_APPLE 0x8815 +#define GL_ALPHA_FLOAT32_APPLE 0x8816 +#define GL_INTENSITY_FLOAT32_APPLE 0x8817 +#define GL_LUMINANCE_FLOAT32_APPLE 0x8818 +#define GL_LUMINANCE_ALPHA_FLOAT32_APPLE 0x8819 +#define GL_RGBA_FLOAT16_APPLE 0x881A +#define GL_RGB_FLOAT16_APPLE 0x881B +#define GL_ALPHA_FLOAT16_APPLE 0x881C +#define GL_INTENSITY_FLOAT16_APPLE 0x881D +#define GL_LUMINANCE_FLOAT16_APPLE 0x881E +#define GL_LUMINANCE_ALPHA_FLOAT16_APPLE 0x881F +#define GL_COLOR_FLOAT_APPLE 0x8A0F +#endif /* GL_APPLE_float_pixels */ + +#ifndef GL_APPLE_flush_buffer_range +#define GL_APPLE_flush_buffer_range 1 +#define GL_BUFFER_SERIALIZED_MODIFY_APPLE 0x8A12 +#define GL_BUFFER_FLUSHING_UNMAP_APPLE 0x8A13 +typedef void (APIENTRYP PFNGLBUFFERPARAMETERIAPPLEPROC) (GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEAPPLEPROC) (GLenum target, GLintptr offset, GLsizeiptr size); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBufferParameteriAPPLE (GLenum target, GLenum pname, GLint param); +GLAPI void APIENTRY glFlushMappedBufferRangeAPPLE (GLenum target, GLintptr offset, GLsizeiptr size); +#endif +#endif /* GL_APPLE_flush_buffer_range */ + +#ifndef GL_APPLE_object_purgeable +#define GL_APPLE_object_purgeable 1 +#define GL_BUFFER_OBJECT_APPLE 0x85B3 +#define GL_RELEASED_APPLE 0x8A19 +#define GL_VOLATILE_APPLE 0x8A1A +#define GL_RETAINED_APPLE 0x8A1B +#define GL_UNDEFINED_APPLE 0x8A1C +#define GL_PURGEABLE_APPLE 0x8A1D +typedef GLenum (APIENTRYP PFNGLOBJECTPURGEABLEAPPLEPROC) (GLenum objectType, GLuint name, GLenum option); +typedef GLenum (APIENTRYP PFNGLOBJECTUNPURGEABLEAPPLEPROC) (GLenum objectType, GLuint name, GLenum option); +typedef void (APIENTRYP PFNGLGETOBJECTPARAMETERIVAPPLEPROC) (GLenum objectType, GLuint name, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLenum APIENTRY glObjectPurgeableAPPLE (GLenum objectType, GLuint name, GLenum option); +GLAPI GLenum APIENTRY glObjectUnpurgeableAPPLE (GLenum objectType, GLuint name, GLenum option); +GLAPI void APIENTRY glGetObjectParameterivAPPLE (GLenum objectType, GLuint name, GLenum pname, GLint *params); +#endif +#endif /* GL_APPLE_object_purgeable */ + +#ifndef GL_APPLE_rgb_422 +#define GL_APPLE_rgb_422 1 +#define GL_RGB_422_APPLE 0x8A1F +#define GL_UNSIGNED_SHORT_8_8_APPLE 0x85BA +#define GL_UNSIGNED_SHORT_8_8_REV_APPLE 0x85BB +#define GL_RGB_RAW_422_APPLE 0x8A51 +#endif /* GL_APPLE_rgb_422 */ + +#ifndef GL_APPLE_row_bytes +#define GL_APPLE_row_bytes 1 +#define GL_PACK_ROW_BYTES_APPLE 0x8A15 +#define GL_UNPACK_ROW_BYTES_APPLE 0x8A16 +#endif /* GL_APPLE_row_bytes */ + +#ifndef GL_APPLE_specular_vector +#define GL_APPLE_specular_vector 1 +#define GL_LIGHT_MODEL_SPECULAR_VECTOR_APPLE 0x85B0 +#endif /* GL_APPLE_specular_vector */ + +#ifndef GL_APPLE_texture_range +#define GL_APPLE_texture_range 1 +#define GL_TEXTURE_RANGE_LENGTH_APPLE 0x85B7 +#define GL_TEXTURE_RANGE_POINTER_APPLE 0x85B8 +#define GL_TEXTURE_STORAGE_HINT_APPLE 0x85BC +#define GL_STORAGE_PRIVATE_APPLE 0x85BD +#define GL_STORAGE_CACHED_APPLE 0x85BE +#define GL_STORAGE_SHARED_APPLE 0x85BF +typedef void (APIENTRYP PFNGLTEXTURERANGEAPPLEPROC) (GLenum target, GLsizei length, const void *pointer); +typedef void (APIENTRYP PFNGLGETTEXPARAMETERPOINTERVAPPLEPROC) (GLenum target, GLenum pname, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY 
glTextureRangeAPPLE (GLenum target, GLsizei length, const void *pointer); +GLAPI void APIENTRY glGetTexParameterPointervAPPLE (GLenum target, GLenum pname, void **params); +#endif +#endif /* GL_APPLE_texture_range */ + +#ifndef GL_APPLE_transform_hint +#define GL_APPLE_transform_hint 1 +#define GL_TRANSFORM_HINT_APPLE 0x85B1 +#endif /* GL_APPLE_transform_hint */ + +#ifndef GL_APPLE_vertex_array_object +#define GL_APPLE_vertex_array_object 1 +#define GL_VERTEX_ARRAY_BINDING_APPLE 0x85B5 +typedef void (APIENTRYP PFNGLBINDVERTEXARRAYAPPLEPROC) (GLuint array); +typedef void (APIENTRYP PFNGLDELETEVERTEXARRAYSAPPLEPROC) (GLsizei n, const GLuint *arrays); +typedef void (APIENTRYP PFNGLGENVERTEXARRAYSAPPLEPROC) (GLsizei n, GLuint *arrays); +typedef GLboolean (APIENTRYP PFNGLISVERTEXARRAYAPPLEPROC) (GLuint array); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBindVertexArrayAPPLE (GLuint array); +GLAPI void APIENTRY glDeleteVertexArraysAPPLE (GLsizei n, const GLuint *arrays); +GLAPI void APIENTRY glGenVertexArraysAPPLE (GLsizei n, GLuint *arrays); +GLAPI GLboolean APIENTRY glIsVertexArrayAPPLE (GLuint array); +#endif +#endif /* GL_APPLE_vertex_array_object */ + +#ifndef GL_APPLE_vertex_array_range +#define GL_APPLE_vertex_array_range 1 +#define GL_VERTEX_ARRAY_RANGE_APPLE 0x851D +#define GL_VERTEX_ARRAY_RANGE_LENGTH_APPLE 0x851E +#define GL_VERTEX_ARRAY_STORAGE_HINT_APPLE 0x851F +#define GL_VERTEX_ARRAY_RANGE_POINTER_APPLE 0x8521 +#define GL_STORAGE_CLIENT_APPLE 0x85B4 +typedef void (APIENTRYP PFNGLVERTEXARRAYRANGEAPPLEPROC) (GLsizei length, void *pointer); +typedef void (APIENTRYP PFNGLFLUSHVERTEXARRAYRANGEAPPLEPROC) (GLsizei length, void *pointer); +typedef void (APIENTRYP PFNGLVERTEXARRAYPARAMETERIAPPLEPROC) (GLenum pname, GLint param); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexArrayRangeAPPLE (GLsizei length, void *pointer); +GLAPI void APIENTRY glFlushVertexArrayRangeAPPLE (GLsizei length, void *pointer); +GLAPI void APIENTRY glVertexArrayParameteriAPPLE (GLenum pname, GLint param); +#endif +#endif /* GL_APPLE_vertex_array_range */ + +#ifndef GL_APPLE_vertex_program_evaluators +#define GL_APPLE_vertex_program_evaluators 1 +#define GL_VERTEX_ATTRIB_MAP1_APPLE 0x8A00 +#define GL_VERTEX_ATTRIB_MAP2_APPLE 0x8A01 +#define GL_VERTEX_ATTRIB_MAP1_SIZE_APPLE 0x8A02 +#define GL_VERTEX_ATTRIB_MAP1_COEFF_APPLE 0x8A03 +#define GL_VERTEX_ATTRIB_MAP1_ORDER_APPLE 0x8A04 +#define GL_VERTEX_ATTRIB_MAP1_DOMAIN_APPLE 0x8A05 +#define GL_VERTEX_ATTRIB_MAP2_SIZE_APPLE 0x8A06 +#define GL_VERTEX_ATTRIB_MAP2_COEFF_APPLE 0x8A07 +#define GL_VERTEX_ATTRIB_MAP2_ORDER_APPLE 0x8A08 +#define GL_VERTEX_ATTRIB_MAP2_DOMAIN_APPLE 0x8A09 +typedef void (APIENTRYP PFNGLENABLEVERTEXATTRIBAPPLEPROC) (GLuint index, GLenum pname); +typedef void (APIENTRYP PFNGLDISABLEVERTEXATTRIBAPPLEPROC) (GLuint index, GLenum pname); +typedef GLboolean (APIENTRYP PFNGLISVERTEXATTRIBENABLEDAPPLEPROC) (GLuint index, GLenum pname); +typedef void (APIENTRYP PFNGLMAPVERTEXATTRIB1DAPPLEPROC) (GLuint index, GLuint size, GLdouble u1, GLdouble u2, GLint stride, GLint order, const GLdouble *points); +typedef void (APIENTRYP PFNGLMAPVERTEXATTRIB1FAPPLEPROC) (GLuint index, GLuint size, GLfloat u1, GLfloat u2, GLint stride, GLint order, const GLfloat *points); +typedef void (APIENTRYP PFNGLMAPVERTEXATTRIB2DAPPLEPROC) (GLuint index, GLuint size, GLdouble u1, GLdouble u2, GLint ustride, GLint uorder, GLdouble v1, GLdouble v2, GLint vstride, GLint vorder, const GLdouble *points); +typedef void (APIENTRYP PFNGLMAPVERTEXATTRIB2FAPPLEPROC) (GLuint 
index, GLuint size, GLfloat u1, GLfloat u2, GLint ustride, GLint uorder, GLfloat v1, GLfloat v2, GLint vstride, GLint vorder, const GLfloat *points); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glEnableVertexAttribAPPLE (GLuint index, GLenum pname); +GLAPI void APIENTRY glDisableVertexAttribAPPLE (GLuint index, GLenum pname); +GLAPI GLboolean APIENTRY glIsVertexAttribEnabledAPPLE (GLuint index, GLenum pname); +GLAPI void APIENTRY glMapVertexAttrib1dAPPLE (GLuint index, GLuint size, GLdouble u1, GLdouble u2, GLint stride, GLint order, const GLdouble *points); +GLAPI void APIENTRY glMapVertexAttrib1fAPPLE (GLuint index, GLuint size, GLfloat u1, GLfloat u2, GLint stride, GLint order, const GLfloat *points); +GLAPI void APIENTRY glMapVertexAttrib2dAPPLE (GLuint index, GLuint size, GLdouble u1, GLdouble u2, GLint ustride, GLint uorder, GLdouble v1, GLdouble v2, GLint vstride, GLint vorder, const GLdouble *points); +GLAPI void APIENTRY glMapVertexAttrib2fAPPLE (GLuint index, GLuint size, GLfloat u1, GLfloat u2, GLint ustride, GLint uorder, GLfloat v1, GLfloat v2, GLint vstride, GLint vorder, const GLfloat *points); +#endif +#endif /* GL_APPLE_vertex_program_evaluators */ + +#ifndef GL_APPLE_ycbcr_422 +#define GL_APPLE_ycbcr_422 1 +#define GL_YCBCR_422_APPLE 0x85B9 +#endif /* GL_APPLE_ycbcr_422 */ + +#ifndef GL_ATI_draw_buffers +#define GL_ATI_draw_buffers 1 +#define GL_MAX_DRAW_BUFFERS_ATI 0x8824 +#define GL_DRAW_BUFFER0_ATI 0x8825 +#define GL_DRAW_BUFFER1_ATI 0x8826 +#define GL_DRAW_BUFFER2_ATI 0x8827 +#define GL_DRAW_BUFFER3_ATI 0x8828 +#define GL_DRAW_BUFFER4_ATI 0x8829 +#define GL_DRAW_BUFFER5_ATI 0x882A +#define GL_DRAW_BUFFER6_ATI 0x882B +#define GL_DRAW_BUFFER7_ATI 0x882C +#define GL_DRAW_BUFFER8_ATI 0x882D +#define GL_DRAW_BUFFER9_ATI 0x882E +#define GL_DRAW_BUFFER10_ATI 0x882F +#define GL_DRAW_BUFFER11_ATI 0x8830 +#define GL_DRAW_BUFFER12_ATI 0x8831 +#define GL_DRAW_BUFFER13_ATI 0x8832 +#define GL_DRAW_BUFFER14_ATI 0x8833 +#define GL_DRAW_BUFFER15_ATI 0x8834 +typedef void (APIENTRYP PFNGLDRAWBUFFERSATIPROC) (GLsizei n, const GLenum *bufs); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawBuffersATI (GLsizei n, const GLenum *bufs); +#endif +#endif /* GL_ATI_draw_buffers */ + +#ifndef GL_ATI_element_array +#define GL_ATI_element_array 1 +#define GL_ELEMENT_ARRAY_ATI 0x8768 +#define GL_ELEMENT_ARRAY_TYPE_ATI 0x8769 +#define GL_ELEMENT_ARRAY_POINTER_ATI 0x876A +typedef void (APIENTRYP PFNGLELEMENTPOINTERATIPROC) (GLenum type, const void *pointer); +typedef void (APIENTRYP PFNGLDRAWELEMENTARRAYATIPROC) (GLenum mode, GLsizei count); +typedef void (APIENTRYP PFNGLDRAWRANGEELEMENTARRAYATIPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glElementPointerATI (GLenum type, const void *pointer); +GLAPI void APIENTRY glDrawElementArrayATI (GLenum mode, GLsizei count); +GLAPI void APIENTRY glDrawRangeElementArrayATI (GLenum mode, GLuint start, GLuint end, GLsizei count); +#endif +#endif /* GL_ATI_element_array */ + +#ifndef GL_ATI_envmap_bumpmap +#define GL_ATI_envmap_bumpmap 1 +#define GL_BUMP_ROT_MATRIX_ATI 0x8775 +#define GL_BUMP_ROT_MATRIX_SIZE_ATI 0x8776 +#define GL_BUMP_NUM_TEX_UNITS_ATI 0x8777 +#define GL_BUMP_TEX_UNITS_ATI 0x8778 +#define GL_DUDV_ATI 0x8779 +#define GL_DU8DV8_ATI 0x877A +#define GL_BUMP_ENVMAP_ATI 0x877B +#define GL_BUMP_TARGET_ATI 0x877C +typedef void (APIENTRYP PFNGLTEXBUMPPARAMETERIVATIPROC) (GLenum pname, const GLint *param); +typedef void (APIENTRYP PFNGLTEXBUMPPARAMETERFVATIPROC) (GLenum pname, 
const GLfloat *param); +typedef void (APIENTRYP PFNGLGETTEXBUMPPARAMETERIVATIPROC) (GLenum pname, GLint *param); +typedef void (APIENTRYP PFNGLGETTEXBUMPPARAMETERFVATIPROC) (GLenum pname, GLfloat *param); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexBumpParameterivATI (GLenum pname, const GLint *param); +GLAPI void APIENTRY glTexBumpParameterfvATI (GLenum pname, const GLfloat *param); +GLAPI void APIENTRY glGetTexBumpParameterivATI (GLenum pname, GLint *param); +GLAPI void APIENTRY glGetTexBumpParameterfvATI (GLenum pname, GLfloat *param); +#endif +#endif /* GL_ATI_envmap_bumpmap */ + +#ifndef GL_ATI_fragment_shader +#define GL_ATI_fragment_shader 1 +#define GL_FRAGMENT_SHADER_ATI 0x8920 +#define GL_REG_0_ATI 0x8921 +#define GL_REG_1_ATI 0x8922 +#define GL_REG_2_ATI 0x8923 +#define GL_REG_3_ATI 0x8924 +#define GL_REG_4_ATI 0x8925 +#define GL_REG_5_ATI 0x8926 +#define GL_REG_6_ATI 0x8927 +#define GL_REG_7_ATI 0x8928 +#define GL_REG_8_ATI 0x8929 +#define GL_REG_9_ATI 0x892A +#define GL_REG_10_ATI 0x892B +#define GL_REG_11_ATI 0x892C +#define GL_REG_12_ATI 0x892D +#define GL_REG_13_ATI 0x892E +#define GL_REG_14_ATI 0x892F +#define GL_REG_15_ATI 0x8930 +#define GL_REG_16_ATI 0x8931 +#define GL_REG_17_ATI 0x8932 +#define GL_REG_18_ATI 0x8933 +#define GL_REG_19_ATI 0x8934 +#define GL_REG_20_ATI 0x8935 +#define GL_REG_21_ATI 0x8936 +#define GL_REG_22_ATI 0x8937 +#define GL_REG_23_ATI 0x8938 +#define GL_REG_24_ATI 0x8939 +#define GL_REG_25_ATI 0x893A +#define GL_REG_26_ATI 0x893B +#define GL_REG_27_ATI 0x893C +#define GL_REG_28_ATI 0x893D +#define GL_REG_29_ATI 0x893E +#define GL_REG_30_ATI 0x893F +#define GL_REG_31_ATI 0x8940 +#define GL_CON_0_ATI 0x8941 +#define GL_CON_1_ATI 0x8942 +#define GL_CON_2_ATI 0x8943 +#define GL_CON_3_ATI 0x8944 +#define GL_CON_4_ATI 0x8945 +#define GL_CON_5_ATI 0x8946 +#define GL_CON_6_ATI 0x8947 +#define GL_CON_7_ATI 0x8948 +#define GL_CON_8_ATI 0x8949 +#define GL_CON_9_ATI 0x894A +#define GL_CON_10_ATI 0x894B +#define GL_CON_11_ATI 0x894C +#define GL_CON_12_ATI 0x894D +#define GL_CON_13_ATI 0x894E +#define GL_CON_14_ATI 0x894F +#define GL_CON_15_ATI 0x8950 +#define GL_CON_16_ATI 0x8951 +#define GL_CON_17_ATI 0x8952 +#define GL_CON_18_ATI 0x8953 +#define GL_CON_19_ATI 0x8954 +#define GL_CON_20_ATI 0x8955 +#define GL_CON_21_ATI 0x8956 +#define GL_CON_22_ATI 0x8957 +#define GL_CON_23_ATI 0x8958 +#define GL_CON_24_ATI 0x8959 +#define GL_CON_25_ATI 0x895A +#define GL_CON_26_ATI 0x895B +#define GL_CON_27_ATI 0x895C +#define GL_CON_28_ATI 0x895D +#define GL_CON_29_ATI 0x895E +#define GL_CON_30_ATI 0x895F +#define GL_CON_31_ATI 0x8960 +#define GL_MOV_ATI 0x8961 +#define GL_ADD_ATI 0x8963 +#define GL_MUL_ATI 0x8964 +#define GL_SUB_ATI 0x8965 +#define GL_DOT3_ATI 0x8966 +#define GL_DOT4_ATI 0x8967 +#define GL_MAD_ATI 0x8968 +#define GL_LERP_ATI 0x8969 +#define GL_CND_ATI 0x896A +#define GL_CND0_ATI 0x896B +#define GL_DOT2_ADD_ATI 0x896C +#define GL_SECONDARY_INTERPOLATOR_ATI 0x896D +#define GL_NUM_FRAGMENT_REGISTERS_ATI 0x896E +#define GL_NUM_FRAGMENT_CONSTANTS_ATI 0x896F +#define GL_NUM_PASSES_ATI 0x8970 +#define GL_NUM_INSTRUCTIONS_PER_PASS_ATI 0x8971 +#define GL_NUM_INSTRUCTIONS_TOTAL_ATI 0x8972 +#define GL_NUM_INPUT_INTERPOLATOR_COMPONENTS_ATI 0x8973 +#define GL_NUM_LOOPBACK_COMPONENTS_ATI 0x8974 +#define GL_COLOR_ALPHA_PAIRING_ATI 0x8975 +#define GL_SWIZZLE_STR_ATI 0x8976 +#define GL_SWIZZLE_STQ_ATI 0x8977 +#define GL_SWIZZLE_STR_DR_ATI 0x8978 +#define GL_SWIZZLE_STQ_DQ_ATI 0x8979 +#define GL_SWIZZLE_STRQ_ATI 0x897A +#define GL_SWIZZLE_STRQ_DQ_ATI 0x897B 
+#define GL_RED_BIT_ATI 0x00000001 +#define GL_GREEN_BIT_ATI 0x00000002 +#define GL_BLUE_BIT_ATI 0x00000004 +#define GL_2X_BIT_ATI 0x00000001 +#define GL_4X_BIT_ATI 0x00000002 +#define GL_8X_BIT_ATI 0x00000004 +#define GL_HALF_BIT_ATI 0x00000008 +#define GL_QUARTER_BIT_ATI 0x00000010 +#define GL_EIGHTH_BIT_ATI 0x00000020 +#define GL_SATURATE_BIT_ATI 0x00000040 +#define GL_COMP_BIT_ATI 0x00000002 +#define GL_NEGATE_BIT_ATI 0x00000004 +#define GL_BIAS_BIT_ATI 0x00000008 +typedef GLuint (APIENTRYP PFNGLGENFRAGMENTSHADERSATIPROC) (GLuint range); +typedef void (APIENTRYP PFNGLBINDFRAGMENTSHADERATIPROC) (GLuint id); +typedef void (APIENTRYP PFNGLDELETEFRAGMENTSHADERATIPROC) (GLuint id); +typedef void (APIENTRYP PFNGLBEGINFRAGMENTSHADERATIPROC) (void); +typedef void (APIENTRYP PFNGLENDFRAGMENTSHADERATIPROC) (void); +typedef void (APIENTRYP PFNGLPASSTEXCOORDATIPROC) (GLuint dst, GLuint coord, GLenum swizzle); +typedef void (APIENTRYP PFNGLSAMPLEMAPATIPROC) (GLuint dst, GLuint interp, GLenum swizzle); +typedef void (APIENTRYP PFNGLCOLORFRAGMENTOP1ATIPROC) (GLenum op, GLuint dst, GLuint dstMask, GLuint dstMod, GLuint arg1, GLuint arg1Rep, GLuint arg1Mod); +typedef void (APIENTRYP PFNGLCOLORFRAGMENTOP2ATIPROC) (GLenum op, GLuint dst, GLuint dstMask, GLuint dstMod, GLuint arg1, GLuint arg1Rep, GLuint arg1Mod, GLuint arg2, GLuint arg2Rep, GLuint arg2Mod); +typedef void (APIENTRYP PFNGLCOLORFRAGMENTOP3ATIPROC) (GLenum op, GLuint dst, GLuint dstMask, GLuint dstMod, GLuint arg1, GLuint arg1Rep, GLuint arg1Mod, GLuint arg2, GLuint arg2Rep, GLuint arg2Mod, GLuint arg3, GLuint arg3Rep, GLuint arg3Mod); +typedef void (APIENTRYP PFNGLALPHAFRAGMENTOP1ATIPROC) (GLenum op, GLuint dst, GLuint dstMod, GLuint arg1, GLuint arg1Rep, GLuint arg1Mod); +typedef void (APIENTRYP PFNGLALPHAFRAGMENTOP2ATIPROC) (GLenum op, GLuint dst, GLuint dstMod, GLuint arg1, GLuint arg1Rep, GLuint arg1Mod, GLuint arg2, GLuint arg2Rep, GLuint arg2Mod); +typedef void (APIENTRYP PFNGLALPHAFRAGMENTOP3ATIPROC) (GLenum op, GLuint dst, GLuint dstMod, GLuint arg1, GLuint arg1Rep, GLuint arg1Mod, GLuint arg2, GLuint arg2Rep, GLuint arg2Mod, GLuint arg3, GLuint arg3Rep, GLuint arg3Mod); +typedef void (APIENTRYP PFNGLSETFRAGMENTSHADERCONSTANTATIPROC) (GLuint dst, const GLfloat *value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLuint APIENTRY glGenFragmentShadersATI (GLuint range); +GLAPI void APIENTRY glBindFragmentShaderATI (GLuint id); +GLAPI void APIENTRY glDeleteFragmentShaderATI (GLuint id); +GLAPI void APIENTRY glBeginFragmentShaderATI (void); +GLAPI void APIENTRY glEndFragmentShaderATI (void); +GLAPI void APIENTRY glPassTexCoordATI (GLuint dst, GLuint coord, GLenum swizzle); +GLAPI void APIENTRY glSampleMapATI (GLuint dst, GLuint interp, GLenum swizzle); +GLAPI void APIENTRY glColorFragmentOp1ATI (GLenum op, GLuint dst, GLuint dstMask, GLuint dstMod, GLuint arg1, GLuint arg1Rep, GLuint arg1Mod); +GLAPI void APIENTRY glColorFragmentOp2ATI (GLenum op, GLuint dst, GLuint dstMask, GLuint dstMod, GLuint arg1, GLuint arg1Rep, GLuint arg1Mod, GLuint arg2, GLuint arg2Rep, GLuint arg2Mod); +GLAPI void APIENTRY glColorFragmentOp3ATI (GLenum op, GLuint dst, GLuint dstMask, GLuint dstMod, GLuint arg1, GLuint arg1Rep, GLuint arg1Mod, GLuint arg2, GLuint arg2Rep, GLuint arg2Mod, GLuint arg3, GLuint arg3Rep, GLuint arg3Mod); +GLAPI void APIENTRY glAlphaFragmentOp1ATI (GLenum op, GLuint dst, GLuint dstMod, GLuint arg1, GLuint arg1Rep, GLuint arg1Mod); +GLAPI void APIENTRY glAlphaFragmentOp2ATI (GLenum op, GLuint dst, GLuint dstMod, GLuint arg1, GLuint arg1Rep, 
GLuint arg1Mod, GLuint arg2, GLuint arg2Rep, GLuint arg2Mod); +GLAPI void APIENTRY glAlphaFragmentOp3ATI (GLenum op, GLuint dst, GLuint dstMod, GLuint arg1, GLuint arg1Rep, GLuint arg1Mod, GLuint arg2, GLuint arg2Rep, GLuint arg2Mod, GLuint arg3, GLuint arg3Rep, GLuint arg3Mod); +GLAPI void APIENTRY glSetFragmentShaderConstantATI (GLuint dst, const GLfloat *value); +#endif +#endif /* GL_ATI_fragment_shader */ + +#ifndef GL_ATI_map_object_buffer +#define GL_ATI_map_object_buffer 1 +typedef void *(APIENTRYP PFNGLMAPOBJECTBUFFERATIPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLUNMAPOBJECTBUFFERATIPROC) (GLuint buffer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void *APIENTRY glMapObjectBufferATI (GLuint buffer); +GLAPI void APIENTRY glUnmapObjectBufferATI (GLuint buffer); +#endif +#endif /* GL_ATI_map_object_buffer */ + +#ifndef GL_ATI_meminfo +#define GL_ATI_meminfo 1 +#define GL_VBO_FREE_MEMORY_ATI 0x87FB +#define GL_TEXTURE_FREE_MEMORY_ATI 0x87FC +#define GL_RENDERBUFFER_FREE_MEMORY_ATI 0x87FD +#endif /* GL_ATI_meminfo */ + +#ifndef GL_ATI_pixel_format_float +#define GL_ATI_pixel_format_float 1 +#define GL_RGBA_FLOAT_MODE_ATI 0x8820 +#define GL_COLOR_CLEAR_UNCLAMPED_VALUE_ATI 0x8835 +#endif /* GL_ATI_pixel_format_float */ + +#ifndef GL_ATI_pn_triangles +#define GL_ATI_pn_triangles 1 +#define GL_PN_TRIANGLES_ATI 0x87F0 +#define GL_MAX_PN_TRIANGLES_TESSELATION_LEVEL_ATI 0x87F1 +#define GL_PN_TRIANGLES_POINT_MODE_ATI 0x87F2 +#define GL_PN_TRIANGLES_NORMAL_MODE_ATI 0x87F3 +#define GL_PN_TRIANGLES_TESSELATION_LEVEL_ATI 0x87F4 +#define GL_PN_TRIANGLES_POINT_MODE_LINEAR_ATI 0x87F5 +#define GL_PN_TRIANGLES_POINT_MODE_CUBIC_ATI 0x87F6 +#define GL_PN_TRIANGLES_NORMAL_MODE_LINEAR_ATI 0x87F7 +#define GL_PN_TRIANGLES_NORMAL_MODE_QUADRATIC_ATI 0x87F8 +typedef void (APIENTRYP PFNGLPNTRIANGLESIATIPROC) (GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLPNTRIANGLESFATIPROC) (GLenum pname, GLfloat param); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPNTrianglesiATI (GLenum pname, GLint param); +GLAPI void APIENTRY glPNTrianglesfATI (GLenum pname, GLfloat param); +#endif +#endif /* GL_ATI_pn_triangles */ + +#ifndef GL_ATI_separate_stencil +#define GL_ATI_separate_stencil 1 +#define GL_STENCIL_BACK_FUNC_ATI 0x8800 +#define GL_STENCIL_BACK_FAIL_ATI 0x8801 +#define GL_STENCIL_BACK_PASS_DEPTH_FAIL_ATI 0x8802 +#define GL_STENCIL_BACK_PASS_DEPTH_PASS_ATI 0x8803 +typedef void (APIENTRYP PFNGLSTENCILOPSEPARATEATIPROC) (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +typedef void (APIENTRYP PFNGLSTENCILFUNCSEPARATEATIPROC) (GLenum frontfunc, GLenum backfunc, GLint ref, GLuint mask); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glStencilOpSeparateATI (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +GLAPI void APIENTRY glStencilFuncSeparateATI (GLenum frontfunc, GLenum backfunc, GLint ref, GLuint mask); +#endif +#endif /* GL_ATI_separate_stencil */ + +#ifndef GL_ATI_text_fragment_shader +#define GL_ATI_text_fragment_shader 1 +#define GL_TEXT_FRAGMENT_SHADER_ATI 0x8200 +#endif /* GL_ATI_text_fragment_shader */ + +#ifndef GL_ATI_texture_env_combine3 +#define GL_ATI_texture_env_combine3 1 +#define GL_MODULATE_ADD_ATI 0x8744 +#define GL_MODULATE_SIGNED_ADD_ATI 0x8745 +#define GL_MODULATE_SUBTRACT_ATI 0x8746 +#endif /* GL_ATI_texture_env_combine3 */ + +#ifndef GL_ATI_texture_float +#define GL_ATI_texture_float 1 +#define GL_RGBA_FLOAT32_ATI 0x8814 +#define GL_RGB_FLOAT32_ATI 0x8815 +#define GL_ALPHA_FLOAT32_ATI 0x8816 +#define GL_INTENSITY_FLOAT32_ATI 0x8817 +#define 
GL_LUMINANCE_FLOAT32_ATI 0x8818 +#define GL_LUMINANCE_ALPHA_FLOAT32_ATI 0x8819 +#define GL_RGBA_FLOAT16_ATI 0x881A +#define GL_RGB_FLOAT16_ATI 0x881B +#define GL_ALPHA_FLOAT16_ATI 0x881C +#define GL_INTENSITY_FLOAT16_ATI 0x881D +#define GL_LUMINANCE_FLOAT16_ATI 0x881E +#define GL_LUMINANCE_ALPHA_FLOAT16_ATI 0x881F +#endif /* GL_ATI_texture_float */ + +#ifndef GL_ATI_texture_mirror_once +#define GL_ATI_texture_mirror_once 1 +#define GL_MIRROR_CLAMP_ATI 0x8742 +#define GL_MIRROR_CLAMP_TO_EDGE_ATI 0x8743 +#endif /* GL_ATI_texture_mirror_once */ + +#ifndef GL_ATI_vertex_array_object +#define GL_ATI_vertex_array_object 1 +#define GL_STATIC_ATI 0x8760 +#define GL_DYNAMIC_ATI 0x8761 +#define GL_PRESERVE_ATI 0x8762 +#define GL_DISCARD_ATI 0x8763 +#define GL_OBJECT_BUFFER_SIZE_ATI 0x8764 +#define GL_OBJECT_BUFFER_USAGE_ATI 0x8765 +#define GL_ARRAY_OBJECT_BUFFER_ATI 0x8766 +#define GL_ARRAY_OBJECT_OFFSET_ATI 0x8767 +typedef GLuint (APIENTRYP PFNGLNEWOBJECTBUFFERATIPROC) (GLsizei size, const void *pointer, GLenum usage); +typedef GLboolean (APIENTRYP PFNGLISOBJECTBUFFERATIPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLUPDATEOBJECTBUFFERATIPROC) (GLuint buffer, GLuint offset, GLsizei size, const void *pointer, GLenum preserve); +typedef void (APIENTRYP PFNGLGETOBJECTBUFFERFVATIPROC) (GLuint buffer, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETOBJECTBUFFERIVATIPROC) (GLuint buffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLFREEOBJECTBUFFERATIPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLARRAYOBJECTATIPROC) (GLenum array, GLint size, GLenum type, GLsizei stride, GLuint buffer, GLuint offset); +typedef void (APIENTRYP PFNGLGETARRAYOBJECTFVATIPROC) (GLenum array, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETARRAYOBJECTIVATIPROC) (GLenum array, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLVARIANTARRAYOBJECTATIPROC) (GLuint id, GLenum type, GLsizei stride, GLuint buffer, GLuint offset); +typedef void (APIENTRYP PFNGLGETVARIANTARRAYOBJECTFVATIPROC) (GLuint id, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETVARIANTARRAYOBJECTIVATIPROC) (GLuint id, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLuint APIENTRY glNewObjectBufferATI (GLsizei size, const void *pointer, GLenum usage); +GLAPI GLboolean APIENTRY glIsObjectBufferATI (GLuint buffer); +GLAPI void APIENTRY glUpdateObjectBufferATI (GLuint buffer, GLuint offset, GLsizei size, const void *pointer, GLenum preserve); +GLAPI void APIENTRY glGetObjectBufferfvATI (GLuint buffer, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetObjectBufferivATI (GLuint buffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glFreeObjectBufferATI (GLuint buffer); +GLAPI void APIENTRY glArrayObjectATI (GLenum array, GLint size, GLenum type, GLsizei stride, GLuint buffer, GLuint offset); +GLAPI void APIENTRY glGetArrayObjectfvATI (GLenum array, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetArrayObjectivATI (GLenum array, GLenum pname, GLint *params); +GLAPI void APIENTRY glVariantArrayObjectATI (GLuint id, GLenum type, GLsizei stride, GLuint buffer, GLuint offset); +GLAPI void APIENTRY glGetVariantArrayObjectfvATI (GLuint id, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetVariantArrayObjectivATI (GLuint id, GLenum pname, GLint *params); +#endif +#endif /* GL_ATI_vertex_array_object */ + +#ifndef GL_ATI_vertex_attrib_array_object +#define GL_ATI_vertex_attrib_array_object 1 +typedef void (APIENTRYP 
PFNGLVERTEXATTRIBARRAYOBJECTATIPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, GLuint buffer, GLuint offset); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBARRAYOBJECTFVATIPROC) (GLuint index, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBARRAYOBJECTIVATIPROC) (GLuint index, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexAttribArrayObjectATI (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, GLuint buffer, GLuint offset); +GLAPI void APIENTRY glGetVertexAttribArrayObjectfvATI (GLuint index, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetVertexAttribArrayObjectivATI (GLuint index, GLenum pname, GLint *params); +#endif +#endif /* GL_ATI_vertex_attrib_array_object */ + +#ifndef GL_ATI_vertex_streams +#define GL_ATI_vertex_streams 1 +#define GL_MAX_VERTEX_STREAMS_ATI 0x876B +#define GL_VERTEX_STREAM0_ATI 0x876C +#define GL_VERTEX_STREAM1_ATI 0x876D +#define GL_VERTEX_STREAM2_ATI 0x876E +#define GL_VERTEX_STREAM3_ATI 0x876F +#define GL_VERTEX_STREAM4_ATI 0x8770 +#define GL_VERTEX_STREAM5_ATI 0x8771 +#define GL_VERTEX_STREAM6_ATI 0x8772 +#define GL_VERTEX_STREAM7_ATI 0x8773 +#define GL_VERTEX_SOURCE_ATI 0x8774 +typedef void (APIENTRYP PFNGLVERTEXSTREAM1SATIPROC) (GLenum stream, GLshort x); +typedef void (APIENTRYP PFNGLVERTEXSTREAM1SVATIPROC) (GLenum stream, const GLshort *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM1IATIPROC) (GLenum stream, GLint x); +typedef void (APIENTRYP PFNGLVERTEXSTREAM1IVATIPROC) (GLenum stream, const GLint *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM1FATIPROC) (GLenum stream, GLfloat x); +typedef void (APIENTRYP PFNGLVERTEXSTREAM1FVATIPROC) (GLenum stream, const GLfloat *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM1DATIPROC) (GLenum stream, GLdouble x); +typedef void (APIENTRYP PFNGLVERTEXSTREAM1DVATIPROC) (GLenum stream, const GLdouble *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM2SATIPROC) (GLenum stream, GLshort x, GLshort y); +typedef void (APIENTRYP PFNGLVERTEXSTREAM2SVATIPROC) (GLenum stream, const GLshort *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM2IATIPROC) (GLenum stream, GLint x, GLint y); +typedef void (APIENTRYP PFNGLVERTEXSTREAM2IVATIPROC) (GLenum stream, const GLint *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM2FATIPROC) (GLenum stream, GLfloat x, GLfloat y); +typedef void (APIENTRYP PFNGLVERTEXSTREAM2FVATIPROC) (GLenum stream, const GLfloat *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM2DATIPROC) (GLenum stream, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLVERTEXSTREAM2DVATIPROC) (GLenum stream, const GLdouble *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM3SATIPROC) (GLenum stream, GLshort x, GLshort y, GLshort z); +typedef void (APIENTRYP PFNGLVERTEXSTREAM3SVATIPROC) (GLenum stream, const GLshort *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM3IATIPROC) (GLenum stream, GLint x, GLint y, GLint z); +typedef void (APIENTRYP PFNGLVERTEXSTREAM3IVATIPROC) (GLenum stream, const GLint *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM3FATIPROC) (GLenum stream, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLVERTEXSTREAM3FVATIPROC) (GLenum stream, const GLfloat *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM3DATIPROC) (GLenum stream, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLVERTEXSTREAM3DVATIPROC) (GLenum stream, const GLdouble *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM4SATIPROC) (GLenum 
stream, GLshort x, GLshort y, GLshort z, GLshort w); +typedef void (APIENTRYP PFNGLVERTEXSTREAM4SVATIPROC) (GLenum stream, const GLshort *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM4IATIPROC) (GLenum stream, GLint x, GLint y, GLint z, GLint w); +typedef void (APIENTRYP PFNGLVERTEXSTREAM4IVATIPROC) (GLenum stream, const GLint *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM4FATIPROC) (GLenum stream, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLVERTEXSTREAM4FVATIPROC) (GLenum stream, const GLfloat *coords); +typedef void (APIENTRYP PFNGLVERTEXSTREAM4DATIPROC) (GLenum stream, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLVERTEXSTREAM4DVATIPROC) (GLenum stream, const GLdouble *coords); +typedef void (APIENTRYP PFNGLNORMALSTREAM3BATIPROC) (GLenum stream, GLbyte nx, GLbyte ny, GLbyte nz); +typedef void (APIENTRYP PFNGLNORMALSTREAM3BVATIPROC) (GLenum stream, const GLbyte *coords); +typedef void (APIENTRYP PFNGLNORMALSTREAM3SATIPROC) (GLenum stream, GLshort nx, GLshort ny, GLshort nz); +typedef void (APIENTRYP PFNGLNORMALSTREAM3SVATIPROC) (GLenum stream, const GLshort *coords); +typedef void (APIENTRYP PFNGLNORMALSTREAM3IATIPROC) (GLenum stream, GLint nx, GLint ny, GLint nz); +typedef void (APIENTRYP PFNGLNORMALSTREAM3IVATIPROC) (GLenum stream, const GLint *coords); +typedef void (APIENTRYP PFNGLNORMALSTREAM3FATIPROC) (GLenum stream, GLfloat nx, GLfloat ny, GLfloat nz); +typedef void (APIENTRYP PFNGLNORMALSTREAM3FVATIPROC) (GLenum stream, const GLfloat *coords); +typedef void (APIENTRYP PFNGLNORMALSTREAM3DATIPROC) (GLenum stream, GLdouble nx, GLdouble ny, GLdouble nz); +typedef void (APIENTRYP PFNGLNORMALSTREAM3DVATIPROC) (GLenum stream, const GLdouble *coords); +typedef void (APIENTRYP PFNGLCLIENTACTIVEVERTEXSTREAMATIPROC) (GLenum stream); +typedef void (APIENTRYP PFNGLVERTEXBLENDENVIATIPROC) (GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLVERTEXBLENDENVFATIPROC) (GLenum pname, GLfloat param); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexStream1sATI (GLenum stream, GLshort x); +GLAPI void APIENTRY glVertexStream1svATI (GLenum stream, const GLshort *coords); +GLAPI void APIENTRY glVertexStream1iATI (GLenum stream, GLint x); +GLAPI void APIENTRY glVertexStream1ivATI (GLenum stream, const GLint *coords); +GLAPI void APIENTRY glVertexStream1fATI (GLenum stream, GLfloat x); +GLAPI void APIENTRY glVertexStream1fvATI (GLenum stream, const GLfloat *coords); +GLAPI void APIENTRY glVertexStream1dATI (GLenum stream, GLdouble x); +GLAPI void APIENTRY glVertexStream1dvATI (GLenum stream, const GLdouble *coords); +GLAPI void APIENTRY glVertexStream2sATI (GLenum stream, GLshort x, GLshort y); +GLAPI void APIENTRY glVertexStream2svATI (GLenum stream, const GLshort *coords); +GLAPI void APIENTRY glVertexStream2iATI (GLenum stream, GLint x, GLint y); +GLAPI void APIENTRY glVertexStream2ivATI (GLenum stream, const GLint *coords); +GLAPI void APIENTRY glVertexStream2fATI (GLenum stream, GLfloat x, GLfloat y); +GLAPI void APIENTRY glVertexStream2fvATI (GLenum stream, const GLfloat *coords); +GLAPI void APIENTRY glVertexStream2dATI (GLenum stream, GLdouble x, GLdouble y); +GLAPI void APIENTRY glVertexStream2dvATI (GLenum stream, const GLdouble *coords); +GLAPI void APIENTRY glVertexStream3sATI (GLenum stream, GLshort x, GLshort y, GLshort z); +GLAPI void APIENTRY glVertexStream3svATI (GLenum stream, const GLshort *coords); +GLAPI void APIENTRY glVertexStream3iATI (GLenum stream, GLint x, GLint y, GLint z); +GLAPI void 
APIENTRY glVertexStream3ivATI (GLenum stream, const GLint *coords); +GLAPI void APIENTRY glVertexStream3fATI (GLenum stream, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glVertexStream3fvATI (GLenum stream, const GLfloat *coords); +GLAPI void APIENTRY glVertexStream3dATI (GLenum stream, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glVertexStream3dvATI (GLenum stream, const GLdouble *coords); +GLAPI void APIENTRY glVertexStream4sATI (GLenum stream, GLshort x, GLshort y, GLshort z, GLshort w); +GLAPI void APIENTRY glVertexStream4svATI (GLenum stream, const GLshort *coords); +GLAPI void APIENTRY glVertexStream4iATI (GLenum stream, GLint x, GLint y, GLint z, GLint w); +GLAPI void APIENTRY glVertexStream4ivATI (GLenum stream, const GLint *coords); +GLAPI void APIENTRY glVertexStream4fATI (GLenum stream, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glVertexStream4fvATI (GLenum stream, const GLfloat *coords); +GLAPI void APIENTRY glVertexStream4dATI (GLenum stream, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glVertexStream4dvATI (GLenum stream, const GLdouble *coords); +GLAPI void APIENTRY glNormalStream3bATI (GLenum stream, GLbyte nx, GLbyte ny, GLbyte nz); +GLAPI void APIENTRY glNormalStream3bvATI (GLenum stream, const GLbyte *coords); +GLAPI void APIENTRY glNormalStream3sATI (GLenum stream, GLshort nx, GLshort ny, GLshort nz); +GLAPI void APIENTRY glNormalStream3svATI (GLenum stream, const GLshort *coords); +GLAPI void APIENTRY glNormalStream3iATI (GLenum stream, GLint nx, GLint ny, GLint nz); +GLAPI void APIENTRY glNormalStream3ivATI (GLenum stream, const GLint *coords); +GLAPI void APIENTRY glNormalStream3fATI (GLenum stream, GLfloat nx, GLfloat ny, GLfloat nz); +GLAPI void APIENTRY glNormalStream3fvATI (GLenum stream, const GLfloat *coords); +GLAPI void APIENTRY glNormalStream3dATI (GLenum stream, GLdouble nx, GLdouble ny, GLdouble nz); +GLAPI void APIENTRY glNormalStream3dvATI (GLenum stream, const GLdouble *coords); +GLAPI void APIENTRY glClientActiveVertexStreamATI (GLenum stream); +GLAPI void APIENTRY glVertexBlendEnviATI (GLenum pname, GLint param); +GLAPI void APIENTRY glVertexBlendEnvfATI (GLenum pname, GLfloat param); +#endif +#endif /* GL_ATI_vertex_streams */ + +#ifndef GL_EXT_422_pixels +#define GL_EXT_422_pixels 1 +#define GL_422_EXT 0x80CC +#define GL_422_REV_EXT 0x80CD +#define GL_422_AVERAGE_EXT 0x80CE +#define GL_422_REV_AVERAGE_EXT 0x80CF +#endif /* GL_EXT_422_pixels */ + +#ifndef GL_EXT_EGL_image_storage +#define GL_EXT_EGL_image_storage 1 +typedef void *GLeglImageOES; +typedef void (APIENTRYP PFNGLEGLIMAGETARGETTEXSTORAGEEXTPROC) (GLenum target, GLeglImageOES image, const GLint* attrib_list); +typedef void (APIENTRYP PFNGLEGLIMAGETARGETTEXTURESTORAGEEXTPROC) (GLuint texture, GLeglImageOES image, const GLint* attrib_list); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glEGLImageTargetTexStorageEXT (GLenum target, GLeglImageOES image, const GLint* attrib_list); +GLAPI void APIENTRY glEGLImageTargetTextureStorageEXT (GLuint texture, GLeglImageOES image, const GLint* attrib_list); +#endif +#endif /* GL_EXT_EGL_image_storage */ + +#ifndef GL_EXT_abgr +#define GL_EXT_abgr 1 +#define GL_ABGR_EXT 0x8000 +#endif /* GL_EXT_abgr */ + +#ifndef GL_EXT_bgra +#define GL_EXT_bgra 1 +#define GL_BGR_EXT 0x80E0 +#define GL_BGRA_EXT 0x80E1 +#endif /* GL_EXT_bgra */ + +#ifndef GL_EXT_bindable_uniform +#define GL_EXT_bindable_uniform 1 +#define GL_MAX_VERTEX_BINDABLE_UNIFORMS_EXT 0x8DE2 +#define 
GL_MAX_FRAGMENT_BINDABLE_UNIFORMS_EXT 0x8DE3 +#define GL_MAX_GEOMETRY_BINDABLE_UNIFORMS_EXT 0x8DE4 +#define GL_MAX_BINDABLE_UNIFORM_SIZE_EXT 0x8DED +#define GL_UNIFORM_BUFFER_EXT 0x8DEE +#define GL_UNIFORM_BUFFER_BINDING_EXT 0x8DEF +typedef void (APIENTRYP PFNGLUNIFORMBUFFEREXTPROC) (GLuint program, GLint location, GLuint buffer); +typedef GLint (APIENTRYP PFNGLGETUNIFORMBUFFERSIZEEXTPROC) (GLuint program, GLint location); +typedef GLintptr (APIENTRYP PFNGLGETUNIFORMOFFSETEXTPROC) (GLuint program, GLint location); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glUniformBufferEXT (GLuint program, GLint location, GLuint buffer); +GLAPI GLint APIENTRY glGetUniformBufferSizeEXT (GLuint program, GLint location); +GLAPI GLintptr APIENTRY glGetUniformOffsetEXT (GLuint program, GLint location); +#endif +#endif /* GL_EXT_bindable_uniform */ + +#ifndef GL_EXT_blend_color +#define GL_EXT_blend_color 1 +#define GL_CONSTANT_COLOR_EXT 0x8001 +#define GL_ONE_MINUS_CONSTANT_COLOR_EXT 0x8002 +#define GL_CONSTANT_ALPHA_EXT 0x8003 +#define GL_ONE_MINUS_CONSTANT_ALPHA_EXT 0x8004 +#define GL_BLEND_COLOR_EXT 0x8005 +typedef void (APIENTRYP PFNGLBLENDCOLOREXTPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendColorEXT (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +#endif +#endif /* GL_EXT_blend_color */ + +#ifndef GL_EXT_blend_equation_separate +#define GL_EXT_blend_equation_separate 1 +#define GL_BLEND_EQUATION_RGB_EXT 0x8009 +#define GL_BLEND_EQUATION_ALPHA_EXT 0x883D +typedef void (APIENTRYP PFNGLBLENDEQUATIONSEPARATEEXTPROC) (GLenum modeRGB, GLenum modeAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendEquationSeparateEXT (GLenum modeRGB, GLenum modeAlpha); +#endif +#endif /* GL_EXT_blend_equation_separate */ + +#ifndef GL_EXT_blend_func_separate +#define GL_EXT_blend_func_separate 1 +#define GL_BLEND_DST_RGB_EXT 0x80C8 +#define GL_BLEND_SRC_RGB_EXT 0x80C9 +#define GL_BLEND_DST_ALPHA_EXT 0x80CA +#define GL_BLEND_SRC_ALPHA_EXT 0x80CB +typedef void (APIENTRYP PFNGLBLENDFUNCSEPARATEEXTPROC) (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendFuncSeparateEXT (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +#endif +#endif /* GL_EXT_blend_func_separate */ + +#ifndef GL_EXT_blend_logic_op +#define GL_EXT_blend_logic_op 1 +#endif /* GL_EXT_blend_logic_op */ + +#ifndef GL_EXT_blend_minmax +#define GL_EXT_blend_minmax 1 +#define GL_MIN_EXT 0x8007 +#define GL_MAX_EXT 0x8008 +#define GL_FUNC_ADD_EXT 0x8006 +#define GL_BLEND_EQUATION_EXT 0x8009 +typedef void (APIENTRYP PFNGLBLENDEQUATIONEXTPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendEquationEXT (GLenum mode); +#endif +#endif /* GL_EXT_blend_minmax */ + +#ifndef GL_EXT_blend_subtract +#define GL_EXT_blend_subtract 1 +#define GL_FUNC_SUBTRACT_EXT 0x800A +#define GL_FUNC_REVERSE_SUBTRACT_EXT 0x800B +#endif /* GL_EXT_blend_subtract */ + +#ifndef GL_EXT_clip_volume_hint +#define GL_EXT_clip_volume_hint 1 +#define GL_CLIP_VOLUME_CLIPPING_HINT_EXT 0x80F0 +#endif /* GL_EXT_clip_volume_hint */ + +#ifndef GL_EXT_cmyka +#define GL_EXT_cmyka 1 +#define GL_CMYK_EXT 0x800C +#define GL_CMYKA_EXT 0x800D +#define GL_PACK_CMYK_HINT_EXT 0x800E +#define GL_UNPACK_CMYK_HINT_EXT 0x800F +#endif /* GL_EXT_cmyka */ + +#ifndef GL_EXT_color_subtable +#define GL_EXT_color_subtable 1 +typedef void (APIENTRYP PFNGLCOLORSUBTABLEEXTPROC) (GLenum target, 
GLsizei start, GLsizei count, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLCOPYCOLORSUBTABLEEXTPROC) (GLenum target, GLsizei start, GLint x, GLint y, GLsizei width); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glColorSubTableEXT (GLenum target, GLsizei start, GLsizei count, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glCopyColorSubTableEXT (GLenum target, GLsizei start, GLint x, GLint y, GLsizei width); +#endif +#endif /* GL_EXT_color_subtable */ + +#ifndef GL_EXT_compiled_vertex_array +#define GL_EXT_compiled_vertex_array 1 +#define GL_ARRAY_ELEMENT_LOCK_FIRST_EXT 0x81A8 +#define GL_ARRAY_ELEMENT_LOCK_COUNT_EXT 0x81A9 +typedef void (APIENTRYP PFNGLLOCKARRAYSEXTPROC) (GLint first, GLsizei count); +typedef void (APIENTRYP PFNGLUNLOCKARRAYSEXTPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glLockArraysEXT (GLint first, GLsizei count); +GLAPI void APIENTRY glUnlockArraysEXT (void); +#endif +#endif /* GL_EXT_compiled_vertex_array */ + +#ifndef GL_EXT_convolution +#define GL_EXT_convolution 1 +#define GL_CONVOLUTION_1D_EXT 0x8010 +#define GL_CONVOLUTION_2D_EXT 0x8011 +#define GL_SEPARABLE_2D_EXT 0x8012 +#define GL_CONVOLUTION_BORDER_MODE_EXT 0x8013 +#define GL_CONVOLUTION_FILTER_SCALE_EXT 0x8014 +#define GL_CONVOLUTION_FILTER_BIAS_EXT 0x8015 +#define GL_REDUCE_EXT 0x8016 +#define GL_CONVOLUTION_FORMAT_EXT 0x8017 +#define GL_CONVOLUTION_WIDTH_EXT 0x8018 +#define GL_CONVOLUTION_HEIGHT_EXT 0x8019 +#define GL_MAX_CONVOLUTION_WIDTH_EXT 0x801A +#define GL_MAX_CONVOLUTION_HEIGHT_EXT 0x801B +#define GL_POST_CONVOLUTION_RED_SCALE_EXT 0x801C +#define GL_POST_CONVOLUTION_GREEN_SCALE_EXT 0x801D +#define GL_POST_CONVOLUTION_BLUE_SCALE_EXT 0x801E +#define GL_POST_CONVOLUTION_ALPHA_SCALE_EXT 0x801F +#define GL_POST_CONVOLUTION_RED_BIAS_EXT 0x8020 +#define GL_POST_CONVOLUTION_GREEN_BIAS_EXT 0x8021 +#define GL_POST_CONVOLUTION_BLUE_BIAS_EXT 0x8022 +#define GL_POST_CONVOLUTION_ALPHA_BIAS_EXT 0x8023 +typedef void (APIENTRYP PFNGLCONVOLUTIONFILTER1DEXTPROC) (GLenum target, GLenum internalformat, GLsizei width, GLenum format, GLenum type, const void *image); +typedef void (APIENTRYP PFNGLCONVOLUTIONFILTER2DEXTPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *image); +typedef void (APIENTRYP PFNGLCONVOLUTIONPARAMETERFEXTPROC) (GLenum target, GLenum pname, GLfloat params); +typedef void (APIENTRYP PFNGLCONVOLUTIONPARAMETERFVEXTPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLCONVOLUTIONPARAMETERIEXTPROC) (GLenum target, GLenum pname, GLint params); +typedef void (APIENTRYP PFNGLCONVOLUTIONPARAMETERIVEXTPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLCOPYCONVOLUTIONFILTER1DEXTPROC) (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLCOPYCONVOLUTIONFILTER2DEXTPROC) (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETCONVOLUTIONFILTEREXTPROC) (GLenum target, GLenum format, GLenum type, void *image); +typedef void (APIENTRYP PFNGLGETCONVOLUTIONPARAMETERFVEXTPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETCONVOLUTIONPARAMETERIVEXTPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETSEPARABLEFILTEREXTPROC) (GLenum target, GLenum format, GLenum type, void *row, void *column, void *span); +typedef void (APIENTRYP 
PFNGLSEPARABLEFILTER2DEXTPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *row, const void *column); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glConvolutionFilter1DEXT (GLenum target, GLenum internalformat, GLsizei width, GLenum format, GLenum type, const void *image); +GLAPI void APIENTRY glConvolutionFilter2DEXT (GLenum target, GLenum internalformat, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *image); +GLAPI void APIENTRY glConvolutionParameterfEXT (GLenum target, GLenum pname, GLfloat params); +GLAPI void APIENTRY glConvolutionParameterfvEXT (GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glConvolutionParameteriEXT (GLenum target, GLenum pname, GLint params); +GLAPI void APIENTRY glConvolutionParameterivEXT (GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glCopyConvolutionFilter1DEXT (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY glCopyConvolutionFilter2DEXT (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetConvolutionFilterEXT (GLenum target, GLenum format, GLenum type, void *image); +GLAPI void APIENTRY glGetConvolutionParameterfvEXT (GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetConvolutionParameterivEXT (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetSeparableFilterEXT (GLenum target, GLenum format, GLenum type, void *row, void *column, void *span); +GLAPI void APIENTRY glSeparableFilter2DEXT (GLenum target, GLenum internalformat, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *row, const void *column); +#endif +#endif /* GL_EXT_convolution */ + +#ifndef GL_EXT_coordinate_frame +#define GL_EXT_coordinate_frame 1 +#define GL_TANGENT_ARRAY_EXT 0x8439 +#define GL_BINORMAL_ARRAY_EXT 0x843A +#define GL_CURRENT_TANGENT_EXT 0x843B +#define GL_CURRENT_BINORMAL_EXT 0x843C +#define GL_TANGENT_ARRAY_TYPE_EXT 0x843E +#define GL_TANGENT_ARRAY_STRIDE_EXT 0x843F +#define GL_BINORMAL_ARRAY_TYPE_EXT 0x8440 +#define GL_BINORMAL_ARRAY_STRIDE_EXT 0x8441 +#define GL_TANGENT_ARRAY_POINTER_EXT 0x8442 +#define GL_BINORMAL_ARRAY_POINTER_EXT 0x8443 +#define GL_MAP1_TANGENT_EXT 0x8444 +#define GL_MAP2_TANGENT_EXT 0x8445 +#define GL_MAP1_BINORMAL_EXT 0x8446 +#define GL_MAP2_BINORMAL_EXT 0x8447 +typedef void (APIENTRYP PFNGLTANGENT3BEXTPROC) (GLbyte tx, GLbyte ty, GLbyte tz); +typedef void (APIENTRYP PFNGLTANGENT3BVEXTPROC) (const GLbyte *v); +typedef void (APIENTRYP PFNGLTANGENT3DEXTPROC) (GLdouble tx, GLdouble ty, GLdouble tz); +typedef void (APIENTRYP PFNGLTANGENT3DVEXTPROC) (const GLdouble *v); +typedef void (APIENTRYP PFNGLTANGENT3FEXTPROC) (GLfloat tx, GLfloat ty, GLfloat tz); +typedef void (APIENTRYP PFNGLTANGENT3FVEXTPROC) (const GLfloat *v); +typedef void (APIENTRYP PFNGLTANGENT3IEXTPROC) (GLint tx, GLint ty, GLint tz); +typedef void (APIENTRYP PFNGLTANGENT3IVEXTPROC) (const GLint *v); +typedef void (APIENTRYP PFNGLTANGENT3SEXTPROC) (GLshort tx, GLshort ty, GLshort tz); +typedef void (APIENTRYP PFNGLTANGENT3SVEXTPROC) (const GLshort *v); +typedef void (APIENTRYP PFNGLBINORMAL3BEXTPROC) (GLbyte bx, GLbyte by, GLbyte bz); +typedef void (APIENTRYP PFNGLBINORMAL3BVEXTPROC) (const GLbyte *v); +typedef void (APIENTRYP PFNGLBINORMAL3DEXTPROC) (GLdouble bx, GLdouble by, GLdouble bz); +typedef void (APIENTRYP PFNGLBINORMAL3DVEXTPROC) (const GLdouble *v); +typedef void (APIENTRYP 
PFNGLBINORMAL3FEXTPROC) (GLfloat bx, GLfloat by, GLfloat bz); +typedef void (APIENTRYP PFNGLBINORMAL3FVEXTPROC) (const GLfloat *v); +typedef void (APIENTRYP PFNGLBINORMAL3IEXTPROC) (GLint bx, GLint by, GLint bz); +typedef void (APIENTRYP PFNGLBINORMAL3IVEXTPROC) (const GLint *v); +typedef void (APIENTRYP PFNGLBINORMAL3SEXTPROC) (GLshort bx, GLshort by, GLshort bz); +typedef void (APIENTRYP PFNGLBINORMAL3SVEXTPROC) (const GLshort *v); +typedef void (APIENTRYP PFNGLTANGENTPOINTEREXTPROC) (GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLBINORMALPOINTEREXTPROC) (GLenum type, GLsizei stride, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTangent3bEXT (GLbyte tx, GLbyte ty, GLbyte tz); +GLAPI void APIENTRY glTangent3bvEXT (const GLbyte *v); +GLAPI void APIENTRY glTangent3dEXT (GLdouble tx, GLdouble ty, GLdouble tz); +GLAPI void APIENTRY glTangent3dvEXT (const GLdouble *v); +GLAPI void APIENTRY glTangent3fEXT (GLfloat tx, GLfloat ty, GLfloat tz); +GLAPI void APIENTRY glTangent3fvEXT (const GLfloat *v); +GLAPI void APIENTRY glTangent3iEXT (GLint tx, GLint ty, GLint tz); +GLAPI void APIENTRY glTangent3ivEXT (const GLint *v); +GLAPI void APIENTRY glTangent3sEXT (GLshort tx, GLshort ty, GLshort tz); +GLAPI void APIENTRY glTangent3svEXT (const GLshort *v); +GLAPI void APIENTRY glBinormal3bEXT (GLbyte bx, GLbyte by, GLbyte bz); +GLAPI void APIENTRY glBinormal3bvEXT (const GLbyte *v); +GLAPI void APIENTRY glBinormal3dEXT (GLdouble bx, GLdouble by, GLdouble bz); +GLAPI void APIENTRY glBinormal3dvEXT (const GLdouble *v); +GLAPI void APIENTRY glBinormal3fEXT (GLfloat bx, GLfloat by, GLfloat bz); +GLAPI void APIENTRY glBinormal3fvEXT (const GLfloat *v); +GLAPI void APIENTRY glBinormal3iEXT (GLint bx, GLint by, GLint bz); +GLAPI void APIENTRY glBinormal3ivEXT (const GLint *v); +GLAPI void APIENTRY glBinormal3sEXT (GLshort bx, GLshort by, GLshort bz); +GLAPI void APIENTRY glBinormal3svEXT (const GLshort *v); +GLAPI void APIENTRY glTangentPointerEXT (GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glBinormalPointerEXT (GLenum type, GLsizei stride, const void *pointer); +#endif +#endif /* GL_EXT_coordinate_frame */ + +#ifndef GL_EXT_copy_texture +#define GL_EXT_copy_texture 1 +typedef void (APIENTRYP PFNGLCOPYTEXIMAGE1DEXTPROC) (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +typedef void (APIENTRYP PFNGLCOPYTEXIMAGE2DEXTPROC) (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (APIENTRYP PFNGLCOPYTEXSUBIMAGE1DEXTPROC) (GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLCOPYTEXSUBIMAGE2DEXTPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLCOPYTEXSUBIMAGE3DEXTPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCopyTexImage1DEXT (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +GLAPI void APIENTRY glCopyTexImage2DEXT (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GLAPI void APIENTRY glCopyTexSubImage1DEXT (GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY 
glCopyTexSubImage2DEXT (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glCopyTexSubImage3DEXT (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +#endif +#endif /* GL_EXT_copy_texture */ + +#ifndef GL_EXT_cull_vertex +#define GL_EXT_cull_vertex 1 +#define GL_CULL_VERTEX_EXT 0x81AA +#define GL_CULL_VERTEX_EYE_POSITION_EXT 0x81AB +#define GL_CULL_VERTEX_OBJECT_POSITION_EXT 0x81AC +typedef void (APIENTRYP PFNGLCULLPARAMETERDVEXTPROC) (GLenum pname, GLdouble *params); +typedef void (APIENTRYP PFNGLCULLPARAMETERFVEXTPROC) (GLenum pname, GLfloat *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCullParameterdvEXT (GLenum pname, GLdouble *params); +GLAPI void APIENTRY glCullParameterfvEXT (GLenum pname, GLfloat *params); +#endif +#endif /* GL_EXT_cull_vertex */ + +#ifndef GL_EXT_debug_label +#define GL_EXT_debug_label 1 +#define GL_PROGRAM_PIPELINE_OBJECT_EXT 0x8A4F +#define GL_PROGRAM_OBJECT_EXT 0x8B40 +#define GL_SHADER_OBJECT_EXT 0x8B48 +#define GL_BUFFER_OBJECT_EXT 0x9151 +#define GL_QUERY_OBJECT_EXT 0x9153 +#define GL_VERTEX_ARRAY_OBJECT_EXT 0x9154 +typedef void (APIENTRYP PFNGLLABELOBJECTEXTPROC) (GLenum type, GLuint object, GLsizei length, const GLchar *label); +typedef void (APIENTRYP PFNGLGETOBJECTLABELEXTPROC) (GLenum type, GLuint object, GLsizei bufSize, GLsizei *length, GLchar *label); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glLabelObjectEXT (GLenum type, GLuint object, GLsizei length, const GLchar *label); +GLAPI void APIENTRY glGetObjectLabelEXT (GLenum type, GLuint object, GLsizei bufSize, GLsizei *length, GLchar *label); +#endif +#endif /* GL_EXT_debug_label */ + +#ifndef GL_EXT_debug_marker +#define GL_EXT_debug_marker 1 +typedef void (APIENTRYP PFNGLINSERTEVENTMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (APIENTRYP PFNGLPUSHGROUPMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (APIENTRYP PFNGLPOPGROUPMARKEREXTPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glInsertEventMarkerEXT (GLsizei length, const GLchar *marker); +GLAPI void APIENTRY glPushGroupMarkerEXT (GLsizei length, const GLchar *marker); +GLAPI void APIENTRY glPopGroupMarkerEXT (void); +#endif +#endif /* GL_EXT_debug_marker */ + +#ifndef GL_EXT_depth_bounds_test +#define GL_EXT_depth_bounds_test 1 +#define GL_DEPTH_BOUNDS_TEST_EXT 0x8890 +#define GL_DEPTH_BOUNDS_EXT 0x8891 +typedef void (APIENTRYP PFNGLDEPTHBOUNDSEXTPROC) (GLclampd zmin, GLclampd zmax); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDepthBoundsEXT (GLclampd zmin, GLclampd zmax); +#endif +#endif /* GL_EXT_depth_bounds_test */ + +#ifndef GL_EXT_direct_state_access +#define GL_EXT_direct_state_access 1 +#define GL_PROGRAM_MATRIX_EXT 0x8E2D +#define GL_TRANSPOSE_PROGRAM_MATRIX_EXT 0x8E2E +#define GL_PROGRAM_MATRIX_STACK_DEPTH_EXT 0x8E2F +typedef void (APIENTRYP PFNGLMATRIXLOADFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXLOADDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (APIENTRYP PFNGLMATRIXMULTFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXMULTDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (APIENTRYP PFNGLMATRIXLOADIDENTITYEXTPROC) (GLenum mode); +typedef void (APIENTRYP PFNGLMATRIXROTATEFEXTPROC) (GLenum mode, GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLMATRIXROTATEDEXTPROC) (GLenum mode, GLdouble 
angle, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLMATRIXSCALEFEXTPROC) (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLMATRIXSCALEDEXTPROC) (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLMATRIXTRANSLATEFEXTPROC) (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLMATRIXTRANSLATEDEXTPROC) (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLMATRIXFRUSTUMEXTPROC) (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +typedef void (APIENTRYP PFNGLMATRIXORTHOEXTPROC) (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +typedef void (APIENTRYP PFNGLMATRIXPOPEXTPROC) (GLenum mode); +typedef void (APIENTRYP PFNGLMATRIXPUSHEXTPROC) (GLenum mode); +typedef void (APIENTRYP PFNGLCLIENTATTRIBDEFAULTEXTPROC) (GLbitfield mask); +typedef void (APIENTRYP PFNGLPUSHCLIENTATTRIBDEFAULTEXTPROC) (GLbitfield mask); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERFEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERFVEXTPROC) (GLuint texture, GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLTEXTUREIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTUREIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOPYTEXTUREIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +typedef void (APIENTRYP PFNGLCOPYTEXTUREIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (APIENTRYP PFNGLCOPYTEXTURESUBIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLCOPYTEXTURESUBIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETTEXTUREIMAGEEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERFVEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXTURELEVELPARAMETERFVEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum 
pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETTEXTURELEVELPARAMETERIVEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLTEXTUREIMAGE3DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXTURESUBIMAGE3DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOPYTEXTURESUBIMAGE3DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLBINDMULTITEXTUREEXTPROC) (GLenum texunit, GLenum target, GLuint texture); +typedef void (APIENTRYP PFNGLMULTITEXCOORDPOINTEREXTPROC) (GLenum texunit, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLMULTITEXENVFEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLMULTITEXENVFVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLMULTITEXENVIEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLMULTITEXENVIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLMULTITEXGENDEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLdouble param); +typedef void (APIENTRYP PFNGLMULTITEXGENDVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, const GLdouble *params); +typedef void (APIENTRYP PFNGLMULTITEXGENFEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLMULTITEXGENFVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLMULTITEXGENIEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLMULTITEXGENIVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLGETMULTITEXENVFVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETMULTITEXENVIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETMULTITEXGENDVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLdouble *params); +typedef void (APIENTRYP PFNGLGETMULTITEXGENFVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETMULTITEXGENIVEXTPROC) (GLenum texunit, GLenum coord, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERIEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERFEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERFVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLMULTITEXIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP 
PFNGLMULTITEXIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLMULTITEXSUBIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLMULTITEXSUBIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOPYMULTITEXIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +typedef void (APIENTRYP PFNGLCOPYMULTITEXIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (APIENTRYP PFNGLCOPYMULTITEXSUBIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLCOPYMULTITEXSUBIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETMULTITEXIMAGEEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +typedef void (APIENTRYP PFNGLGETMULTITEXPARAMETERFVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETMULTITEXPARAMETERIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETMULTITEXLEVELPARAMETERFVEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETMULTITEXLEVELPARAMETERIVEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLMULTITEXIMAGE3DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLMULTITEXSUBIMAGE3DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLCOPYMULTITEXSUBIMAGE3DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLENABLECLIENTSTATEINDEXEDEXTPROC) (GLenum array, GLuint index); +typedef void (APIENTRYP PFNGLDISABLECLIENTSTATEINDEXEDEXTPROC) (GLenum array, GLuint index); +typedef void (APIENTRYP PFNGLGETFLOATINDEXEDVEXTPROC) (GLenum target, GLuint index, GLfloat *data); +typedef void (APIENTRYP PFNGLGETDOUBLEINDEXEDVEXTPROC) (GLenum target, GLuint index, GLdouble *data); +typedef void (APIENTRYP PFNGLGETPOINTERINDEXEDVEXTPROC) (GLenum target, GLuint index, void **data); +typedef void (APIENTRYP PFNGLENABLEINDEXEDEXTPROC) (GLenum target, GLuint index); +typedef void (APIENTRYP PFNGLDISABLEINDEXEDEXTPROC) (GLenum target, GLuint index); +typedef GLboolean (APIENTRYP PFNGLISENABLEDINDEXEDEXTPROC) (GLenum target, GLuint index); +typedef void (APIENTRYP PFNGLGETINTEGERINDEXEDVEXTPROC) (GLenum target, GLuint index, GLint *data); +typedef void (APIENTRYP PFNGLGETBOOLEANINDEXEDVEXTPROC) (GLenum 
target, GLuint index, GLboolean *data); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTUREIMAGE3DEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTUREIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTUREIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE3DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE2DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDTEXTURESUBIMAGE1DEXTPROC) (GLuint texture, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLGETCOMPRESSEDTEXTUREIMAGEEXTPROC) (GLuint texture, GLenum target, GLint lod, void *img); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXIMAGE3DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXSUBIMAGE3DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXSUBIMAGE2DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLCOMPRESSEDMULTITEXSUBIMAGE1DEXTPROC) (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *bits); +typedef void (APIENTRYP PFNGLGETCOMPRESSEDMULTITEXIMAGEEXTPROC) (GLenum texunit, GLenum target, GLint lod, void *img); +typedef void (APIENTRYP PFNGLMATRIXLOADTRANSPOSEFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXLOADTRANSPOSEDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (APIENTRYP PFNGLMATRIXMULTTRANSPOSEFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXMULTTRANSPOSEDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (APIENTRYP PFNGLNAMEDBUFFERDATAEXTPROC) (GLuint buffer, GLsizeiptr size, const void *data, GLenum usage); +typedef void (APIENTRYP PFNGLNAMEDBUFFERSUBDATAEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +typedef void 
*(APIENTRYP PFNGLMAPNAMEDBUFFEREXTPROC) (GLuint buffer, GLenum access); +typedef GLboolean (APIENTRYP PFNGLUNMAPNAMEDBUFFEREXTPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPARAMETERIVEXTPROC) (GLuint buffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPOINTERVEXTPROC) (GLuint buffer, GLenum pname, void **params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERSUBDATAEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, void *data); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1FEXTPROC) (GLuint program, GLint location, GLfloat v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1IEXTPROC) (GLuint program, GLint location, GLint v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP 
PFNGLPROGRAMUNIFORMMATRIX3X4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (APIENTRYP PFNGLTEXTUREBUFFEREXTPROC) (GLuint texture, GLenum target, GLenum internalformat, GLuint buffer); +typedef void (APIENTRYP PFNGLMULTITEXBUFFEREXTPROC) (GLenum texunit, GLenum target, GLenum internalformat, GLuint buffer); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLTEXTUREPARAMETERIUIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, const GLuint *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXTUREPARAMETERIUIVEXTPROC) (GLuint texture, GLenum target, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERIIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLMULTITEXPARAMETERIUIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, const GLuint *params); +typedef void (APIENTRYP PFNGLGETMULTITEXPARAMETERIIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETMULTITEXPARAMETERIUIVEXTPROC) (GLenum texunit, GLenum target, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UIEXTPROC) (GLuint program, GLint location, GLuint v0); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERS4FVEXTPROC) (GLuint program, GLenum target, GLuint index, GLsizei count, const GLfloat *params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERI4IEXTPROC) (GLuint program, GLenum target, GLuint index, GLint x, GLint y, GLint z, GLint w); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERI4IVEXTPROC) (GLuint program, GLenum target, GLuint index, const GLint *params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERSI4IVEXTPROC) (GLuint program, GLenum target, GLuint index, GLsizei count, const GLint *params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERI4UIEXTPROC) (GLuint program, GLenum target, GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERI4UIVEXTPROC) (GLuint program, GLenum target, GLuint index, const GLuint *params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETERSI4UIVEXTPROC) (GLuint program, GLenum target, GLuint index, GLsizei count, const GLuint 
*params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMLOCALPARAMETERIIVEXTPROC) (GLuint program, GLenum target, GLuint index, GLint *params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMLOCALPARAMETERIUIVEXTPROC) (GLuint program, GLenum target, GLuint index, GLuint *params); +typedef void (APIENTRYP PFNGLENABLECLIENTSTATEIEXTPROC) (GLenum array, GLuint index); +typedef void (APIENTRYP PFNGLDISABLECLIENTSTATEIEXTPROC) (GLenum array, GLuint index); +typedef void (APIENTRYP PFNGLGETFLOATI_VEXTPROC) (GLenum pname, GLuint index, GLfloat *params); +typedef void (APIENTRYP PFNGLGETDOUBLEI_VEXTPROC) (GLenum pname, GLuint index, GLdouble *params); +typedef void (APIENTRYP PFNGLGETPOINTERI_VEXTPROC) (GLenum pname, GLuint index, void **params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMSTRINGEXTPROC) (GLuint program, GLenum target, GLenum format, GLsizei len, const void *string); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETER4DEXTPROC) (GLuint program, GLenum target, GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETER4DVEXTPROC) (GLuint program, GLenum target, GLuint index, const GLdouble *params); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETER4FEXTPROC) (GLuint program, GLenum target, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLNAMEDPROGRAMLOCALPARAMETER4FVEXTPROC) (GLuint program, GLenum target, GLuint index, const GLfloat *params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMLOCALPARAMETERDVEXTPROC) (GLuint program, GLenum target, GLuint index, GLdouble *params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMLOCALPARAMETERFVEXTPROC) (GLuint program, GLenum target, GLuint index, GLfloat *params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMIVEXTPROC) (GLuint program, GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETNAMEDPROGRAMSTRINGEXTPROC) (GLuint program, GLenum target, GLenum pname, void *string); +typedef void (APIENTRYP PFNGLNAMEDRENDERBUFFERSTORAGEEXTPROC) (GLuint renderbuffer, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETNAMEDRENDERBUFFERPARAMETERIVEXTPROC) (GLuint renderbuffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLNAMEDRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC) (GLuint renderbuffer, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLNAMEDRENDERBUFFERSTORAGEMULTISAMPLECOVERAGEEXTPROC) (GLuint renderbuffer, GLsizei coverageSamples, GLsizei colorSamples, GLenum internalformat, GLsizei width, GLsizei height); +typedef GLenum (APIENTRYP PFNGLCHECKNAMEDFRAMEBUFFERSTATUSEXTPROC) (GLuint framebuffer, GLenum target); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTURE1DEXTPROC) (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTURE2DEXTPROC) (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTURE3DEXTPROC) (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERRENDERBUFFEREXTPROC) (GLuint framebuffer, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLGETNAMEDFRAMEBUFFERATTACHMENTPARAMETERIVEXTPROC) (GLuint framebuffer, GLenum attachment, GLenum pname, GLint *params); +typedef void (APIENTRYP 
PFNGLGENERATETEXTUREMIPMAPEXTPROC) (GLuint texture, GLenum target); +typedef void (APIENTRYP PFNGLGENERATEMULTITEXMIPMAPEXTPROC) (GLenum texunit, GLenum target); +typedef void (APIENTRYP PFNGLFRAMEBUFFERDRAWBUFFEREXTPROC) (GLuint framebuffer, GLenum mode); +typedef void (APIENTRYP PFNGLFRAMEBUFFERDRAWBUFFERSEXTPROC) (GLuint framebuffer, GLsizei n, const GLenum *bufs); +typedef void (APIENTRYP PFNGLFRAMEBUFFERREADBUFFEREXTPROC) (GLuint framebuffer, GLenum mode); +typedef void (APIENTRYP PFNGLGETFRAMEBUFFERPARAMETERIVEXTPROC) (GLuint framebuffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLNAMEDCOPYBUFFERSUBDATAEXTPROC) (GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTUREEXTPROC) (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTURELAYEREXTPROC) (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERTEXTUREFACEEXTPROC) (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLenum face); +typedef void (APIENTRYP PFNGLTEXTURERENDERBUFFEREXTPROC) (GLuint texture, GLenum target, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLMULTITEXRENDERBUFFEREXTPROC) (GLenum texunit, GLenum target, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYCOLOROFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYEDGEFLAGOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYINDEXOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYNORMALOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYTEXCOORDOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYMULTITEXCOORDOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLenum texunit, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYFOGCOORDOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYSECONDARYCOLOROFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBIOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLENABLEVERTEXARRAYEXTPROC) (GLuint vaobj, GLenum array); +typedef void (APIENTRYP PFNGLDISABLEVERTEXARRAYEXTPROC) (GLuint vaobj, GLenum array); +typedef void (APIENTRYP PFNGLENABLEVERTEXARRAYATTRIBEXTPROC) (GLuint vaobj, GLuint index); +typedef void (APIENTRYP PFNGLDISABLEVERTEXARRAYATTRIBEXTPROC) (GLuint vaobj, GLuint index); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYINTEGERVEXTPROC) (GLuint vaobj, GLenum pname, GLint 
*param); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYPOINTERVEXTPROC) (GLuint vaobj, GLenum pname, void **param); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYINTEGERI_VEXTPROC) (GLuint vaobj, GLuint index, GLenum pname, GLint *param); +typedef void (APIENTRYP PFNGLGETVERTEXARRAYPOINTERI_VEXTPROC) (GLuint vaobj, GLuint index, GLenum pname, void **param); +typedef void *(APIENTRYP PFNGLMAPNAMEDBUFFERRANGEEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (APIENTRYP PFNGLFLUSHMAPPEDNAMEDBUFFERRANGEEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr length); +typedef void (APIENTRYP PFNGLNAMEDBUFFERSTORAGEEXTPROC) (GLuint buffer, GLsizeiptr size, const void *data, GLbitfield flags); +typedef void (APIENTRYP PFNGLCLEARNAMEDBUFFERDATAEXTPROC) (GLuint buffer, GLenum internalformat, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLCLEARNAMEDBUFFERSUBDATAEXTPROC) (GLuint buffer, GLenum internalformat, GLsizeiptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERPARAMETERIEXTPROC) (GLuint framebuffer, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLGETNAMEDFRAMEBUFFERPARAMETERIVEXTPROC) (GLuint framebuffer, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1DEXTPROC) (GLuint program, GLint location, GLdouble x); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2DEXTPROC) (GLuint program, GLint location, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3DEXTPROC) (GLuint program, GLint location, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4DEXTPROC) (GLuint program, GLint location, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM1DVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM2DVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM3DVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORM4DVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void 
(APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3DVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +typedef void (APIENTRYP PFNGLTEXTUREBUFFERRANGEEXTPROC) (GLuint texture, GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE1DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE2DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE3DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE2DMULTISAMPLEEXTPROC) (GLuint texture, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLTEXTURESTORAGE3DMULTISAMPLEEXTPROC) (GLuint texture, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +typedef void (APIENTRYP PFNGLVERTEXARRAYBINDVERTEXBUFFEREXTPROC) (GLuint vaobj, GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBFORMATEXTPROC) (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBIFORMATEXTPROC) (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBLFORMATEXTPROC) (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBBINDINGEXTPROC) (GLuint vaobj, GLuint attribindex, GLuint bindingindex); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXBINDINGDIVISOREXTPROC) (GLuint vaobj, GLuint bindingindex, GLuint divisor); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBLOFFSETEXTPROC) (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, GLsizei stride, GLintptr offset); +typedef void (APIENTRYP PFNGLTEXTUREPAGECOMMITMENTEXTPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean commit); +typedef void (APIENTRYP PFNGLVERTEXARRAYVERTEXATTRIBDIVISOREXTPROC) (GLuint vaobj, GLuint index, GLuint divisor); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMatrixLoadfEXT (GLenum mode, const GLfloat *m); +GLAPI void APIENTRY glMatrixLoaddEXT (GLenum mode, const GLdouble *m); +GLAPI void APIENTRY glMatrixMultfEXT (GLenum mode, const GLfloat *m); +GLAPI void APIENTRY glMatrixMultdEXT (GLenum mode, const GLdouble *m); +GLAPI void APIENTRY glMatrixLoadIdentityEXT (GLenum mode); +GLAPI void APIENTRY glMatrixRotatefEXT (GLenum mode, GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glMatrixRotatedEXT (GLenum mode, GLdouble angle, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glMatrixScalefEXT (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glMatrixScaledEXT (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glMatrixTranslatefEXT (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glMatrixTranslatedEXT (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glMatrixFrustumEXT 
(GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +GLAPI void APIENTRY glMatrixOrthoEXT (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +GLAPI void APIENTRY glMatrixPopEXT (GLenum mode); +GLAPI void APIENTRY glMatrixPushEXT (GLenum mode); +GLAPI void APIENTRY glClientAttribDefaultEXT (GLbitfield mask); +GLAPI void APIENTRY glPushClientAttribDefaultEXT (GLbitfield mask); +GLAPI void APIENTRY glTextureParameterfEXT (GLuint texture, GLenum target, GLenum pname, GLfloat param); +GLAPI void APIENTRY glTextureParameterfvEXT (GLuint texture, GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glTextureParameteriEXT (GLuint texture, GLenum target, GLenum pname, GLint param); +GLAPI void APIENTRY glTextureParameterivEXT (GLuint texture, GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glTextureImage1DEXT (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureImage2DEXT (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureSubImage1DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureSubImage2DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCopyTextureImage1DEXT (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +GLAPI void APIENTRY glCopyTextureImage2DEXT (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GLAPI void APIENTRY glCopyTextureSubImage1DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY glCopyTextureSubImage2DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetTextureImageEXT (GLuint texture, GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +GLAPI void APIENTRY glGetTextureParameterfvEXT (GLuint texture, GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetTextureParameterivEXT (GLuint texture, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTextureLevelParameterfvEXT (GLuint texture, GLenum target, GLint level, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetTextureLevelParameterivEXT (GLuint texture, GLenum target, GLint level, GLenum pname, GLint *params); +GLAPI void APIENTRY glTextureImage3DEXT (GLuint texture, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTextureSubImage3DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCopyTextureSubImage3DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, 
GLsizei width, GLsizei height); +GLAPI void APIENTRY glBindMultiTextureEXT (GLenum texunit, GLenum target, GLuint texture); +GLAPI void APIENTRY glMultiTexCoordPointerEXT (GLenum texunit, GLint size, GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glMultiTexEnvfEXT (GLenum texunit, GLenum target, GLenum pname, GLfloat param); +GLAPI void APIENTRY glMultiTexEnvfvEXT (GLenum texunit, GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glMultiTexEnviEXT (GLenum texunit, GLenum target, GLenum pname, GLint param); +GLAPI void APIENTRY glMultiTexEnvivEXT (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glMultiTexGendEXT (GLenum texunit, GLenum coord, GLenum pname, GLdouble param); +GLAPI void APIENTRY glMultiTexGendvEXT (GLenum texunit, GLenum coord, GLenum pname, const GLdouble *params); +GLAPI void APIENTRY glMultiTexGenfEXT (GLenum texunit, GLenum coord, GLenum pname, GLfloat param); +GLAPI void APIENTRY glMultiTexGenfvEXT (GLenum texunit, GLenum coord, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glMultiTexGeniEXT (GLenum texunit, GLenum coord, GLenum pname, GLint param); +GLAPI void APIENTRY glMultiTexGenivEXT (GLenum texunit, GLenum coord, GLenum pname, const GLint *params); +GLAPI void APIENTRY glGetMultiTexEnvfvEXT (GLenum texunit, GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetMultiTexEnvivEXT (GLenum texunit, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetMultiTexGendvEXT (GLenum texunit, GLenum coord, GLenum pname, GLdouble *params); +GLAPI void APIENTRY glGetMultiTexGenfvEXT (GLenum texunit, GLenum coord, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetMultiTexGenivEXT (GLenum texunit, GLenum coord, GLenum pname, GLint *params); +GLAPI void APIENTRY glMultiTexParameteriEXT (GLenum texunit, GLenum target, GLenum pname, GLint param); +GLAPI void APIENTRY glMultiTexParameterivEXT (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glMultiTexParameterfEXT (GLenum texunit, GLenum target, GLenum pname, GLfloat param); +GLAPI void APIENTRY glMultiTexParameterfvEXT (GLenum texunit, GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glMultiTexImage1DEXT (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glMultiTexImage2DEXT (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glMultiTexSubImage1DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glMultiTexSubImage2DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCopyMultiTexImage1DEXT (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border); +GLAPI void APIENTRY glCopyMultiTexImage2DEXT (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GLAPI void APIENTRY glCopyMultiTexSubImage1DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY 
glCopyMultiTexSubImage2DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetMultiTexImageEXT (GLenum texunit, GLenum target, GLint level, GLenum format, GLenum type, void *pixels); +GLAPI void APIENTRY glGetMultiTexParameterfvEXT (GLenum texunit, GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetMultiTexParameterivEXT (GLenum texunit, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetMultiTexLevelParameterfvEXT (GLenum texunit, GLenum target, GLint level, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetMultiTexLevelParameterivEXT (GLenum texunit, GLenum target, GLint level, GLenum pname, GLint *params); +GLAPI void APIENTRY glMultiTexImage3DEXT (GLenum texunit, GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glMultiTexSubImage3DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glCopyMultiTexSubImage3DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GLAPI void APIENTRY glEnableClientStateIndexedEXT (GLenum array, GLuint index); +GLAPI void APIENTRY glDisableClientStateIndexedEXT (GLenum array, GLuint index); +GLAPI void APIENTRY glGetFloatIndexedvEXT (GLenum target, GLuint index, GLfloat *data); +GLAPI void APIENTRY glGetDoubleIndexedvEXT (GLenum target, GLuint index, GLdouble *data); +GLAPI void APIENTRY glGetPointerIndexedvEXT (GLenum target, GLuint index, void **data); +GLAPI void APIENTRY glEnableIndexedEXT (GLenum target, GLuint index); +GLAPI void APIENTRY glDisableIndexedEXT (GLenum target, GLuint index); +GLAPI GLboolean APIENTRY glIsEnabledIndexedEXT (GLenum target, GLuint index); +GLAPI void APIENTRY glGetIntegerIndexedvEXT (GLenum target, GLuint index, GLint *data); +GLAPI void APIENTRY glGetBooleanIndexedvEXT (GLenum target, GLuint index, GLboolean *data); +GLAPI void APIENTRY glCompressedTextureImage3DEXT (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedTextureImage2DEXT (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedTextureImage1DEXT (GLuint texture, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedTextureSubImage3DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedTextureSubImage2DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedTextureSubImage1DEXT (GLuint texture, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glGetCompressedTextureImageEXT (GLuint texture, 
GLenum target, GLint lod, void *img); +GLAPI void APIENTRY glCompressedMultiTexImage3DEXT (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedMultiTexImage2DEXT (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedMultiTexImage1DEXT (GLenum texunit, GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedMultiTexSubImage3DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedMultiTexSubImage2DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glCompressedMultiTexSubImage1DEXT (GLenum texunit, GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *bits); +GLAPI void APIENTRY glGetCompressedMultiTexImageEXT (GLenum texunit, GLenum target, GLint lod, void *img); +GLAPI void APIENTRY glMatrixLoadTransposefEXT (GLenum mode, const GLfloat *m); +GLAPI void APIENTRY glMatrixLoadTransposedEXT (GLenum mode, const GLdouble *m); +GLAPI void APIENTRY glMatrixMultTransposefEXT (GLenum mode, const GLfloat *m); +GLAPI void APIENTRY glMatrixMultTransposedEXT (GLenum mode, const GLdouble *m); +GLAPI void APIENTRY glNamedBufferDataEXT (GLuint buffer, GLsizeiptr size, const void *data, GLenum usage); +GLAPI void APIENTRY glNamedBufferSubDataEXT (GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +GLAPI void *APIENTRY glMapNamedBufferEXT (GLuint buffer, GLenum access); +GLAPI GLboolean APIENTRY glUnmapNamedBufferEXT (GLuint buffer); +GLAPI void APIENTRY glGetNamedBufferParameterivEXT (GLuint buffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetNamedBufferPointervEXT (GLuint buffer, GLenum pname, void **params); +GLAPI void APIENTRY glGetNamedBufferSubDataEXT (GLuint buffer, GLintptr offset, GLsizeiptr size, void *data); +GLAPI void APIENTRY glProgramUniform1fEXT (GLuint program, GLint location, GLfloat v0); +GLAPI void APIENTRY glProgramUniform2fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1); +GLAPI void APIENTRY glProgramUniform3fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GLAPI void APIENTRY glProgramUniform4fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GLAPI void APIENTRY glProgramUniform1iEXT (GLuint program, GLint location, GLint v0); +GLAPI void APIENTRY glProgramUniform2iEXT (GLuint program, GLint location, GLint v0, GLint v1); +GLAPI void APIENTRY glProgramUniform3iEXT (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +GLAPI void APIENTRY glProgramUniform4iEXT (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GLAPI void APIENTRY glProgramUniform1fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform2fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform3fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat 
*value); +GLAPI void APIENTRY glProgramUniform4fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GLAPI void APIENTRY glProgramUniform1ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform2ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform3ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniform4ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GLAPI void APIENTRY glProgramUniformMatrix2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix2x3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3x2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix2x4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4x2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix3x4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glProgramUniformMatrix4x3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GLAPI void APIENTRY glTextureBufferEXT (GLuint texture, GLenum target, GLenum internalformat, GLuint buffer); +GLAPI void APIENTRY glMultiTexBufferEXT (GLenum texunit, GLenum target, GLenum internalformat, GLuint buffer); +GLAPI void APIENTRY glTextureParameterIivEXT (GLuint texture, GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glTextureParameterIuivEXT (GLuint texture, GLenum target, GLenum pname, const GLuint *params); +GLAPI void APIENTRY glGetTextureParameterIivEXT (GLuint texture, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTextureParameterIuivEXT (GLuint texture, GLenum target, GLenum pname, GLuint *params); +GLAPI void APIENTRY glMultiTexParameterIivEXT (GLenum texunit, GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glMultiTexParameterIuivEXT (GLenum texunit, GLenum target, GLenum pname, const GLuint *params); +GLAPI void APIENTRY glGetMultiTexParameterIivEXT (GLenum texunit, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetMultiTexParameterIuivEXT (GLenum texunit, GLenum target, GLenum pname, GLuint *params); +GLAPI void APIENTRY glProgramUniform1uiEXT (GLuint program, GLint location, GLuint v0); +GLAPI void APIENTRY glProgramUniform2uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1); +GLAPI void APIENTRY glProgramUniform3uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +GLAPI void APIENTRY glProgramUniform4uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GLAPI void APIENTRY glProgramUniform1uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void 
APIENTRY glProgramUniform2uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniform3uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glProgramUniform4uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glNamedProgramLocalParameters4fvEXT (GLuint program, GLenum target, GLuint index, GLsizei count, const GLfloat *params); +GLAPI void APIENTRY glNamedProgramLocalParameterI4iEXT (GLuint program, GLenum target, GLuint index, GLint x, GLint y, GLint z, GLint w); +GLAPI void APIENTRY glNamedProgramLocalParameterI4ivEXT (GLuint program, GLenum target, GLuint index, const GLint *params); +GLAPI void APIENTRY glNamedProgramLocalParametersI4ivEXT (GLuint program, GLenum target, GLuint index, GLsizei count, const GLint *params); +GLAPI void APIENTRY glNamedProgramLocalParameterI4uiEXT (GLuint program, GLenum target, GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GLAPI void APIENTRY glNamedProgramLocalParameterI4uivEXT (GLuint program, GLenum target, GLuint index, const GLuint *params); +GLAPI void APIENTRY glNamedProgramLocalParametersI4uivEXT (GLuint program, GLenum target, GLuint index, GLsizei count, const GLuint *params); +GLAPI void APIENTRY glGetNamedProgramLocalParameterIivEXT (GLuint program, GLenum target, GLuint index, GLint *params); +GLAPI void APIENTRY glGetNamedProgramLocalParameterIuivEXT (GLuint program, GLenum target, GLuint index, GLuint *params); +GLAPI void APIENTRY glEnableClientStateiEXT (GLenum array, GLuint index); +GLAPI void APIENTRY glDisableClientStateiEXT (GLenum array, GLuint index); +GLAPI void APIENTRY glGetFloati_vEXT (GLenum pname, GLuint index, GLfloat *params); +GLAPI void APIENTRY glGetDoublei_vEXT (GLenum pname, GLuint index, GLdouble *params); +GLAPI void APIENTRY glGetPointeri_vEXT (GLenum pname, GLuint index, void **params); +GLAPI void APIENTRY glNamedProgramStringEXT (GLuint program, GLenum target, GLenum format, GLsizei len, const void *string); +GLAPI void APIENTRY glNamedProgramLocalParameter4dEXT (GLuint program, GLenum target, GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glNamedProgramLocalParameter4dvEXT (GLuint program, GLenum target, GLuint index, const GLdouble *params); +GLAPI void APIENTRY glNamedProgramLocalParameter4fEXT (GLuint program, GLenum target, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glNamedProgramLocalParameter4fvEXT (GLuint program, GLenum target, GLuint index, const GLfloat *params); +GLAPI void APIENTRY glGetNamedProgramLocalParameterdvEXT (GLuint program, GLenum target, GLuint index, GLdouble *params); +GLAPI void APIENTRY glGetNamedProgramLocalParameterfvEXT (GLuint program, GLenum target, GLuint index, GLfloat *params); +GLAPI void APIENTRY glGetNamedProgramivEXT (GLuint program, GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetNamedProgramStringEXT (GLuint program, GLenum target, GLenum pname, void *string); +GLAPI void APIENTRY glNamedRenderbufferStorageEXT (GLuint renderbuffer, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetNamedRenderbufferParameterivEXT (GLuint renderbuffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glNamedRenderbufferStorageMultisampleEXT (GLuint renderbuffer, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glNamedRenderbufferStorageMultisampleCoverageEXT (GLuint 
renderbuffer, GLsizei coverageSamples, GLsizei colorSamples, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI GLenum APIENTRY glCheckNamedFramebufferStatusEXT (GLuint framebuffer, GLenum target); +GLAPI void APIENTRY glNamedFramebufferTexture1DEXT (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GLAPI void APIENTRY glNamedFramebufferTexture2DEXT (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GLAPI void APIENTRY glNamedFramebufferTexture3DEXT (GLuint framebuffer, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +GLAPI void APIENTRY glNamedFramebufferRenderbufferEXT (GLuint framebuffer, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GLAPI void APIENTRY glGetNamedFramebufferAttachmentParameterivEXT (GLuint framebuffer, GLenum attachment, GLenum pname, GLint *params); +GLAPI void APIENTRY glGenerateTextureMipmapEXT (GLuint texture, GLenum target); +GLAPI void APIENTRY glGenerateMultiTexMipmapEXT (GLenum texunit, GLenum target); +GLAPI void APIENTRY glFramebufferDrawBufferEXT (GLuint framebuffer, GLenum mode); +GLAPI void APIENTRY glFramebufferDrawBuffersEXT (GLuint framebuffer, GLsizei n, const GLenum *bufs); +GLAPI void APIENTRY glFramebufferReadBufferEXT (GLuint framebuffer, GLenum mode); +GLAPI void APIENTRY glGetFramebufferParameterivEXT (GLuint framebuffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glNamedCopyBufferSubDataEXT (GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GLAPI void APIENTRY glNamedFramebufferTextureEXT (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level); +GLAPI void APIENTRY glNamedFramebufferTextureLayerEXT (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLint layer); +GLAPI void APIENTRY glNamedFramebufferTextureFaceEXT (GLuint framebuffer, GLenum attachment, GLuint texture, GLint level, GLenum face); +GLAPI void APIENTRY glTextureRenderbufferEXT (GLuint texture, GLenum target, GLuint renderbuffer); +GLAPI void APIENTRY glMultiTexRenderbufferEXT (GLenum texunit, GLenum target, GLuint renderbuffer); +GLAPI void APIENTRY glVertexArrayVertexOffsetEXT (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayColorOffsetEXT (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayEdgeFlagOffsetEXT (GLuint vaobj, GLuint buffer, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayIndexOffsetEXT (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayNormalOffsetEXT (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayTexCoordOffsetEXT (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayMultiTexCoordOffsetEXT (GLuint vaobj, GLuint buffer, GLenum texunit, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayFogCoordOffsetEXT (GLuint vaobj, GLuint buffer, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArraySecondaryColorOffsetEXT (GLuint vaobj, GLuint buffer, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayVertexAttribOffsetEXT (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, 
GLboolean normalized, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glVertexArrayVertexAttribIOffsetEXT (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glEnableVertexArrayEXT (GLuint vaobj, GLenum array); +GLAPI void APIENTRY glDisableVertexArrayEXT (GLuint vaobj, GLenum array); +GLAPI void APIENTRY glEnableVertexArrayAttribEXT (GLuint vaobj, GLuint index); +GLAPI void APIENTRY glDisableVertexArrayAttribEXT (GLuint vaobj, GLuint index); +GLAPI void APIENTRY glGetVertexArrayIntegervEXT (GLuint vaobj, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetVertexArrayPointervEXT (GLuint vaobj, GLenum pname, void **param); +GLAPI void APIENTRY glGetVertexArrayIntegeri_vEXT (GLuint vaobj, GLuint index, GLenum pname, GLint *param); +GLAPI void APIENTRY glGetVertexArrayPointeri_vEXT (GLuint vaobj, GLuint index, GLenum pname, void **param); +GLAPI void *APIENTRY glMapNamedBufferRangeEXT (GLuint buffer, GLintptr offset, GLsizeiptr length, GLbitfield access); +GLAPI void APIENTRY glFlushMappedNamedBufferRangeEXT (GLuint buffer, GLintptr offset, GLsizeiptr length); +GLAPI void APIENTRY glNamedBufferStorageEXT (GLuint buffer, GLsizeiptr size, const void *data, GLbitfield flags); +GLAPI void APIENTRY glClearNamedBufferDataEXT (GLuint buffer, GLenum internalformat, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glClearNamedBufferSubDataEXT (GLuint buffer, GLenum internalformat, GLsizeiptr offset, GLsizeiptr size, GLenum format, GLenum type, const void *data); +GLAPI void APIENTRY glNamedFramebufferParameteriEXT (GLuint framebuffer, GLenum pname, GLint param); +GLAPI void APIENTRY glGetNamedFramebufferParameterivEXT (GLuint framebuffer, GLenum pname, GLint *params); +GLAPI void APIENTRY glProgramUniform1dEXT (GLuint program, GLint location, GLdouble x); +GLAPI void APIENTRY glProgramUniform2dEXT (GLuint program, GLint location, GLdouble x, GLdouble y); +GLAPI void APIENTRY glProgramUniform3dEXT (GLuint program, GLint location, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glProgramUniform4dEXT (GLuint program, GLint location, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glProgramUniform1dvEXT (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform2dvEXT (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform3dvEXT (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniform4dvEXT (GLuint program, GLint location, GLsizei count, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix2dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix3dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix2x3dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix2x4dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix3x2dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void 
APIENTRY glProgramUniformMatrix3x4dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4x2dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glProgramUniformMatrix4x3dvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLdouble *value); +GLAPI void APIENTRY glTextureBufferRangeEXT (GLuint texture, GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +GLAPI void APIENTRY glTextureStorage1DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GLAPI void APIENTRY glTextureStorage2DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glTextureStorage3DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GLAPI void APIENTRY glTextureStorage2DMultisampleEXT (GLuint texture, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glTextureStorage3DMultisampleEXT (GLuint texture, GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +GLAPI void APIENTRY glVertexArrayBindVertexBufferEXT (GLuint vaobj, GLuint bindingindex, GLuint buffer, GLintptr offset, GLsizei stride); +GLAPI void APIENTRY glVertexArrayVertexAttribFormatEXT (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLboolean normalized, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayVertexAttribIFormatEXT (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayVertexAttribLFormatEXT (GLuint vaobj, GLuint attribindex, GLint size, GLenum type, GLuint relativeoffset); +GLAPI void APIENTRY glVertexArrayVertexAttribBindingEXT (GLuint vaobj, GLuint attribindex, GLuint bindingindex); +GLAPI void APIENTRY glVertexArrayVertexBindingDivisorEXT (GLuint vaobj, GLuint bindingindex, GLuint divisor); +GLAPI void APIENTRY glVertexArrayVertexAttribLOffsetEXT (GLuint vaobj, GLuint buffer, GLuint index, GLint size, GLenum type, GLsizei stride, GLintptr offset); +GLAPI void APIENTRY glTexturePageCommitmentEXT (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean commit); +GLAPI void APIENTRY glVertexArrayVertexAttribDivisorEXT (GLuint vaobj, GLuint index, GLuint divisor); +#endif +#endif /* GL_EXT_direct_state_access */ + +#ifndef GL_EXT_draw_buffers2 +#define GL_EXT_draw_buffers2 1 +typedef void (APIENTRYP PFNGLCOLORMASKINDEXEDEXTPROC) (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glColorMaskIndexedEXT (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +#endif +#endif /* GL_EXT_draw_buffers2 */ + +#ifndef GL_EXT_draw_instanced +#define GL_EXT_draw_instanced 1 +typedef void (APIENTRYP PFNGLDRAWARRAYSINSTANCEDEXTPROC) (GLenum mode, GLint start, GLsizei count, GLsizei primcount); +typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawArraysInstancedEXT (GLenum mode, GLint start, GLsizei count, GLsizei primcount); +GLAPI 
void APIENTRY glDrawElementsInstancedEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#endif +#endif /* GL_EXT_draw_instanced */ + +#ifndef GL_EXT_draw_range_elements +#define GL_EXT_draw_range_elements 1 +#define GL_MAX_ELEMENTS_VERTICES_EXT 0x80E8 +#define GL_MAX_ELEMENTS_INDICES_EXT 0x80E9 +typedef void (APIENTRYP PFNGLDRAWRANGEELEMENTSEXTPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawRangeElementsEXT (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +#endif +#endif /* GL_EXT_draw_range_elements */ + +#ifndef GL_EXT_external_buffer +#define GL_EXT_external_buffer 1 +typedef void *GLeglClientBufferEXT; +typedef void (APIENTRYP PFNGLBUFFERSTORAGEEXTERNALEXTPROC) (GLenum target, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +typedef void (APIENTRYP PFNGLNAMEDBUFFERSTORAGEEXTERNALEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBufferStorageExternalEXT (GLenum target, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +GLAPI void APIENTRY glNamedBufferStorageExternalEXT (GLuint buffer, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +#endif +#endif /* GL_EXT_external_buffer */ + +#ifndef GL_EXT_fog_coord +#define GL_EXT_fog_coord 1 +#define GL_FOG_COORDINATE_SOURCE_EXT 0x8450 +#define GL_FOG_COORDINATE_EXT 0x8451 +#define GL_FRAGMENT_DEPTH_EXT 0x8452 +#define GL_CURRENT_FOG_COORDINATE_EXT 0x8453 +#define GL_FOG_COORDINATE_ARRAY_TYPE_EXT 0x8454 +#define GL_FOG_COORDINATE_ARRAY_STRIDE_EXT 0x8455 +#define GL_FOG_COORDINATE_ARRAY_POINTER_EXT 0x8456 +#define GL_FOG_COORDINATE_ARRAY_EXT 0x8457 +typedef void (APIENTRYP PFNGLFOGCOORDFEXTPROC) (GLfloat coord); +typedef void (APIENTRYP PFNGLFOGCOORDFVEXTPROC) (const GLfloat *coord); +typedef void (APIENTRYP PFNGLFOGCOORDDEXTPROC) (GLdouble coord); +typedef void (APIENTRYP PFNGLFOGCOORDDVEXTPROC) (const GLdouble *coord); +typedef void (APIENTRYP PFNGLFOGCOORDPOINTEREXTPROC) (GLenum type, GLsizei stride, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFogCoordfEXT (GLfloat coord); +GLAPI void APIENTRY glFogCoordfvEXT (const GLfloat *coord); +GLAPI void APIENTRY glFogCoorddEXT (GLdouble coord); +GLAPI void APIENTRY glFogCoorddvEXT (const GLdouble *coord); +GLAPI void APIENTRY glFogCoordPointerEXT (GLenum type, GLsizei stride, const void *pointer); +#endif +#endif /* GL_EXT_fog_coord */ + +#ifndef GL_EXT_framebuffer_blit +#define GL_EXT_framebuffer_blit 1 +#define GL_READ_FRAMEBUFFER_EXT 0x8CA8 +#define GL_DRAW_FRAMEBUFFER_EXT 0x8CA9 +#define GL_DRAW_FRAMEBUFFER_BINDING_EXT 0x8CA6 +#define GL_READ_FRAMEBUFFER_BINDING_EXT 0x8CAA +typedef void (APIENTRYP PFNGLBLITFRAMEBUFFEREXTPROC) (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlitFramebufferEXT (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#endif +#endif /* GL_EXT_framebuffer_blit */ + +#ifndef GL_EXT_framebuffer_multisample +#define GL_EXT_framebuffer_multisample 1 +#define GL_RENDERBUFFER_SAMPLES_EXT 0x8CAB +#define 
GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_EXT 0x8D56 +#define GL_MAX_SAMPLES_EXT 0x8D57 +typedef void (APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glRenderbufferStorageMultisampleEXT (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +#endif +#endif /* GL_EXT_framebuffer_multisample */ + +#ifndef GL_EXT_framebuffer_multisample_blit_scaled +#define GL_EXT_framebuffer_multisample_blit_scaled 1 +#define GL_SCALED_RESOLVE_FASTEST_EXT 0x90BA +#define GL_SCALED_RESOLVE_NICEST_EXT 0x90BB +#endif /* GL_EXT_framebuffer_multisample_blit_scaled */ + +#ifndef GL_EXT_framebuffer_object +#define GL_EXT_framebuffer_object 1 +#define GL_INVALID_FRAMEBUFFER_OPERATION_EXT 0x0506 +#define GL_MAX_RENDERBUFFER_SIZE_EXT 0x84E8 +#define GL_FRAMEBUFFER_BINDING_EXT 0x8CA6 +#define GL_RENDERBUFFER_BINDING_EXT 0x8CA7 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_EXT 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_EXT 0x8CD3 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_EXT 0x8CD4 +#define GL_FRAMEBUFFER_COMPLETE_EXT 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT_EXT 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_EXT 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT 0x8CD9 +#define GL_FRAMEBUFFER_INCOMPLETE_FORMATS_EXT 0x8CDA +#define GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER_EXT 0x8CDB +#define GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER_EXT 0x8CDC +#define GL_FRAMEBUFFER_UNSUPPORTED_EXT 0x8CDD +#define GL_MAX_COLOR_ATTACHMENTS_EXT 0x8CDF +#define GL_COLOR_ATTACHMENT0_EXT 0x8CE0 +#define GL_COLOR_ATTACHMENT1_EXT 0x8CE1 +#define GL_COLOR_ATTACHMENT2_EXT 0x8CE2 +#define GL_COLOR_ATTACHMENT3_EXT 0x8CE3 +#define GL_COLOR_ATTACHMENT4_EXT 0x8CE4 +#define GL_COLOR_ATTACHMENT5_EXT 0x8CE5 +#define GL_COLOR_ATTACHMENT6_EXT 0x8CE6 +#define GL_COLOR_ATTACHMENT7_EXT 0x8CE7 +#define GL_COLOR_ATTACHMENT8_EXT 0x8CE8 +#define GL_COLOR_ATTACHMENT9_EXT 0x8CE9 +#define GL_COLOR_ATTACHMENT10_EXT 0x8CEA +#define GL_COLOR_ATTACHMENT11_EXT 0x8CEB +#define GL_COLOR_ATTACHMENT12_EXT 0x8CEC +#define GL_COLOR_ATTACHMENT13_EXT 0x8CED +#define GL_COLOR_ATTACHMENT14_EXT 0x8CEE +#define GL_COLOR_ATTACHMENT15_EXT 0x8CEF +#define GL_DEPTH_ATTACHMENT_EXT 0x8D00 +#define GL_STENCIL_ATTACHMENT_EXT 0x8D20 +#define GL_FRAMEBUFFER_EXT 0x8D40 +#define GL_RENDERBUFFER_EXT 0x8D41 +#define GL_RENDERBUFFER_WIDTH_EXT 0x8D42 +#define GL_RENDERBUFFER_HEIGHT_EXT 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT_EXT 0x8D44 +#define GL_STENCIL_INDEX1_EXT 0x8D46 +#define GL_STENCIL_INDEX4_EXT 0x8D47 +#define GL_STENCIL_INDEX8_EXT 0x8D48 +#define GL_STENCIL_INDEX16_EXT 0x8D49 +#define GL_RENDERBUFFER_RED_SIZE_EXT 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE_EXT 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE_EXT 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE_EXT 0x8D53 +#define GL_RENDERBUFFER_DEPTH_SIZE_EXT 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE_EXT 0x8D55 +typedef GLboolean (APIENTRYP PFNGLISRENDERBUFFEREXTPROC) (GLuint renderbuffer); +typedef void (APIENTRYP PFNGLBINDRENDERBUFFEREXTPROC) (GLenum target, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLDELETERENDERBUFFERSEXTPROC) (GLsizei n, const GLuint *renderbuffers); +typedef void (APIENTRYP PFNGLGENRENDERBUFFERSEXTPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (APIENTRYP 
PFNGLRENDERBUFFERSTORAGEEXTPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVEXTPROC) (GLenum target, GLenum pname, GLint *params); +typedef GLboolean (APIENTRYP PFNGLISFRAMEBUFFEREXTPROC) (GLuint framebuffer); +typedef void (APIENTRYP PFNGLBINDFRAMEBUFFEREXTPROC) (GLenum target, GLuint framebuffer); +typedef void (APIENTRYP PFNGLDELETEFRAMEBUFFERSEXTPROC) (GLsizei n, const GLuint *framebuffers); +typedef void (APIENTRYP PFNGLGENFRAMEBUFFERSEXTPROC) (GLsizei n, GLuint *framebuffers); +typedef GLenum (APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSEXTPROC) (GLenum target); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE1DEXTPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DEXTPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE3DEXTPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +typedef void (APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFEREXTPROC) (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVEXTPROC) (GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGENERATEMIPMAPEXTPROC) (GLenum target); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLboolean APIENTRY glIsRenderbufferEXT (GLuint renderbuffer); +GLAPI void APIENTRY glBindRenderbufferEXT (GLenum target, GLuint renderbuffer); +GLAPI void APIENTRY glDeleteRenderbuffersEXT (GLsizei n, const GLuint *renderbuffers); +GLAPI void APIENTRY glGenRenderbuffersEXT (GLsizei n, GLuint *renderbuffers); +GLAPI void APIENTRY glRenderbufferStorageEXT (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GLAPI void APIENTRY glGetRenderbufferParameterivEXT (GLenum target, GLenum pname, GLint *params); +GLAPI GLboolean APIENTRY glIsFramebufferEXT (GLuint framebuffer); +GLAPI void APIENTRY glBindFramebufferEXT (GLenum target, GLuint framebuffer); +GLAPI void APIENTRY glDeleteFramebuffersEXT (GLsizei n, const GLuint *framebuffers); +GLAPI void APIENTRY glGenFramebuffersEXT (GLsizei n, GLuint *framebuffers); +GLAPI GLenum APIENTRY glCheckFramebufferStatusEXT (GLenum target); +GLAPI void APIENTRY glFramebufferTexture1DEXT (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GLAPI void APIENTRY glFramebufferTexture2DEXT (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GLAPI void APIENTRY glFramebufferTexture3DEXT (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +GLAPI void APIENTRY glFramebufferRenderbufferEXT (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GLAPI void APIENTRY glGetFramebufferAttachmentParameterivEXT (GLenum target, GLenum attachment, GLenum pname, GLint *params); +GLAPI void APIENTRY glGenerateMipmapEXT (GLenum target); +#endif +#endif /* GL_EXT_framebuffer_object */ + +#ifndef GL_EXT_framebuffer_sRGB +#define GL_EXT_framebuffer_sRGB 1 +#define GL_FRAMEBUFFER_SRGB_EXT 0x8DB9 +#define GL_FRAMEBUFFER_SRGB_CAPABLE_EXT 0x8DBA +#endif /* GL_EXT_framebuffer_sRGB */ + +#ifndef GL_EXT_geometry_shader4 +#define GL_EXT_geometry_shader4 1 +#define GL_GEOMETRY_SHADER_EXT 0x8DD9 +#define GL_GEOMETRY_VERTICES_OUT_EXT 0x8DDA +#define GL_GEOMETRY_INPUT_TYPE_EXT 0x8DDB 
+#define GL_GEOMETRY_OUTPUT_TYPE_EXT 0x8DDC +#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_EXT 0x8C29 +#define GL_MAX_GEOMETRY_VARYING_COMPONENTS_EXT 0x8DDD +#define GL_MAX_VERTEX_VARYING_COMPONENTS_EXT 0x8DDE +#define GL_MAX_VARYING_COMPONENTS_EXT 0x8B4B +#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS_EXT 0x8DDF +#define GL_MAX_GEOMETRY_OUTPUT_VERTICES_EXT 0x8DE0 +#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS_EXT 0x8DE1 +#define GL_LINES_ADJACENCY_EXT 0x000A +#define GL_LINE_STRIP_ADJACENCY_EXT 0x000B +#define GL_TRIANGLES_ADJACENCY_EXT 0x000C +#define GL_TRIANGLE_STRIP_ADJACENCY_EXT 0x000D +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_EXT 0x8DA8 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_COUNT_EXT 0x8DA9 +#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED_EXT 0x8DA7 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER_EXT 0x8CD4 +#define GL_PROGRAM_POINT_SIZE_EXT 0x8642 +typedef void (APIENTRYP PFNGLPROGRAMPARAMETERIEXTPROC) (GLuint program, GLenum pname, GLint value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glProgramParameteriEXT (GLuint program, GLenum pname, GLint value); +#endif +#endif /* GL_EXT_geometry_shader4 */ + +#ifndef GL_EXT_gpu_program_parameters +#define GL_EXT_gpu_program_parameters 1 +typedef void (APIENTRYP PFNGLPROGRAMENVPARAMETERS4FVEXTPROC) (GLenum target, GLuint index, GLsizei count, const GLfloat *params); +typedef void (APIENTRYP PFNGLPROGRAMLOCALPARAMETERS4FVEXTPROC) (GLenum target, GLuint index, GLsizei count, const GLfloat *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glProgramEnvParameters4fvEXT (GLenum target, GLuint index, GLsizei count, const GLfloat *params); +GLAPI void APIENTRY glProgramLocalParameters4fvEXT (GLenum target, GLuint index, GLsizei count, const GLfloat *params); +#endif +#endif /* GL_EXT_gpu_program_parameters */ + +#ifndef GL_EXT_gpu_shader4 +#define GL_EXT_gpu_shader4 1 +#define GL_VERTEX_ATTRIB_ARRAY_INTEGER_EXT 0x88FD +#define GL_SAMPLER_1D_ARRAY_EXT 0x8DC0 +#define GL_SAMPLER_2D_ARRAY_EXT 0x8DC1 +#define GL_SAMPLER_BUFFER_EXT 0x8DC2 +#define GL_SAMPLER_1D_ARRAY_SHADOW_EXT 0x8DC3 +#define GL_SAMPLER_2D_ARRAY_SHADOW_EXT 0x8DC4 +#define GL_SAMPLER_CUBE_SHADOW_EXT 0x8DC5 +#define GL_UNSIGNED_INT_VEC2_EXT 0x8DC6 +#define GL_UNSIGNED_INT_VEC3_EXT 0x8DC7 +#define GL_UNSIGNED_INT_VEC4_EXT 0x8DC8 +#define GL_INT_SAMPLER_1D_EXT 0x8DC9 +#define GL_INT_SAMPLER_2D_EXT 0x8DCA +#define GL_INT_SAMPLER_3D_EXT 0x8DCB +#define GL_INT_SAMPLER_CUBE_EXT 0x8DCC +#define GL_INT_SAMPLER_2D_RECT_EXT 0x8DCD +#define GL_INT_SAMPLER_1D_ARRAY_EXT 0x8DCE +#define GL_INT_SAMPLER_2D_ARRAY_EXT 0x8DCF +#define GL_INT_SAMPLER_BUFFER_EXT 0x8DD0 +#define GL_UNSIGNED_INT_SAMPLER_1D_EXT 0x8DD1 +#define GL_UNSIGNED_INT_SAMPLER_2D_EXT 0x8DD2 +#define GL_UNSIGNED_INT_SAMPLER_3D_EXT 0x8DD3 +#define GL_UNSIGNED_INT_SAMPLER_CUBE_EXT 0x8DD4 +#define GL_UNSIGNED_INT_SAMPLER_2D_RECT_EXT 0x8DD5 +#define GL_UNSIGNED_INT_SAMPLER_1D_ARRAY_EXT 0x8DD6 +#define GL_UNSIGNED_INT_SAMPLER_2D_ARRAY_EXT 0x8DD7 +#define GL_UNSIGNED_INT_SAMPLER_BUFFER_EXT 0x8DD8 +#define GL_MIN_PROGRAM_TEXEL_OFFSET_EXT 0x8904 +#define GL_MAX_PROGRAM_TEXEL_OFFSET_EXT 0x8905 +typedef void (APIENTRYP PFNGLGETUNIFORMUIVEXTPROC) (GLuint program, GLint location, GLuint *params); +typedef void (APIENTRYP PFNGLBINDFRAGDATALOCATIONEXTPROC) (GLuint program, GLuint color, const GLchar *name); +typedef GLint (APIENTRYP PFNGLGETFRAGDATALOCATIONEXTPROC) (GLuint program, const GLchar *name); +typedef void (APIENTRYP PFNGLUNIFORM1UIEXTPROC) (GLint location, GLuint v0); +typedef void (APIENTRYP PFNGLUNIFORM2UIEXTPROC) (GLint 
location, GLuint v0, GLuint v1); +typedef void (APIENTRYP PFNGLUNIFORM3UIEXTPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (APIENTRYP PFNGLUNIFORM4UIEXTPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (APIENTRYP PFNGLUNIFORM1UIVEXTPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLUNIFORM2UIVEXTPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLUNIFORM3UIVEXTPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (APIENTRYP PFNGLUNIFORM4UIVEXTPROC) (GLint location, GLsizei count, const GLuint *value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGetUniformuivEXT (GLuint program, GLint location, GLuint *params); +GLAPI void APIENTRY glBindFragDataLocationEXT (GLuint program, GLuint color, const GLchar *name); +GLAPI GLint APIENTRY glGetFragDataLocationEXT (GLuint program, const GLchar *name); +GLAPI void APIENTRY glUniform1uiEXT (GLint location, GLuint v0); +GLAPI void APIENTRY glUniform2uiEXT (GLint location, GLuint v0, GLuint v1); +GLAPI void APIENTRY glUniform3uiEXT (GLint location, GLuint v0, GLuint v1, GLuint v2); +GLAPI void APIENTRY glUniform4uiEXT (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GLAPI void APIENTRY glUniform1uivEXT (GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glUniform2uivEXT (GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glUniform3uivEXT (GLint location, GLsizei count, const GLuint *value); +GLAPI void APIENTRY glUniform4uivEXT (GLint location, GLsizei count, const GLuint *value); +#endif +#endif /* GL_EXT_gpu_shader4 */ + +#ifndef GL_EXT_histogram +#define GL_EXT_histogram 1 +#define GL_HISTOGRAM_EXT 0x8024 +#define GL_PROXY_HISTOGRAM_EXT 0x8025 +#define GL_HISTOGRAM_WIDTH_EXT 0x8026 +#define GL_HISTOGRAM_FORMAT_EXT 0x8027 +#define GL_HISTOGRAM_RED_SIZE_EXT 0x8028 +#define GL_HISTOGRAM_GREEN_SIZE_EXT 0x8029 +#define GL_HISTOGRAM_BLUE_SIZE_EXT 0x802A +#define GL_HISTOGRAM_ALPHA_SIZE_EXT 0x802B +#define GL_HISTOGRAM_LUMINANCE_SIZE_EXT 0x802C +#define GL_HISTOGRAM_SINK_EXT 0x802D +#define GL_MINMAX_EXT 0x802E +#define GL_MINMAX_FORMAT_EXT 0x802F +#define GL_MINMAX_SINK_EXT 0x8030 +#define GL_TABLE_TOO_LARGE_EXT 0x8031 +typedef void (APIENTRYP PFNGLGETHISTOGRAMEXTPROC) (GLenum target, GLboolean reset, GLenum format, GLenum type, void *values); +typedef void (APIENTRYP PFNGLGETHISTOGRAMPARAMETERFVEXTPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETHISTOGRAMPARAMETERIVEXTPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETMINMAXEXTPROC) (GLenum target, GLboolean reset, GLenum format, GLenum type, void *values); +typedef void (APIENTRYP PFNGLGETMINMAXPARAMETERFVEXTPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETMINMAXPARAMETERIVEXTPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLHISTOGRAMEXTPROC) (GLenum target, GLsizei width, GLenum internalformat, GLboolean sink); +typedef void (APIENTRYP PFNGLMINMAXEXTPROC) (GLenum target, GLenum internalformat, GLboolean sink); +typedef void (APIENTRYP PFNGLRESETHISTOGRAMEXTPROC) (GLenum target); +typedef void (APIENTRYP PFNGLRESETMINMAXEXTPROC) (GLenum target); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGetHistogramEXT (GLenum target, GLboolean reset, GLenum format, GLenum type, void *values); +GLAPI void APIENTRY glGetHistogramParameterfvEXT (GLenum target, 
GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetHistogramParameterivEXT (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetMinmaxEXT (GLenum target, GLboolean reset, GLenum format, GLenum type, void *values); +GLAPI void APIENTRY glGetMinmaxParameterfvEXT (GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetMinmaxParameterivEXT (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glHistogramEXT (GLenum target, GLsizei width, GLenum internalformat, GLboolean sink); +GLAPI void APIENTRY glMinmaxEXT (GLenum target, GLenum internalformat, GLboolean sink); +GLAPI void APIENTRY glResetHistogramEXT (GLenum target); +GLAPI void APIENTRY glResetMinmaxEXT (GLenum target); +#endif +#endif /* GL_EXT_histogram */ + +#ifndef GL_EXT_index_array_formats +#define GL_EXT_index_array_formats 1 +#define GL_IUI_V2F_EXT 0x81AD +#define GL_IUI_V3F_EXT 0x81AE +#define GL_IUI_N3F_V2F_EXT 0x81AF +#define GL_IUI_N3F_V3F_EXT 0x81B0 +#define GL_T2F_IUI_V2F_EXT 0x81B1 +#define GL_T2F_IUI_V3F_EXT 0x81B2 +#define GL_T2F_IUI_N3F_V2F_EXT 0x81B3 +#define GL_T2F_IUI_N3F_V3F_EXT 0x81B4 +#endif /* GL_EXT_index_array_formats */ + +#ifndef GL_EXT_index_func +#define GL_EXT_index_func 1 +#define GL_INDEX_TEST_EXT 0x81B5 +#define GL_INDEX_TEST_FUNC_EXT 0x81B6 +#define GL_INDEX_TEST_REF_EXT 0x81B7 +typedef void (APIENTRYP PFNGLINDEXFUNCEXTPROC) (GLenum func, GLclampf ref); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glIndexFuncEXT (GLenum func, GLclampf ref); +#endif +#endif /* GL_EXT_index_func */ + +#ifndef GL_EXT_index_material +#define GL_EXT_index_material 1 +#define GL_INDEX_MATERIAL_EXT 0x81B8 +#define GL_INDEX_MATERIAL_PARAMETER_EXT 0x81B9 +#define GL_INDEX_MATERIAL_FACE_EXT 0x81BA +typedef void (APIENTRYP PFNGLINDEXMATERIALEXTPROC) (GLenum face, GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glIndexMaterialEXT (GLenum face, GLenum mode); +#endif +#endif /* GL_EXT_index_material */ + +#ifndef GL_EXT_index_texture +#define GL_EXT_index_texture 1 +#endif /* GL_EXT_index_texture */ + +#ifndef GL_EXT_light_texture +#define GL_EXT_light_texture 1 +#define GL_FRAGMENT_MATERIAL_EXT 0x8349 +#define GL_FRAGMENT_NORMAL_EXT 0x834A +#define GL_FRAGMENT_COLOR_EXT 0x834C +#define GL_ATTENUATION_EXT 0x834D +#define GL_SHADOW_ATTENUATION_EXT 0x834E +#define GL_TEXTURE_APPLICATION_MODE_EXT 0x834F +#define GL_TEXTURE_LIGHT_EXT 0x8350 +#define GL_TEXTURE_MATERIAL_FACE_EXT 0x8351 +#define GL_TEXTURE_MATERIAL_PARAMETER_EXT 0x8352 +typedef void (APIENTRYP PFNGLAPPLYTEXTUREEXTPROC) (GLenum mode); +typedef void (APIENTRYP PFNGLTEXTURELIGHTEXTPROC) (GLenum pname); +typedef void (APIENTRYP PFNGLTEXTUREMATERIALEXTPROC) (GLenum face, GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glApplyTextureEXT (GLenum mode); +GLAPI void APIENTRY glTextureLightEXT (GLenum pname); +GLAPI void APIENTRY glTextureMaterialEXT (GLenum face, GLenum mode); +#endif +#endif /* GL_EXT_light_texture */ + +#ifndef GL_EXT_memory_object +#define GL_EXT_memory_object 1 +#define GL_TEXTURE_TILING_EXT 0x9580 +#define GL_DEDICATED_MEMORY_OBJECT_EXT 0x9581 +#define GL_PROTECTED_MEMORY_OBJECT_EXT 0x959B +#define GL_NUM_TILING_TYPES_EXT 0x9582 +#define GL_TILING_TYPES_EXT 0x9583 +#define GL_OPTIMAL_TILING_EXT 0x9584 +#define GL_LINEAR_TILING_EXT 0x9585 +#define GL_NUM_DEVICE_UUIDS_EXT 0x9596 +#define GL_DEVICE_UUID_EXT 0x9597 +#define GL_DRIVER_UUID_EXT 0x9598 +#define GL_UUID_SIZE_EXT 16 +typedef void (APIENTRYP PFNGLGETUNSIGNEDBYTEVEXTPROC) (GLenum pname, GLubyte *data); +typedef 
void (APIENTRYP PFNGLGETUNSIGNEDBYTEI_VEXTPROC) (GLenum target, GLuint index, GLubyte *data); +typedef void (APIENTRYP PFNGLDELETEMEMORYOBJECTSEXTPROC) (GLsizei n, const GLuint *memoryObjects); +typedef GLboolean (APIENTRYP PFNGLISMEMORYOBJECTEXTPROC) (GLuint memoryObject); +typedef void (APIENTRYP PFNGLCREATEMEMORYOBJECTSEXTPROC) (GLsizei n, GLuint *memoryObjects); +typedef void (APIENTRYP PFNGLMEMORYOBJECTPARAMETERIVEXTPROC) (GLuint memoryObject, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLGETMEMORYOBJECTPARAMETERIVEXTPROC) (GLuint memoryObject, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLTEXSTORAGEMEM2DEXTPROC) (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +typedef void (APIENTRYP PFNGLTEXSTORAGEMEM2DMULTISAMPLEEXTPROC) (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (APIENTRYP PFNGLTEXSTORAGEMEM3DEXTPROC) (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +typedef void (APIENTRYP PFNGLTEXSTORAGEMEM3DMULTISAMPLEEXTPROC) (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (APIENTRYP PFNGLBUFFERSTORAGEMEMEXTPROC) (GLenum target, GLsizeiptr size, GLuint memory, GLuint64 offset); +typedef void (APIENTRYP PFNGLTEXTURESTORAGEMEM2DEXTPROC) (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +typedef void (APIENTRYP PFNGLTEXTURESTORAGEMEM2DMULTISAMPLEEXTPROC) (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (APIENTRYP PFNGLTEXTURESTORAGEMEM3DEXTPROC) (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +typedef void (APIENTRYP PFNGLTEXTURESTORAGEMEM3DMULTISAMPLEEXTPROC) (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (APIENTRYP PFNGLNAMEDBUFFERSTORAGEMEMEXTPROC) (GLuint buffer, GLsizeiptr size, GLuint memory, GLuint64 offset); +typedef void (APIENTRYP PFNGLTEXSTORAGEMEM1DEXTPROC) (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLuint memory, GLuint64 offset); +typedef void (APIENTRYP PFNGLTEXTURESTORAGEMEM1DEXTPROC) (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLuint memory, GLuint64 offset); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGetUnsignedBytevEXT (GLenum pname, GLubyte *data); +GLAPI void APIENTRY glGetUnsignedBytei_vEXT (GLenum target, GLuint index, GLubyte *data); +GLAPI void APIENTRY glDeleteMemoryObjectsEXT (GLsizei n, const GLuint *memoryObjects); +GLAPI GLboolean APIENTRY glIsMemoryObjectEXT (GLuint memoryObject); +GLAPI void APIENTRY glCreateMemoryObjectsEXT (GLsizei n, GLuint *memoryObjects); +GLAPI void APIENTRY glMemoryObjectParameterivEXT (GLuint memoryObject, GLenum pname, const GLint *params); +GLAPI void APIENTRY glGetMemoryObjectParameterivEXT (GLuint memoryObject, GLenum pname, GLint *params); +GLAPI void APIENTRY glTexStorageMem2DEXT (GLenum target, GLsizei levels, GLenum internalFormat, 
GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +GLAPI void APIENTRY glTexStorageMem2DMultisampleEXT (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GLAPI void APIENTRY glTexStorageMem3DEXT (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +GLAPI void APIENTRY glTexStorageMem3DMultisampleEXT (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GLAPI void APIENTRY glBufferStorageMemEXT (GLenum target, GLsizeiptr size, GLuint memory, GLuint64 offset); +GLAPI void APIENTRY glTextureStorageMem2DEXT (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +GLAPI void APIENTRY glTextureStorageMem2DMultisampleEXT (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GLAPI void APIENTRY glTextureStorageMem3DEXT (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +GLAPI void APIENTRY glTextureStorageMem3DMultisampleEXT (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GLAPI void APIENTRY glNamedBufferStorageMemEXT (GLuint buffer, GLsizeiptr size, GLuint memory, GLuint64 offset); +GLAPI void APIENTRY glTexStorageMem1DEXT (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLuint memory, GLuint64 offset); +GLAPI void APIENTRY glTextureStorageMem1DEXT (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLuint memory, GLuint64 offset); +#endif +#endif /* GL_EXT_memory_object */ + +#ifndef GL_EXT_memory_object_fd +#define GL_EXT_memory_object_fd 1 +#define GL_HANDLE_TYPE_OPAQUE_FD_EXT 0x9586 +typedef void (APIENTRYP PFNGLIMPORTMEMORYFDEXTPROC) (GLuint memory, GLuint64 size, GLenum handleType, GLint fd); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glImportMemoryFdEXT (GLuint memory, GLuint64 size, GLenum handleType, GLint fd); +#endif +#endif /* GL_EXT_memory_object_fd */ + +#ifndef GL_EXT_memory_object_win32 +#define GL_EXT_memory_object_win32 1 +#define GL_HANDLE_TYPE_OPAQUE_WIN32_EXT 0x9587 +#define GL_HANDLE_TYPE_OPAQUE_WIN32_KMT_EXT 0x9588 +#define GL_DEVICE_LUID_EXT 0x9599 +#define GL_DEVICE_NODE_MASK_EXT 0x959A +#define GL_LUID_SIZE_EXT 8 +#define GL_HANDLE_TYPE_D3D12_TILEPOOL_EXT 0x9589 +#define GL_HANDLE_TYPE_D3D12_RESOURCE_EXT 0x958A +#define GL_HANDLE_TYPE_D3D11_IMAGE_EXT 0x958B +#define GL_HANDLE_TYPE_D3D11_IMAGE_KMT_EXT 0x958C +typedef void (APIENTRYP PFNGLIMPORTMEMORYWIN32HANDLEEXTPROC) (GLuint memory, GLuint64 size, GLenum handleType, void *handle); +typedef void (APIENTRYP PFNGLIMPORTMEMORYWIN32NAMEEXTPROC) (GLuint memory, GLuint64 size, GLenum handleType, const void *name); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glImportMemoryWin32HandleEXT (GLuint memory, GLuint64 size, GLenum handleType, void *handle); +GLAPI void APIENTRY glImportMemoryWin32NameEXT (GLuint memory, GLuint64 size, GLenum handleType, const void *name); +#endif +#endif /* GL_EXT_memory_object_win32 */ + +#ifndef GL_EXT_misc_attribute +#define GL_EXT_misc_attribute 1 +#endif /* GL_EXT_misc_attribute 
*/ + +#ifndef GL_EXT_multi_draw_arrays +#define GL_EXT_multi_draw_arrays 1 +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSEXTPROC) (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSEXTPROC) (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMultiDrawArraysEXT (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +GLAPI void APIENTRY glMultiDrawElementsEXT (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount); +#endif +#endif /* GL_EXT_multi_draw_arrays */ + +#ifndef GL_EXT_multisample +#define GL_EXT_multisample 1 +#define GL_MULTISAMPLE_EXT 0x809D +#define GL_SAMPLE_ALPHA_TO_MASK_EXT 0x809E +#define GL_SAMPLE_ALPHA_TO_ONE_EXT 0x809F +#define GL_SAMPLE_MASK_EXT 0x80A0 +#define GL_1PASS_EXT 0x80A1 +#define GL_2PASS_0_EXT 0x80A2 +#define GL_2PASS_1_EXT 0x80A3 +#define GL_4PASS_0_EXT 0x80A4 +#define GL_4PASS_1_EXT 0x80A5 +#define GL_4PASS_2_EXT 0x80A6 +#define GL_4PASS_3_EXT 0x80A7 +#define GL_SAMPLE_BUFFERS_EXT 0x80A8 +#define GL_SAMPLES_EXT 0x80A9 +#define GL_SAMPLE_MASK_VALUE_EXT 0x80AA +#define GL_SAMPLE_MASK_INVERT_EXT 0x80AB +#define GL_SAMPLE_PATTERN_EXT 0x80AC +#define GL_MULTISAMPLE_BIT_EXT 0x20000000 +typedef void (APIENTRYP PFNGLSAMPLEMASKEXTPROC) (GLclampf value, GLboolean invert); +typedef void (APIENTRYP PFNGLSAMPLEPATTERNEXTPROC) (GLenum pattern); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSampleMaskEXT (GLclampf value, GLboolean invert); +GLAPI void APIENTRY glSamplePatternEXT (GLenum pattern); +#endif +#endif /* GL_EXT_multisample */ + +#ifndef GL_EXT_packed_depth_stencil +#define GL_EXT_packed_depth_stencil 1 +#define GL_DEPTH_STENCIL_EXT 0x84F9 +#define GL_UNSIGNED_INT_24_8_EXT 0x84FA +#define GL_DEPTH24_STENCIL8_EXT 0x88F0 +#define GL_TEXTURE_STENCIL_SIZE_EXT 0x88F1 +#endif /* GL_EXT_packed_depth_stencil */ + +#ifndef GL_EXT_packed_float +#define GL_EXT_packed_float 1 +#define GL_R11F_G11F_B10F_EXT 0x8C3A +#define GL_UNSIGNED_INT_10F_11F_11F_REV_EXT 0x8C3B +#define GL_RGBA_SIGNED_COMPONENTS_EXT 0x8C3C +#endif /* GL_EXT_packed_float */ + +#ifndef GL_EXT_packed_pixels +#define GL_EXT_packed_pixels 1 +#define GL_UNSIGNED_BYTE_3_3_2_EXT 0x8032 +#define GL_UNSIGNED_SHORT_4_4_4_4_EXT 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1_EXT 0x8034 +#define GL_UNSIGNED_INT_8_8_8_8_EXT 0x8035 +#define GL_UNSIGNED_INT_10_10_10_2_EXT 0x8036 +#endif /* GL_EXT_packed_pixels */ + +#ifndef GL_EXT_paletted_texture +#define GL_EXT_paletted_texture 1 +#define GL_COLOR_INDEX1_EXT 0x80E2 +#define GL_COLOR_INDEX2_EXT 0x80E3 +#define GL_COLOR_INDEX4_EXT 0x80E4 +#define GL_COLOR_INDEX8_EXT 0x80E5 +#define GL_COLOR_INDEX12_EXT 0x80E6 +#define GL_COLOR_INDEX16_EXT 0x80E7 +#define GL_TEXTURE_INDEX_SIZE_EXT 0x80ED +typedef void (APIENTRYP PFNGLCOLORTABLEEXTPROC) (GLenum target, GLenum internalFormat, GLsizei width, GLenum format, GLenum type, const void *table); +typedef void (APIENTRYP PFNGLGETCOLORTABLEEXTPROC) (GLenum target, GLenum format, GLenum type, void *data); +typedef void (APIENTRYP PFNGLGETCOLORTABLEPARAMETERIVEXTPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETCOLORTABLEPARAMETERFVEXTPROC) (GLenum target, GLenum pname, GLfloat *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glColorTableEXT (GLenum target, GLenum internalFormat, GLsizei width, GLenum format, GLenum type, const void *table); +GLAPI void APIENTRY 
glGetColorTableEXT (GLenum target, GLenum format, GLenum type, void *data); +GLAPI void APIENTRY glGetColorTableParameterivEXT (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetColorTableParameterfvEXT (GLenum target, GLenum pname, GLfloat *params); +#endif +#endif /* GL_EXT_paletted_texture */ + +#ifndef GL_EXT_pixel_buffer_object +#define GL_EXT_pixel_buffer_object 1 +#define GL_PIXEL_PACK_BUFFER_EXT 0x88EB +#define GL_PIXEL_UNPACK_BUFFER_EXT 0x88EC +#define GL_PIXEL_PACK_BUFFER_BINDING_EXT 0x88ED +#define GL_PIXEL_UNPACK_BUFFER_BINDING_EXT 0x88EF +#endif /* GL_EXT_pixel_buffer_object */ + +#ifndef GL_EXT_pixel_transform +#define GL_EXT_pixel_transform 1 +#define GL_PIXEL_TRANSFORM_2D_EXT 0x8330 +#define GL_PIXEL_MAG_FILTER_EXT 0x8331 +#define GL_PIXEL_MIN_FILTER_EXT 0x8332 +#define GL_PIXEL_CUBIC_WEIGHT_EXT 0x8333 +#define GL_CUBIC_EXT 0x8334 +#define GL_AVERAGE_EXT 0x8335 +#define GL_PIXEL_TRANSFORM_2D_STACK_DEPTH_EXT 0x8336 +#define GL_MAX_PIXEL_TRANSFORM_2D_STACK_DEPTH_EXT 0x8337 +#define GL_PIXEL_TRANSFORM_2D_MATRIX_EXT 0x8338 +typedef void (APIENTRYP PFNGLPIXELTRANSFORMPARAMETERIEXTPROC) (GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLPIXELTRANSFORMPARAMETERFEXTPROC) (GLenum target, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLPIXELTRANSFORMPARAMETERIVEXTPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLPIXELTRANSFORMPARAMETERFVEXTPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLGETPIXELTRANSFORMPARAMETERIVEXTPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETPIXELTRANSFORMPARAMETERFVEXTPROC) (GLenum target, GLenum pname, GLfloat *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPixelTransformParameteriEXT (GLenum target, GLenum pname, GLint param); +GLAPI void APIENTRY glPixelTransformParameterfEXT (GLenum target, GLenum pname, GLfloat param); +GLAPI void APIENTRY glPixelTransformParameterivEXT (GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glPixelTransformParameterfvEXT (GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glGetPixelTransformParameterivEXT (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetPixelTransformParameterfvEXT (GLenum target, GLenum pname, GLfloat *params); +#endif +#endif /* GL_EXT_pixel_transform */ + +#ifndef GL_EXT_pixel_transform_color_table +#define GL_EXT_pixel_transform_color_table 1 +#endif /* GL_EXT_pixel_transform_color_table */ + +#ifndef GL_EXT_point_parameters +#define GL_EXT_point_parameters 1 +#define GL_POINT_SIZE_MIN_EXT 0x8126 +#define GL_POINT_SIZE_MAX_EXT 0x8127 +#define GL_POINT_FADE_THRESHOLD_SIZE_EXT 0x8128 +#define GL_DISTANCE_ATTENUATION_EXT 0x8129 +typedef void (APIENTRYP PFNGLPOINTPARAMETERFEXTPROC) (GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLPOINTPARAMETERFVEXTPROC) (GLenum pname, const GLfloat *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPointParameterfEXT (GLenum pname, GLfloat param); +GLAPI void APIENTRY glPointParameterfvEXT (GLenum pname, const GLfloat *params); +#endif +#endif /* GL_EXT_point_parameters */ + +#ifndef GL_EXT_polygon_offset +#define GL_EXT_polygon_offset 1 +#define GL_POLYGON_OFFSET_EXT 0x8037 +#define GL_POLYGON_OFFSET_FACTOR_EXT 0x8038 +#define GL_POLYGON_OFFSET_BIAS_EXT 0x8039 +typedef void (APIENTRYP PFNGLPOLYGONOFFSETEXTPROC) (GLfloat factor, GLfloat bias); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY 
glPolygonOffsetEXT (GLfloat factor, GLfloat bias); +#endif +#endif /* GL_EXT_polygon_offset */ + +#ifndef GL_EXT_polygon_offset_clamp +#define GL_EXT_polygon_offset_clamp 1 +#define GL_POLYGON_OFFSET_CLAMP_EXT 0x8E1B +typedef void (APIENTRYP PFNGLPOLYGONOFFSETCLAMPEXTPROC) (GLfloat factor, GLfloat units, GLfloat clamp); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPolygonOffsetClampEXT (GLfloat factor, GLfloat units, GLfloat clamp); +#endif +#endif /* GL_EXT_polygon_offset_clamp */ + +#ifndef GL_EXT_post_depth_coverage +#define GL_EXT_post_depth_coverage 1 +#endif /* GL_EXT_post_depth_coverage */ + +#ifndef GL_EXT_provoking_vertex +#define GL_EXT_provoking_vertex 1 +#define GL_QUADS_FOLLOW_PROVOKING_VERTEX_CONVENTION_EXT 0x8E4C +#define GL_FIRST_VERTEX_CONVENTION_EXT 0x8E4D +#define GL_LAST_VERTEX_CONVENTION_EXT 0x8E4E +#define GL_PROVOKING_VERTEX_EXT 0x8E4F +typedef void (APIENTRYP PFNGLPROVOKINGVERTEXEXTPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glProvokingVertexEXT (GLenum mode); +#endif +#endif /* GL_EXT_provoking_vertex */ + +#ifndef GL_EXT_raster_multisample +#define GL_EXT_raster_multisample 1 +#define GL_RASTER_MULTISAMPLE_EXT 0x9327 +#define GL_RASTER_SAMPLES_EXT 0x9328 +#define GL_MAX_RASTER_SAMPLES_EXT 0x9329 +#define GL_RASTER_FIXED_SAMPLE_LOCATIONS_EXT 0x932A +#define GL_MULTISAMPLE_RASTERIZATION_ALLOWED_EXT 0x932B +#define GL_EFFECTIVE_RASTER_SAMPLES_EXT 0x932C +typedef void (APIENTRYP PFNGLRASTERSAMPLESEXTPROC) (GLuint samples, GLboolean fixedsamplelocations); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glRasterSamplesEXT (GLuint samples, GLboolean fixedsamplelocations); +#endif +#endif /* GL_EXT_raster_multisample */ + +#ifndef GL_EXT_rescale_normal +#define GL_EXT_rescale_normal 1 +#define GL_RESCALE_NORMAL_EXT 0x803A +#endif /* GL_EXT_rescale_normal */ + +#ifndef GL_EXT_secondary_color +#define GL_EXT_secondary_color 1 +#define GL_COLOR_SUM_EXT 0x8458 +#define GL_CURRENT_SECONDARY_COLOR_EXT 0x8459 +#define GL_SECONDARY_COLOR_ARRAY_SIZE_EXT 0x845A +#define GL_SECONDARY_COLOR_ARRAY_TYPE_EXT 0x845B +#define GL_SECONDARY_COLOR_ARRAY_STRIDE_EXT 0x845C +#define GL_SECONDARY_COLOR_ARRAY_POINTER_EXT 0x845D +#define GL_SECONDARY_COLOR_ARRAY_EXT 0x845E +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3BEXTPROC) (GLbyte red, GLbyte green, GLbyte blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3BVEXTPROC) (const GLbyte *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3DEXTPROC) (GLdouble red, GLdouble green, GLdouble blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3DVEXTPROC) (const GLdouble *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3FEXTPROC) (GLfloat red, GLfloat green, GLfloat blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3FVEXTPROC) (const GLfloat *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3IEXTPROC) (GLint red, GLint green, GLint blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3IVEXTPROC) (const GLint *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3SEXTPROC) (GLshort red, GLshort green, GLshort blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3SVEXTPROC) (const GLshort *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UBEXTPROC) (GLubyte red, GLubyte green, GLubyte blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UBVEXTPROC) (const GLubyte *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UIEXTPROC) (GLuint red, GLuint green, GLuint blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UIVEXTPROC) (const GLuint *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3USEXTPROC) (GLushort red, GLushort green, GLushort 
blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3USVEXTPROC) (const GLushort *v); +typedef void (APIENTRYP PFNGLSECONDARYCOLORPOINTEREXTPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSecondaryColor3bEXT (GLbyte red, GLbyte green, GLbyte blue); +GLAPI void APIENTRY glSecondaryColor3bvEXT (const GLbyte *v); +GLAPI void APIENTRY glSecondaryColor3dEXT (GLdouble red, GLdouble green, GLdouble blue); +GLAPI void APIENTRY glSecondaryColor3dvEXT (const GLdouble *v); +GLAPI void APIENTRY glSecondaryColor3fEXT (GLfloat red, GLfloat green, GLfloat blue); +GLAPI void APIENTRY glSecondaryColor3fvEXT (const GLfloat *v); +GLAPI void APIENTRY glSecondaryColor3iEXT (GLint red, GLint green, GLint blue); +GLAPI void APIENTRY glSecondaryColor3ivEXT (const GLint *v); +GLAPI void APIENTRY glSecondaryColor3sEXT (GLshort red, GLshort green, GLshort blue); +GLAPI void APIENTRY glSecondaryColor3svEXT (const GLshort *v); +GLAPI void APIENTRY glSecondaryColor3ubEXT (GLubyte red, GLubyte green, GLubyte blue); +GLAPI void APIENTRY glSecondaryColor3ubvEXT (const GLubyte *v); +GLAPI void APIENTRY glSecondaryColor3uiEXT (GLuint red, GLuint green, GLuint blue); +GLAPI void APIENTRY glSecondaryColor3uivEXT (const GLuint *v); +GLAPI void APIENTRY glSecondaryColor3usEXT (GLushort red, GLushort green, GLushort blue); +GLAPI void APIENTRY glSecondaryColor3usvEXT (const GLushort *v); +GLAPI void APIENTRY glSecondaryColorPointerEXT (GLint size, GLenum type, GLsizei stride, const void *pointer); +#endif +#endif /* GL_EXT_secondary_color */ + +#ifndef GL_EXT_semaphore +#define GL_EXT_semaphore 1 +#define GL_LAYOUT_GENERAL_EXT 0x958D +#define GL_LAYOUT_COLOR_ATTACHMENT_EXT 0x958E +#define GL_LAYOUT_DEPTH_STENCIL_ATTACHMENT_EXT 0x958F +#define GL_LAYOUT_DEPTH_STENCIL_READ_ONLY_EXT 0x9590 +#define GL_LAYOUT_SHADER_READ_ONLY_EXT 0x9591 +#define GL_LAYOUT_TRANSFER_SRC_EXT 0x9592 +#define GL_LAYOUT_TRANSFER_DST_EXT 0x9593 +#define GL_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_EXT 0x9530 +#define GL_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_EXT 0x9531 +typedef void (APIENTRYP PFNGLGENSEMAPHORESEXTPROC) (GLsizei n, GLuint *semaphores); +typedef void (APIENTRYP PFNGLDELETESEMAPHORESEXTPROC) (GLsizei n, const GLuint *semaphores); +typedef GLboolean (APIENTRYP PFNGLISSEMAPHOREEXTPROC) (GLuint semaphore); +typedef void (APIENTRYP PFNGLSEMAPHOREPARAMETERUI64VEXTPROC) (GLuint semaphore, GLenum pname, const GLuint64 *params); +typedef void (APIENTRYP PFNGLGETSEMAPHOREPARAMETERUI64VEXTPROC) (GLuint semaphore, GLenum pname, GLuint64 *params); +typedef void (APIENTRYP PFNGLWAITSEMAPHOREEXTPROC) (GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *srcLayouts); +typedef void (APIENTRYP PFNGLSIGNALSEMAPHOREEXTPROC) (GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *dstLayouts); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGenSemaphoresEXT (GLsizei n, GLuint *semaphores); +GLAPI void APIENTRY glDeleteSemaphoresEXT (GLsizei n, const GLuint *semaphores); +GLAPI GLboolean APIENTRY glIsSemaphoreEXT (GLuint semaphore); +GLAPI void APIENTRY glSemaphoreParameterui64vEXT (GLuint semaphore, GLenum pname, const GLuint64 *params); +GLAPI void APIENTRY glGetSemaphoreParameterui64vEXT (GLuint semaphore, GLenum pname, GLuint64 *params); +GLAPI void APIENTRY glWaitSemaphoreEXT (GLuint semaphore, GLuint numBufferBarriers, const 
GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *srcLayouts); +GLAPI void APIENTRY glSignalSemaphoreEXT (GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *dstLayouts); +#endif +#endif /* GL_EXT_semaphore */ + +#ifndef GL_EXT_semaphore_fd +#define GL_EXT_semaphore_fd 1 +typedef void (APIENTRYP PFNGLIMPORTSEMAPHOREFDEXTPROC) (GLuint semaphore, GLenum handleType, GLint fd); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glImportSemaphoreFdEXT (GLuint semaphore, GLenum handleType, GLint fd); +#endif +#endif /* GL_EXT_semaphore_fd */ + +#ifndef GL_EXT_semaphore_win32 +#define GL_EXT_semaphore_win32 1 +#define GL_HANDLE_TYPE_D3D12_FENCE_EXT 0x9594 +#define GL_D3D12_FENCE_VALUE_EXT 0x9595 +typedef void (APIENTRYP PFNGLIMPORTSEMAPHOREWIN32HANDLEEXTPROC) (GLuint semaphore, GLenum handleType, void *handle); +typedef void (APIENTRYP PFNGLIMPORTSEMAPHOREWIN32NAMEEXTPROC) (GLuint semaphore, GLenum handleType, const void *name); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glImportSemaphoreWin32HandleEXT (GLuint semaphore, GLenum handleType, void *handle); +GLAPI void APIENTRY glImportSemaphoreWin32NameEXT (GLuint semaphore, GLenum handleType, const void *name); +#endif +#endif /* GL_EXT_semaphore_win32 */ + +#ifndef GL_EXT_separate_shader_objects +#define GL_EXT_separate_shader_objects 1 +#define GL_ACTIVE_PROGRAM_EXT 0x8B8D +typedef void (APIENTRYP PFNGLUSESHADERPROGRAMEXTPROC) (GLenum type, GLuint program); +typedef void (APIENTRYP PFNGLACTIVEPROGRAMEXTPROC) (GLuint program); +typedef GLuint (APIENTRYP PFNGLCREATESHADERPROGRAMEXTPROC) (GLenum type, const GLchar *string); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glUseShaderProgramEXT (GLenum type, GLuint program); +GLAPI void APIENTRY glActiveProgramEXT (GLuint program); +GLAPI GLuint APIENTRY glCreateShaderProgramEXT (GLenum type, const GLchar *string); +#endif +#endif /* GL_EXT_separate_shader_objects */ + +#ifndef GL_EXT_separate_specular_color +#define GL_EXT_separate_specular_color 1 +#define GL_LIGHT_MODEL_COLOR_CONTROL_EXT 0x81F8 +#define GL_SINGLE_COLOR_EXT 0x81F9 +#define GL_SEPARATE_SPECULAR_COLOR_EXT 0x81FA +#endif /* GL_EXT_separate_specular_color */ + +#ifndef GL_EXT_shader_framebuffer_fetch +#define GL_EXT_shader_framebuffer_fetch 1 +#define GL_FRAGMENT_SHADER_DISCARDS_SAMPLES_EXT 0x8A52 +#endif /* GL_EXT_shader_framebuffer_fetch */ + +#ifndef GL_EXT_shader_framebuffer_fetch_non_coherent +#define GL_EXT_shader_framebuffer_fetch_non_coherent 1 +typedef void (APIENTRYP PFNGLFRAMEBUFFERFETCHBARRIEREXTPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFramebufferFetchBarrierEXT (void); +#endif +#endif /* GL_EXT_shader_framebuffer_fetch_non_coherent */ + +#ifndef GL_EXT_shader_image_load_formatted +#define GL_EXT_shader_image_load_formatted 1 +#endif /* GL_EXT_shader_image_load_formatted */ + +#ifndef GL_EXT_shader_image_load_store +#define GL_EXT_shader_image_load_store 1 +#define GL_MAX_IMAGE_UNITS_EXT 0x8F38 +#define GL_MAX_COMBINED_IMAGE_UNITS_AND_FRAGMENT_OUTPUTS_EXT 0x8F39 +#define GL_IMAGE_BINDING_NAME_EXT 0x8F3A +#define GL_IMAGE_BINDING_LEVEL_EXT 0x8F3B +#define GL_IMAGE_BINDING_LAYERED_EXT 0x8F3C +#define GL_IMAGE_BINDING_LAYER_EXT 0x8F3D +#define GL_IMAGE_BINDING_ACCESS_EXT 0x8F3E +#define GL_IMAGE_1D_EXT 0x904C +#define GL_IMAGE_2D_EXT 0x904D +#define GL_IMAGE_3D_EXT 0x904E +#define GL_IMAGE_2D_RECT_EXT 0x904F +#define GL_IMAGE_CUBE_EXT 0x9050 +#define GL_IMAGE_BUFFER_EXT 0x9051 +#define 
GL_IMAGE_1D_ARRAY_EXT 0x9052 +#define GL_IMAGE_2D_ARRAY_EXT 0x9053 +#define GL_IMAGE_CUBE_MAP_ARRAY_EXT 0x9054 +#define GL_IMAGE_2D_MULTISAMPLE_EXT 0x9055 +#define GL_IMAGE_2D_MULTISAMPLE_ARRAY_EXT 0x9056 +#define GL_INT_IMAGE_1D_EXT 0x9057 +#define GL_INT_IMAGE_2D_EXT 0x9058 +#define GL_INT_IMAGE_3D_EXT 0x9059 +#define GL_INT_IMAGE_2D_RECT_EXT 0x905A +#define GL_INT_IMAGE_CUBE_EXT 0x905B +#define GL_INT_IMAGE_BUFFER_EXT 0x905C +#define GL_INT_IMAGE_1D_ARRAY_EXT 0x905D +#define GL_INT_IMAGE_2D_ARRAY_EXT 0x905E +#define GL_INT_IMAGE_CUBE_MAP_ARRAY_EXT 0x905F +#define GL_INT_IMAGE_2D_MULTISAMPLE_EXT 0x9060 +#define GL_INT_IMAGE_2D_MULTISAMPLE_ARRAY_EXT 0x9061 +#define GL_UNSIGNED_INT_IMAGE_1D_EXT 0x9062 +#define GL_UNSIGNED_INT_IMAGE_2D_EXT 0x9063 +#define GL_UNSIGNED_INT_IMAGE_3D_EXT 0x9064 +#define GL_UNSIGNED_INT_IMAGE_2D_RECT_EXT 0x9065 +#define GL_UNSIGNED_INT_IMAGE_CUBE_EXT 0x9066 +#define GL_UNSIGNED_INT_IMAGE_BUFFER_EXT 0x9067 +#define GL_UNSIGNED_INT_IMAGE_1D_ARRAY_EXT 0x9068 +#define GL_UNSIGNED_INT_IMAGE_2D_ARRAY_EXT 0x9069 +#define GL_UNSIGNED_INT_IMAGE_CUBE_MAP_ARRAY_EXT 0x906A +#define GL_UNSIGNED_INT_IMAGE_2D_MULTISAMPLE_EXT 0x906B +#define GL_UNSIGNED_INT_IMAGE_2D_MULTISAMPLE_ARRAY_EXT 0x906C +#define GL_MAX_IMAGE_SAMPLES_EXT 0x906D +#define GL_IMAGE_BINDING_FORMAT_EXT 0x906E +#define GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT_EXT 0x00000001 +#define GL_ELEMENT_ARRAY_BARRIER_BIT_EXT 0x00000002 +#define GL_UNIFORM_BARRIER_BIT_EXT 0x00000004 +#define GL_TEXTURE_FETCH_BARRIER_BIT_EXT 0x00000008 +#define GL_SHADER_IMAGE_ACCESS_BARRIER_BIT_EXT 0x00000020 +#define GL_COMMAND_BARRIER_BIT_EXT 0x00000040 +#define GL_PIXEL_BUFFER_BARRIER_BIT_EXT 0x00000080 +#define GL_TEXTURE_UPDATE_BARRIER_BIT_EXT 0x00000100 +#define GL_BUFFER_UPDATE_BARRIER_BIT_EXT 0x00000200 +#define GL_FRAMEBUFFER_BARRIER_BIT_EXT 0x00000400 +#define GL_TRANSFORM_FEEDBACK_BARRIER_BIT_EXT 0x00000800 +#define GL_ATOMIC_COUNTER_BARRIER_BIT_EXT 0x00001000 +#define GL_ALL_BARRIER_BITS_EXT 0xFFFFFFFF +typedef void (APIENTRYP PFNGLBINDIMAGETEXTUREEXTPROC) (GLuint index, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLint format); +typedef void (APIENTRYP PFNGLMEMORYBARRIEREXTPROC) (GLbitfield barriers); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBindImageTextureEXT (GLuint index, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLint format); +GLAPI void APIENTRY glMemoryBarrierEXT (GLbitfield barriers); +#endif +#endif /* GL_EXT_shader_image_load_store */ + +#ifndef GL_EXT_shader_integer_mix +#define GL_EXT_shader_integer_mix 1 +#endif /* GL_EXT_shader_integer_mix */ + +#ifndef GL_EXT_shadow_funcs +#define GL_EXT_shadow_funcs 1 +#endif /* GL_EXT_shadow_funcs */ + +#ifndef GL_EXT_shared_texture_palette +#define GL_EXT_shared_texture_palette 1 +#define GL_SHARED_TEXTURE_PALETTE_EXT 0x81FB +#endif /* GL_EXT_shared_texture_palette */ + +#ifndef GL_EXT_sparse_texture2 +#define GL_EXT_sparse_texture2 1 +#endif /* GL_EXT_sparse_texture2 */ + +#ifndef GL_EXT_stencil_clear_tag +#define GL_EXT_stencil_clear_tag 1 +#define GL_STENCIL_TAG_BITS_EXT 0x88F2 +#define GL_STENCIL_CLEAR_TAG_VALUE_EXT 0x88F3 +typedef void (APIENTRYP PFNGLSTENCILCLEARTAGEXTPROC) (GLsizei stencilTagBits, GLuint stencilClearTag); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glStencilClearTagEXT (GLsizei stencilTagBits, GLuint stencilClearTag); +#endif +#endif /* GL_EXT_stencil_clear_tag */ + +#ifndef GL_EXT_stencil_two_side +#define GL_EXT_stencil_two_side 1 +#define GL_STENCIL_TEST_TWO_SIDE_EXT 
0x8910 +#define GL_ACTIVE_STENCIL_FACE_EXT 0x8911 +typedef void (APIENTRYP PFNGLACTIVESTENCILFACEEXTPROC) (GLenum face); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glActiveStencilFaceEXT (GLenum face); +#endif +#endif /* GL_EXT_stencil_two_side */ + +#ifndef GL_EXT_stencil_wrap +#define GL_EXT_stencil_wrap 1 +#define GL_INCR_WRAP_EXT 0x8507 +#define GL_DECR_WRAP_EXT 0x8508 +#endif /* GL_EXT_stencil_wrap */ + +#ifndef GL_EXT_subtexture +#define GL_EXT_subtexture 1 +typedef void (APIENTRYP PFNGLTEXSUBIMAGE1DEXTPROC) (GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DEXTPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexSubImage1DEXT (GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTexSubImage2DEXT (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +#endif +#endif /* GL_EXT_subtexture */ + +#ifndef GL_EXT_texture +#define GL_EXT_texture 1 +#define GL_ALPHA4_EXT 0x803B +#define GL_ALPHA8_EXT 0x803C +#define GL_ALPHA12_EXT 0x803D +#define GL_ALPHA16_EXT 0x803E +#define GL_LUMINANCE4_EXT 0x803F +#define GL_LUMINANCE8_EXT 0x8040 +#define GL_LUMINANCE12_EXT 0x8041 +#define GL_LUMINANCE16_EXT 0x8042 +#define GL_LUMINANCE4_ALPHA4_EXT 0x8043 +#define GL_LUMINANCE6_ALPHA2_EXT 0x8044 +#define GL_LUMINANCE8_ALPHA8_EXT 0x8045 +#define GL_LUMINANCE12_ALPHA4_EXT 0x8046 +#define GL_LUMINANCE12_ALPHA12_EXT 0x8047 +#define GL_LUMINANCE16_ALPHA16_EXT 0x8048 +#define GL_INTENSITY_EXT 0x8049 +#define GL_INTENSITY4_EXT 0x804A +#define GL_INTENSITY8_EXT 0x804B +#define GL_INTENSITY12_EXT 0x804C +#define GL_INTENSITY16_EXT 0x804D +#define GL_RGB2_EXT 0x804E +#define GL_RGB4_EXT 0x804F +#define GL_RGB5_EXT 0x8050 +#define GL_RGB8_EXT 0x8051 +#define GL_RGB10_EXT 0x8052 +#define GL_RGB12_EXT 0x8053 +#define GL_RGB16_EXT 0x8054 +#define GL_RGBA2_EXT 0x8055 +#define GL_RGBA4_EXT 0x8056 +#define GL_RGB5_A1_EXT 0x8057 +#define GL_RGBA8_EXT 0x8058 +#define GL_RGB10_A2_EXT 0x8059 +#define GL_RGBA12_EXT 0x805A +#define GL_RGBA16_EXT 0x805B +#define GL_TEXTURE_RED_SIZE_EXT 0x805C +#define GL_TEXTURE_GREEN_SIZE_EXT 0x805D +#define GL_TEXTURE_BLUE_SIZE_EXT 0x805E +#define GL_TEXTURE_ALPHA_SIZE_EXT 0x805F +#define GL_TEXTURE_LUMINANCE_SIZE_EXT 0x8060 +#define GL_TEXTURE_INTENSITY_SIZE_EXT 0x8061 +#define GL_REPLACE_EXT 0x8062 +#define GL_PROXY_TEXTURE_1D_EXT 0x8063 +#define GL_PROXY_TEXTURE_2D_EXT 0x8064 +#define GL_TEXTURE_TOO_LARGE_EXT 0x8065 +#endif /* GL_EXT_texture */ + +#ifndef GL_EXT_texture3D +#define GL_EXT_texture3D 1 +#define GL_PACK_SKIP_IMAGES_EXT 0x806B +#define GL_PACK_IMAGE_HEIGHT_EXT 0x806C +#define GL_UNPACK_SKIP_IMAGES_EXT 0x806D +#define GL_UNPACK_IMAGE_HEIGHT_EXT 0x806E +#define GL_TEXTURE_3D_EXT 0x806F +#define GL_PROXY_TEXTURE_3D_EXT 0x8070 +#define GL_TEXTURE_DEPTH_EXT 0x8071 +#define GL_TEXTURE_WRAP_R_EXT 0x8072 +#define GL_MAX_3D_TEXTURE_SIZE_EXT 0x8073 +typedef void (APIENTRYP PFNGLTEXIMAGE3DEXTPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXSUBIMAGE3DEXTPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, 
GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexImage3DEXT (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTexSubImage3DEXT (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +#endif +#endif /* GL_EXT_texture3D */ + +#ifndef GL_EXT_texture_array +#define GL_EXT_texture_array 1 +#define GL_TEXTURE_1D_ARRAY_EXT 0x8C18 +#define GL_PROXY_TEXTURE_1D_ARRAY_EXT 0x8C19 +#define GL_TEXTURE_2D_ARRAY_EXT 0x8C1A +#define GL_PROXY_TEXTURE_2D_ARRAY_EXT 0x8C1B +#define GL_TEXTURE_BINDING_1D_ARRAY_EXT 0x8C1C +#define GL_TEXTURE_BINDING_2D_ARRAY_EXT 0x8C1D +#define GL_MAX_ARRAY_TEXTURE_LAYERS_EXT 0x88FF +#define GL_COMPARE_REF_DEPTH_TO_TEXTURE_EXT 0x884E +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYEREXTPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFramebufferTextureLayerEXT (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +#endif +#endif /* GL_EXT_texture_array */ + +#ifndef GL_EXT_texture_buffer_object +#define GL_EXT_texture_buffer_object 1 +#define GL_TEXTURE_BUFFER_EXT 0x8C2A +#define GL_MAX_TEXTURE_BUFFER_SIZE_EXT 0x8C2B +#define GL_TEXTURE_BINDING_BUFFER_EXT 0x8C2C +#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING_EXT 0x8C2D +#define GL_TEXTURE_BUFFER_FORMAT_EXT 0x8C2E +typedef void (APIENTRYP PFNGLTEXBUFFEREXTPROC) (GLenum target, GLenum internalformat, GLuint buffer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexBufferEXT (GLenum target, GLenum internalformat, GLuint buffer); +#endif +#endif /* GL_EXT_texture_buffer_object */ + +#ifndef GL_EXT_texture_compression_latc +#define GL_EXT_texture_compression_latc 1 +#define GL_COMPRESSED_LUMINANCE_LATC1_EXT 0x8C70 +#define GL_COMPRESSED_SIGNED_LUMINANCE_LATC1_EXT 0x8C71 +#define GL_COMPRESSED_LUMINANCE_ALPHA_LATC2_EXT 0x8C72 +#define GL_COMPRESSED_SIGNED_LUMINANCE_ALPHA_LATC2_EXT 0x8C73 +#endif /* GL_EXT_texture_compression_latc */ + +#ifndef GL_EXT_texture_compression_rgtc +#define GL_EXT_texture_compression_rgtc 1 +#define GL_COMPRESSED_RED_RGTC1_EXT 0x8DBB +#define GL_COMPRESSED_SIGNED_RED_RGTC1_EXT 0x8DBC +#define GL_COMPRESSED_RED_GREEN_RGTC2_EXT 0x8DBD +#define GL_COMPRESSED_SIGNED_RED_GREEN_RGTC2_EXT 0x8DBE +#endif /* GL_EXT_texture_compression_rgtc */ + +#ifndef GL_EXT_texture_compression_s3tc +#define GL_EXT_texture_compression_s3tc 1 +#define GL_COMPRESSED_RGB_S3TC_DXT1_EXT 0x83F0 +#define GL_COMPRESSED_RGBA_S3TC_DXT1_EXT 0x83F1 +#define GL_COMPRESSED_RGBA_S3TC_DXT3_EXT 0x83F2 +#define GL_COMPRESSED_RGBA_S3TC_DXT5_EXT 0x83F3 +#endif /* GL_EXT_texture_compression_s3tc */ + +#ifndef GL_EXT_texture_cube_map +#define GL_EXT_texture_cube_map 1 +#define GL_NORMAL_MAP_EXT 0x8511 +#define GL_REFLECTION_MAP_EXT 0x8512 +#define GL_TEXTURE_CUBE_MAP_EXT 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP_EXT 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X_EXT 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X_EXT 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y_EXT 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_EXT 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z_EXT 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_EXT 0x851A +#define GL_PROXY_TEXTURE_CUBE_MAP_EXT 0x851B +#define 
GL_MAX_CUBE_MAP_TEXTURE_SIZE_EXT 0x851C +#endif /* GL_EXT_texture_cube_map */ + +#ifndef GL_EXT_texture_env_add +#define GL_EXT_texture_env_add 1 +#endif /* GL_EXT_texture_env_add */ + +#ifndef GL_EXT_texture_env_combine +#define GL_EXT_texture_env_combine 1 +#define GL_COMBINE_EXT 0x8570 +#define GL_COMBINE_RGB_EXT 0x8571 +#define GL_COMBINE_ALPHA_EXT 0x8572 +#define GL_RGB_SCALE_EXT 0x8573 +#define GL_ADD_SIGNED_EXT 0x8574 +#define GL_INTERPOLATE_EXT 0x8575 +#define GL_CONSTANT_EXT 0x8576 +#define GL_PRIMARY_COLOR_EXT 0x8577 +#define GL_PREVIOUS_EXT 0x8578 +#define GL_SOURCE0_RGB_EXT 0x8580 +#define GL_SOURCE1_RGB_EXT 0x8581 +#define GL_SOURCE2_RGB_EXT 0x8582 +#define GL_SOURCE0_ALPHA_EXT 0x8588 +#define GL_SOURCE1_ALPHA_EXT 0x8589 +#define GL_SOURCE2_ALPHA_EXT 0x858A +#define GL_OPERAND0_RGB_EXT 0x8590 +#define GL_OPERAND1_RGB_EXT 0x8591 +#define GL_OPERAND2_RGB_EXT 0x8592 +#define GL_OPERAND0_ALPHA_EXT 0x8598 +#define GL_OPERAND1_ALPHA_EXT 0x8599 +#define GL_OPERAND2_ALPHA_EXT 0x859A +#endif /* GL_EXT_texture_env_combine */ + +#ifndef GL_EXT_texture_env_dot3 +#define GL_EXT_texture_env_dot3 1 +#define GL_DOT3_RGB_EXT 0x8740 +#define GL_DOT3_RGBA_EXT 0x8741 +#endif /* GL_EXT_texture_env_dot3 */ + +#ifndef GL_EXT_texture_filter_anisotropic +#define GL_EXT_texture_filter_anisotropic 1 +#define GL_TEXTURE_MAX_ANISOTROPY_EXT 0x84FE +#define GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT 0x84FF +#endif /* GL_EXT_texture_filter_anisotropic */ + +#ifndef GL_EXT_texture_filter_minmax +#define GL_EXT_texture_filter_minmax 1 +#define GL_TEXTURE_REDUCTION_MODE_EXT 0x9366 +#define GL_WEIGHTED_AVERAGE_EXT 0x9367 +#endif /* GL_EXT_texture_filter_minmax */ + +#ifndef GL_EXT_texture_integer +#define GL_EXT_texture_integer 1 +#define GL_RGBA32UI_EXT 0x8D70 +#define GL_RGB32UI_EXT 0x8D71 +#define GL_ALPHA32UI_EXT 0x8D72 +#define GL_INTENSITY32UI_EXT 0x8D73 +#define GL_LUMINANCE32UI_EXT 0x8D74 +#define GL_LUMINANCE_ALPHA32UI_EXT 0x8D75 +#define GL_RGBA16UI_EXT 0x8D76 +#define GL_RGB16UI_EXT 0x8D77 +#define GL_ALPHA16UI_EXT 0x8D78 +#define GL_INTENSITY16UI_EXT 0x8D79 +#define GL_LUMINANCE16UI_EXT 0x8D7A +#define GL_LUMINANCE_ALPHA16UI_EXT 0x8D7B +#define GL_RGBA8UI_EXT 0x8D7C +#define GL_RGB8UI_EXT 0x8D7D +#define GL_ALPHA8UI_EXT 0x8D7E +#define GL_INTENSITY8UI_EXT 0x8D7F +#define GL_LUMINANCE8UI_EXT 0x8D80 +#define GL_LUMINANCE_ALPHA8UI_EXT 0x8D81 +#define GL_RGBA32I_EXT 0x8D82 +#define GL_RGB32I_EXT 0x8D83 +#define GL_ALPHA32I_EXT 0x8D84 +#define GL_INTENSITY32I_EXT 0x8D85 +#define GL_LUMINANCE32I_EXT 0x8D86 +#define GL_LUMINANCE_ALPHA32I_EXT 0x8D87 +#define GL_RGBA16I_EXT 0x8D88 +#define GL_RGB16I_EXT 0x8D89 +#define GL_ALPHA16I_EXT 0x8D8A +#define GL_INTENSITY16I_EXT 0x8D8B +#define GL_LUMINANCE16I_EXT 0x8D8C +#define GL_LUMINANCE_ALPHA16I_EXT 0x8D8D +#define GL_RGBA8I_EXT 0x8D8E +#define GL_RGB8I_EXT 0x8D8F +#define GL_ALPHA8I_EXT 0x8D90 +#define GL_INTENSITY8I_EXT 0x8D91 +#define GL_LUMINANCE8I_EXT 0x8D92 +#define GL_LUMINANCE_ALPHA8I_EXT 0x8D93 +#define GL_RED_INTEGER_EXT 0x8D94 +#define GL_GREEN_INTEGER_EXT 0x8D95 +#define GL_BLUE_INTEGER_EXT 0x8D96 +#define GL_ALPHA_INTEGER_EXT 0x8D97 +#define GL_RGB_INTEGER_EXT 0x8D98 +#define GL_RGBA_INTEGER_EXT 0x8D99 +#define GL_BGR_INTEGER_EXT 0x8D9A +#define GL_BGRA_INTEGER_EXT 0x8D9B +#define GL_LUMINANCE_INTEGER_EXT 0x8D9C +#define GL_LUMINANCE_ALPHA_INTEGER_EXT 0x8D9D +#define GL_RGBA_INTEGER_MODE_EXT 0x8D9E +typedef void (APIENTRYP PFNGLTEXPARAMETERIIVEXTPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLTEXPARAMETERIUIVEXTPROC) 
(GLenum target, GLenum pname, const GLuint *params); +typedef void (APIENTRYP PFNGLGETTEXPARAMETERIIVEXTPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETTEXPARAMETERIUIVEXTPROC) (GLenum target, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLCLEARCOLORIIEXTPROC) (GLint red, GLint green, GLint blue, GLint alpha); +typedef void (APIENTRYP PFNGLCLEARCOLORIUIEXTPROC) (GLuint red, GLuint green, GLuint blue, GLuint alpha); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexParameterIivEXT (GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glTexParameterIuivEXT (GLenum target, GLenum pname, const GLuint *params); +GLAPI void APIENTRY glGetTexParameterIivEXT (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetTexParameterIuivEXT (GLenum target, GLenum pname, GLuint *params); +GLAPI void APIENTRY glClearColorIiEXT (GLint red, GLint green, GLint blue, GLint alpha); +GLAPI void APIENTRY glClearColorIuiEXT (GLuint red, GLuint green, GLuint blue, GLuint alpha); +#endif +#endif /* GL_EXT_texture_integer */ + +#ifndef GL_EXT_texture_lod_bias +#define GL_EXT_texture_lod_bias 1 +#define GL_MAX_TEXTURE_LOD_BIAS_EXT 0x84FD +#define GL_TEXTURE_FILTER_CONTROL_EXT 0x8500 +#define GL_TEXTURE_LOD_BIAS_EXT 0x8501 +#endif /* GL_EXT_texture_lod_bias */ + +#ifndef GL_EXT_texture_mirror_clamp +#define GL_EXT_texture_mirror_clamp 1 +#define GL_MIRROR_CLAMP_EXT 0x8742 +#define GL_MIRROR_CLAMP_TO_EDGE_EXT 0x8743 +#define GL_MIRROR_CLAMP_TO_BORDER_EXT 0x8912 +#endif /* GL_EXT_texture_mirror_clamp */ + +#ifndef GL_EXT_texture_object +#define GL_EXT_texture_object 1 +#define GL_TEXTURE_PRIORITY_EXT 0x8066 +#define GL_TEXTURE_RESIDENT_EXT 0x8067 +#define GL_TEXTURE_1D_BINDING_EXT 0x8068 +#define GL_TEXTURE_2D_BINDING_EXT 0x8069 +#define GL_TEXTURE_3D_BINDING_EXT 0x806A +typedef GLboolean (APIENTRYP PFNGLARETEXTURESRESIDENTEXTPROC) (GLsizei n, const GLuint *textures, GLboolean *residences); +typedef void (APIENTRYP PFNGLBINDTEXTUREEXTPROC) (GLenum target, GLuint texture); +typedef void (APIENTRYP PFNGLDELETETEXTURESEXTPROC) (GLsizei n, const GLuint *textures); +typedef void (APIENTRYP PFNGLGENTEXTURESEXTPROC) (GLsizei n, GLuint *textures); +typedef GLboolean (APIENTRYP PFNGLISTEXTUREEXTPROC) (GLuint texture); +typedef void (APIENTRYP PFNGLPRIORITIZETEXTURESEXTPROC) (GLsizei n, const GLuint *textures, const GLclampf *priorities); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLboolean APIENTRY glAreTexturesResidentEXT (GLsizei n, const GLuint *textures, GLboolean *residences); +GLAPI void APIENTRY glBindTextureEXT (GLenum target, GLuint texture); +GLAPI void APIENTRY glDeleteTexturesEXT (GLsizei n, const GLuint *textures); +GLAPI void APIENTRY glGenTexturesEXT (GLsizei n, GLuint *textures); +GLAPI GLboolean APIENTRY glIsTextureEXT (GLuint texture); +GLAPI void APIENTRY glPrioritizeTexturesEXT (GLsizei n, const GLuint *textures, const GLclampf *priorities); +#endif +#endif /* GL_EXT_texture_object */ + +#ifndef GL_EXT_texture_perturb_normal +#define GL_EXT_texture_perturb_normal 1 +#define GL_PERTURB_EXT 0x85AE +#define GL_TEXTURE_NORMAL_EXT 0x85AF +typedef void (APIENTRYP PFNGLTEXTURENORMALEXTPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTextureNormalEXT (GLenum mode); +#endif +#endif /* GL_EXT_texture_perturb_normal */ + +#ifndef GL_EXT_texture_sRGB +#define GL_EXT_texture_sRGB 1 +#define GL_SRGB_EXT 0x8C40 +#define GL_SRGB8_EXT 0x8C41 +#define GL_SRGB_ALPHA_EXT 0x8C42 +#define GL_SRGB8_ALPHA8_EXT 0x8C43 +#define 
GL_SLUMINANCE_ALPHA_EXT 0x8C44 +#define GL_SLUMINANCE8_ALPHA8_EXT 0x8C45 +#define GL_SLUMINANCE_EXT 0x8C46 +#define GL_SLUMINANCE8_EXT 0x8C47 +#define GL_COMPRESSED_SRGB_EXT 0x8C48 +#define GL_COMPRESSED_SRGB_ALPHA_EXT 0x8C49 +#define GL_COMPRESSED_SLUMINANCE_EXT 0x8C4A +#define GL_COMPRESSED_SLUMINANCE_ALPHA_EXT 0x8C4B +#define GL_COMPRESSED_SRGB_S3TC_DXT1_EXT 0x8C4C +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT 0x8C4D +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT 0x8C4E +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT 0x8C4F +#endif /* GL_EXT_texture_sRGB */ + +#ifndef GL_EXT_texture_sRGB_decode +#define GL_EXT_texture_sRGB_decode 1 +#define GL_TEXTURE_SRGB_DECODE_EXT 0x8A48 +#define GL_DECODE_EXT 0x8A49 +#define GL_SKIP_DECODE_EXT 0x8A4A +#endif /* GL_EXT_texture_sRGB_decode */ + +#ifndef GL_EXT_texture_shared_exponent +#define GL_EXT_texture_shared_exponent 1 +#define GL_RGB9_E5_EXT 0x8C3D +#define GL_UNSIGNED_INT_5_9_9_9_REV_EXT 0x8C3E +#define GL_TEXTURE_SHARED_SIZE_EXT 0x8C3F +#endif /* GL_EXT_texture_shared_exponent */ + +#ifndef GL_EXT_texture_snorm +#define GL_EXT_texture_snorm 1 +#define GL_ALPHA_SNORM 0x9010 +#define GL_LUMINANCE_SNORM 0x9011 +#define GL_LUMINANCE_ALPHA_SNORM 0x9012 +#define GL_INTENSITY_SNORM 0x9013 +#define GL_ALPHA8_SNORM 0x9014 +#define GL_LUMINANCE8_SNORM 0x9015 +#define GL_LUMINANCE8_ALPHA8_SNORM 0x9016 +#define GL_INTENSITY8_SNORM 0x9017 +#define GL_ALPHA16_SNORM 0x9018 +#define GL_LUMINANCE16_SNORM 0x9019 +#define GL_LUMINANCE16_ALPHA16_SNORM 0x901A +#define GL_INTENSITY16_SNORM 0x901B +#define GL_RED_SNORM 0x8F90 +#define GL_RG_SNORM 0x8F91 +#define GL_RGB_SNORM 0x8F92 +#define GL_RGBA_SNORM 0x8F93 +#endif /* GL_EXT_texture_snorm */ + +#ifndef GL_EXT_texture_swizzle +#define GL_EXT_texture_swizzle 1 +#define GL_TEXTURE_SWIZZLE_R_EXT 0x8E42 +#define GL_TEXTURE_SWIZZLE_G_EXT 0x8E43 +#define GL_TEXTURE_SWIZZLE_B_EXT 0x8E44 +#define GL_TEXTURE_SWIZZLE_A_EXT 0x8E45 +#define GL_TEXTURE_SWIZZLE_RGBA_EXT 0x8E46 +#endif /* GL_EXT_texture_swizzle */ + +#ifndef GL_EXT_timer_query +#define GL_EXT_timer_query 1 +#define GL_TIME_ELAPSED_EXT 0x88BF +typedef void (APIENTRYP PFNGLGETQUERYOBJECTI64VEXTPROC) (GLuint id, GLenum pname, GLint64 *params); +typedef void (APIENTRYP PFNGLGETQUERYOBJECTUI64VEXTPROC) (GLuint id, GLenum pname, GLuint64 *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGetQueryObjecti64vEXT (GLuint id, GLenum pname, GLint64 *params); +GLAPI void APIENTRY glGetQueryObjectui64vEXT (GLuint id, GLenum pname, GLuint64 *params); +#endif +#endif /* GL_EXT_timer_query */ + +#ifndef GL_EXT_transform_feedback +#define GL_EXT_transform_feedback 1 +#define GL_TRANSFORM_FEEDBACK_BUFFER_EXT 0x8C8E +#define GL_TRANSFORM_FEEDBACK_BUFFER_START_EXT 0x8C84 +#define GL_TRANSFORM_FEEDBACK_BUFFER_SIZE_EXT 0x8C85 +#define GL_TRANSFORM_FEEDBACK_BUFFER_BINDING_EXT 0x8C8F +#define GL_INTERLEAVED_ATTRIBS_EXT 0x8C8C +#define GL_SEPARATE_ATTRIBS_EXT 0x8C8D +#define GL_PRIMITIVES_GENERATED_EXT 0x8C87 +#define GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN_EXT 0x8C88 +#define GL_RASTERIZER_DISCARD_EXT 0x8C89 +#define GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS_EXT 0x8C8A +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS_EXT 0x8C8B +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS_EXT 0x8C80 +#define GL_TRANSFORM_FEEDBACK_VARYINGS_EXT 0x8C83 +#define GL_TRANSFORM_FEEDBACK_BUFFER_MODE_EXT 0x8C7F +#define GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH_EXT 0x8C76 +typedef void (APIENTRYP PFNGLBEGINTRANSFORMFEEDBACKEXTPROC) (GLenum primitiveMode); +typedef void 
(APIENTRYP PFNGLENDTRANSFORMFEEDBACKEXTPROC) (void); +typedef void (APIENTRYP PFNGLBINDBUFFERRANGEEXTPROC) (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLBINDBUFFEROFFSETEXTPROC) (GLenum target, GLuint index, GLuint buffer, GLintptr offset); +typedef void (APIENTRYP PFNGLBINDBUFFERBASEEXTPROC) (GLenum target, GLuint index, GLuint buffer); +typedef void (APIENTRYP PFNGLTRANSFORMFEEDBACKVARYINGSEXTPROC) (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +typedef void (APIENTRYP PFNGLGETTRANSFORMFEEDBACKVARYINGEXTPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBeginTransformFeedbackEXT (GLenum primitiveMode); +GLAPI void APIENTRY glEndTransformFeedbackEXT (void); +GLAPI void APIENTRY glBindBufferRangeEXT (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +GLAPI void APIENTRY glBindBufferOffsetEXT (GLenum target, GLuint index, GLuint buffer, GLintptr offset); +GLAPI void APIENTRY glBindBufferBaseEXT (GLenum target, GLuint index, GLuint buffer); +GLAPI void APIENTRY glTransformFeedbackVaryingsEXT (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +GLAPI void APIENTRY glGetTransformFeedbackVaryingEXT (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +#endif +#endif /* GL_EXT_transform_feedback */ + +#ifndef GL_EXT_vertex_array +#define GL_EXT_vertex_array 1 +#define GL_VERTEX_ARRAY_EXT 0x8074 +#define GL_NORMAL_ARRAY_EXT 0x8075 +#define GL_COLOR_ARRAY_EXT 0x8076 +#define GL_INDEX_ARRAY_EXT 0x8077 +#define GL_TEXTURE_COORD_ARRAY_EXT 0x8078 +#define GL_EDGE_FLAG_ARRAY_EXT 0x8079 +#define GL_VERTEX_ARRAY_SIZE_EXT 0x807A +#define GL_VERTEX_ARRAY_TYPE_EXT 0x807B +#define GL_VERTEX_ARRAY_STRIDE_EXT 0x807C +#define GL_VERTEX_ARRAY_COUNT_EXT 0x807D +#define GL_NORMAL_ARRAY_TYPE_EXT 0x807E +#define GL_NORMAL_ARRAY_STRIDE_EXT 0x807F +#define GL_NORMAL_ARRAY_COUNT_EXT 0x8080 +#define GL_COLOR_ARRAY_SIZE_EXT 0x8081 +#define GL_COLOR_ARRAY_TYPE_EXT 0x8082 +#define GL_COLOR_ARRAY_STRIDE_EXT 0x8083 +#define GL_COLOR_ARRAY_COUNT_EXT 0x8084 +#define GL_INDEX_ARRAY_TYPE_EXT 0x8085 +#define GL_INDEX_ARRAY_STRIDE_EXT 0x8086 +#define GL_INDEX_ARRAY_COUNT_EXT 0x8087 +#define GL_TEXTURE_COORD_ARRAY_SIZE_EXT 0x8088 +#define GL_TEXTURE_COORD_ARRAY_TYPE_EXT 0x8089 +#define GL_TEXTURE_COORD_ARRAY_STRIDE_EXT 0x808A +#define GL_TEXTURE_COORD_ARRAY_COUNT_EXT 0x808B +#define GL_EDGE_FLAG_ARRAY_STRIDE_EXT 0x808C +#define GL_EDGE_FLAG_ARRAY_COUNT_EXT 0x808D +#define GL_VERTEX_ARRAY_POINTER_EXT 0x808E +#define GL_NORMAL_ARRAY_POINTER_EXT 0x808F +#define GL_COLOR_ARRAY_POINTER_EXT 0x8090 +#define GL_INDEX_ARRAY_POINTER_EXT 0x8091 +#define GL_TEXTURE_COORD_ARRAY_POINTER_EXT 0x8092 +#define GL_EDGE_FLAG_ARRAY_POINTER_EXT 0x8093 +typedef void (APIENTRYP PFNGLARRAYELEMENTEXTPROC) (GLint i); +typedef void (APIENTRYP PFNGLCOLORPOINTEREXTPROC) (GLint size, GLenum type, GLsizei stride, GLsizei count, const void *pointer); +typedef void (APIENTRYP PFNGLDRAWARRAYSEXTPROC) (GLenum mode, GLint first, GLsizei count); +typedef void (APIENTRYP PFNGLEDGEFLAGPOINTEREXTPROC) (GLsizei stride, GLsizei count, const GLboolean *pointer); +typedef void (APIENTRYP PFNGLGETPOINTERVEXTPROC) (GLenum pname, void **params); +typedef void (APIENTRYP PFNGLINDEXPOINTEREXTPROC) (GLenum type, GLsizei stride, GLsizei count, const void 
*pointer); +typedef void (APIENTRYP PFNGLNORMALPOINTEREXTPROC) (GLenum type, GLsizei stride, GLsizei count, const void *pointer); +typedef void (APIENTRYP PFNGLTEXCOORDPOINTEREXTPROC) (GLint size, GLenum type, GLsizei stride, GLsizei count, const void *pointer); +typedef void (APIENTRYP PFNGLVERTEXPOINTEREXTPROC) (GLint size, GLenum type, GLsizei stride, GLsizei count, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glArrayElementEXT (GLint i); +GLAPI void APIENTRY glColorPointerEXT (GLint size, GLenum type, GLsizei stride, GLsizei count, const void *pointer); +GLAPI void APIENTRY glDrawArraysEXT (GLenum mode, GLint first, GLsizei count); +GLAPI void APIENTRY glEdgeFlagPointerEXT (GLsizei stride, GLsizei count, const GLboolean *pointer); +GLAPI void APIENTRY glGetPointervEXT (GLenum pname, void **params); +GLAPI void APIENTRY glIndexPointerEXT (GLenum type, GLsizei stride, GLsizei count, const void *pointer); +GLAPI void APIENTRY glNormalPointerEXT (GLenum type, GLsizei stride, GLsizei count, const void *pointer); +GLAPI void APIENTRY glTexCoordPointerEXT (GLint size, GLenum type, GLsizei stride, GLsizei count, const void *pointer); +GLAPI void APIENTRY glVertexPointerEXT (GLint size, GLenum type, GLsizei stride, GLsizei count, const void *pointer); +#endif +#endif /* GL_EXT_vertex_array */ + +#ifndef GL_EXT_vertex_array_bgra +#define GL_EXT_vertex_array_bgra 1 +#endif /* GL_EXT_vertex_array_bgra */ + +#ifndef GL_EXT_vertex_attrib_64bit +#define GL_EXT_vertex_attrib_64bit 1 +#define GL_DOUBLE_VEC2_EXT 0x8FFC +#define GL_DOUBLE_VEC3_EXT 0x8FFD +#define GL_DOUBLE_VEC4_EXT 0x8FFE +#define GL_DOUBLE_MAT2_EXT 0x8F46 +#define GL_DOUBLE_MAT3_EXT 0x8F47 +#define GL_DOUBLE_MAT4_EXT 0x8F48 +#define GL_DOUBLE_MAT2x3_EXT 0x8F49 +#define GL_DOUBLE_MAT2x4_EXT 0x8F4A +#define GL_DOUBLE_MAT3x2_EXT 0x8F4B +#define GL_DOUBLE_MAT3x4_EXT 0x8F4C +#define GL_DOUBLE_MAT4x2_EXT 0x8F4D +#define GL_DOUBLE_MAT4x3_EXT 0x8F4E +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1DEXTPROC) (GLuint index, GLdouble x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2DEXTPROC) (GLuint index, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3DEXTPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4DEXTPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1DVEXTPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2DVEXTPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3DVEXTPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4DVEXTPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBLPOINTEREXTPROC) (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBLDVEXTPROC) (GLuint index, GLenum pname, GLdouble *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexAttribL1dEXT (GLuint index, GLdouble x); +GLAPI void APIENTRY glVertexAttribL2dEXT (GLuint index, GLdouble x, GLdouble y); +GLAPI void APIENTRY glVertexAttribL3dEXT (GLuint index, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glVertexAttribL4dEXT (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glVertexAttribL1dvEXT (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribL2dvEXT (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribL3dvEXT (GLuint index, 
const GLdouble *v); +GLAPI void APIENTRY glVertexAttribL4dvEXT (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribLPointerEXT (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glGetVertexAttribLdvEXT (GLuint index, GLenum pname, GLdouble *params); +#endif +#endif /* GL_EXT_vertex_attrib_64bit */ + +#ifndef GL_EXT_vertex_shader +#define GL_EXT_vertex_shader 1 +#define GL_VERTEX_SHADER_EXT 0x8780 +#define GL_VERTEX_SHADER_BINDING_EXT 0x8781 +#define GL_OP_INDEX_EXT 0x8782 +#define GL_OP_NEGATE_EXT 0x8783 +#define GL_OP_DOT3_EXT 0x8784 +#define GL_OP_DOT4_EXT 0x8785 +#define GL_OP_MUL_EXT 0x8786 +#define GL_OP_ADD_EXT 0x8787 +#define GL_OP_MADD_EXT 0x8788 +#define GL_OP_FRAC_EXT 0x8789 +#define GL_OP_MAX_EXT 0x878A +#define GL_OP_MIN_EXT 0x878B +#define GL_OP_SET_GE_EXT 0x878C +#define GL_OP_SET_LT_EXT 0x878D +#define GL_OP_CLAMP_EXT 0x878E +#define GL_OP_FLOOR_EXT 0x878F +#define GL_OP_ROUND_EXT 0x8790 +#define GL_OP_EXP_BASE_2_EXT 0x8791 +#define GL_OP_LOG_BASE_2_EXT 0x8792 +#define GL_OP_POWER_EXT 0x8793 +#define GL_OP_RECIP_EXT 0x8794 +#define GL_OP_RECIP_SQRT_EXT 0x8795 +#define GL_OP_SUB_EXT 0x8796 +#define GL_OP_CROSS_PRODUCT_EXT 0x8797 +#define GL_OP_MULTIPLY_MATRIX_EXT 0x8798 +#define GL_OP_MOV_EXT 0x8799 +#define GL_OUTPUT_VERTEX_EXT 0x879A +#define GL_OUTPUT_COLOR0_EXT 0x879B +#define GL_OUTPUT_COLOR1_EXT 0x879C +#define GL_OUTPUT_TEXTURE_COORD0_EXT 0x879D +#define GL_OUTPUT_TEXTURE_COORD1_EXT 0x879E +#define GL_OUTPUT_TEXTURE_COORD2_EXT 0x879F +#define GL_OUTPUT_TEXTURE_COORD3_EXT 0x87A0 +#define GL_OUTPUT_TEXTURE_COORD4_EXT 0x87A1 +#define GL_OUTPUT_TEXTURE_COORD5_EXT 0x87A2 +#define GL_OUTPUT_TEXTURE_COORD6_EXT 0x87A3 +#define GL_OUTPUT_TEXTURE_COORD7_EXT 0x87A4 +#define GL_OUTPUT_TEXTURE_COORD8_EXT 0x87A5 +#define GL_OUTPUT_TEXTURE_COORD9_EXT 0x87A6 +#define GL_OUTPUT_TEXTURE_COORD10_EXT 0x87A7 +#define GL_OUTPUT_TEXTURE_COORD11_EXT 0x87A8 +#define GL_OUTPUT_TEXTURE_COORD12_EXT 0x87A9 +#define GL_OUTPUT_TEXTURE_COORD13_EXT 0x87AA +#define GL_OUTPUT_TEXTURE_COORD14_EXT 0x87AB +#define GL_OUTPUT_TEXTURE_COORD15_EXT 0x87AC +#define GL_OUTPUT_TEXTURE_COORD16_EXT 0x87AD +#define GL_OUTPUT_TEXTURE_COORD17_EXT 0x87AE +#define GL_OUTPUT_TEXTURE_COORD18_EXT 0x87AF +#define GL_OUTPUT_TEXTURE_COORD19_EXT 0x87B0 +#define GL_OUTPUT_TEXTURE_COORD20_EXT 0x87B1 +#define GL_OUTPUT_TEXTURE_COORD21_EXT 0x87B2 +#define GL_OUTPUT_TEXTURE_COORD22_EXT 0x87B3 +#define GL_OUTPUT_TEXTURE_COORD23_EXT 0x87B4 +#define GL_OUTPUT_TEXTURE_COORD24_EXT 0x87B5 +#define GL_OUTPUT_TEXTURE_COORD25_EXT 0x87B6 +#define GL_OUTPUT_TEXTURE_COORD26_EXT 0x87B7 +#define GL_OUTPUT_TEXTURE_COORD27_EXT 0x87B8 +#define GL_OUTPUT_TEXTURE_COORD28_EXT 0x87B9 +#define GL_OUTPUT_TEXTURE_COORD29_EXT 0x87BA +#define GL_OUTPUT_TEXTURE_COORD30_EXT 0x87BB +#define GL_OUTPUT_TEXTURE_COORD31_EXT 0x87BC +#define GL_OUTPUT_FOG_EXT 0x87BD +#define GL_SCALAR_EXT 0x87BE +#define GL_VECTOR_EXT 0x87BF +#define GL_MATRIX_EXT 0x87C0 +#define GL_VARIANT_EXT 0x87C1 +#define GL_INVARIANT_EXT 0x87C2 +#define GL_LOCAL_CONSTANT_EXT 0x87C3 +#define GL_LOCAL_EXT 0x87C4 +#define GL_MAX_VERTEX_SHADER_INSTRUCTIONS_EXT 0x87C5 +#define GL_MAX_VERTEX_SHADER_VARIANTS_EXT 0x87C6 +#define GL_MAX_VERTEX_SHADER_INVARIANTS_EXT 0x87C7 +#define GL_MAX_VERTEX_SHADER_LOCAL_CONSTANTS_EXT 0x87C8 +#define GL_MAX_VERTEX_SHADER_LOCALS_EXT 0x87C9 +#define GL_MAX_OPTIMIZED_VERTEX_SHADER_INSTRUCTIONS_EXT 0x87CA +#define GL_MAX_OPTIMIZED_VERTEX_SHADER_VARIANTS_EXT 0x87CB +#define 
GL_MAX_OPTIMIZED_VERTEX_SHADER_LOCAL_CONSTANTS_EXT 0x87CC +#define GL_MAX_OPTIMIZED_VERTEX_SHADER_INVARIANTS_EXT 0x87CD +#define GL_MAX_OPTIMIZED_VERTEX_SHADER_LOCALS_EXT 0x87CE +#define GL_VERTEX_SHADER_INSTRUCTIONS_EXT 0x87CF +#define GL_VERTEX_SHADER_VARIANTS_EXT 0x87D0 +#define GL_VERTEX_SHADER_INVARIANTS_EXT 0x87D1 +#define GL_VERTEX_SHADER_LOCAL_CONSTANTS_EXT 0x87D2 +#define GL_VERTEX_SHADER_LOCALS_EXT 0x87D3 +#define GL_VERTEX_SHADER_OPTIMIZED_EXT 0x87D4 +#define GL_X_EXT 0x87D5 +#define GL_Y_EXT 0x87D6 +#define GL_Z_EXT 0x87D7 +#define GL_W_EXT 0x87D8 +#define GL_NEGATIVE_X_EXT 0x87D9 +#define GL_NEGATIVE_Y_EXT 0x87DA +#define GL_NEGATIVE_Z_EXT 0x87DB +#define GL_NEGATIVE_W_EXT 0x87DC +#define GL_ZERO_EXT 0x87DD +#define GL_ONE_EXT 0x87DE +#define GL_NEGATIVE_ONE_EXT 0x87DF +#define GL_NORMALIZED_RANGE_EXT 0x87E0 +#define GL_FULL_RANGE_EXT 0x87E1 +#define GL_CURRENT_VERTEX_EXT 0x87E2 +#define GL_MVP_MATRIX_EXT 0x87E3 +#define GL_VARIANT_VALUE_EXT 0x87E4 +#define GL_VARIANT_DATATYPE_EXT 0x87E5 +#define GL_VARIANT_ARRAY_STRIDE_EXT 0x87E6 +#define GL_VARIANT_ARRAY_TYPE_EXT 0x87E7 +#define GL_VARIANT_ARRAY_EXT 0x87E8 +#define GL_VARIANT_ARRAY_POINTER_EXT 0x87E9 +#define GL_INVARIANT_VALUE_EXT 0x87EA +#define GL_INVARIANT_DATATYPE_EXT 0x87EB +#define GL_LOCAL_CONSTANT_VALUE_EXT 0x87EC +#define GL_LOCAL_CONSTANT_DATATYPE_EXT 0x87ED +typedef void (APIENTRYP PFNGLBEGINVERTEXSHADEREXTPROC) (void); +typedef void (APIENTRYP PFNGLENDVERTEXSHADEREXTPROC) (void); +typedef void (APIENTRYP PFNGLBINDVERTEXSHADEREXTPROC) (GLuint id); +typedef GLuint (APIENTRYP PFNGLGENVERTEXSHADERSEXTPROC) (GLuint range); +typedef void (APIENTRYP PFNGLDELETEVERTEXSHADEREXTPROC) (GLuint id); +typedef void (APIENTRYP PFNGLSHADEROP1EXTPROC) (GLenum op, GLuint res, GLuint arg1); +typedef void (APIENTRYP PFNGLSHADEROP2EXTPROC) (GLenum op, GLuint res, GLuint arg1, GLuint arg2); +typedef void (APIENTRYP PFNGLSHADEROP3EXTPROC) (GLenum op, GLuint res, GLuint arg1, GLuint arg2, GLuint arg3); +typedef void (APIENTRYP PFNGLSWIZZLEEXTPROC) (GLuint res, GLuint in, GLenum outX, GLenum outY, GLenum outZ, GLenum outW); +typedef void (APIENTRYP PFNGLWRITEMASKEXTPROC) (GLuint res, GLuint in, GLenum outX, GLenum outY, GLenum outZ, GLenum outW); +typedef void (APIENTRYP PFNGLINSERTCOMPONENTEXTPROC) (GLuint res, GLuint src, GLuint num); +typedef void (APIENTRYP PFNGLEXTRACTCOMPONENTEXTPROC) (GLuint res, GLuint src, GLuint num); +typedef GLuint (APIENTRYP PFNGLGENSYMBOLSEXTPROC) (GLenum datatype, GLenum storagetype, GLenum range, GLuint components); +typedef void (APIENTRYP PFNGLSETINVARIANTEXTPROC) (GLuint id, GLenum type, const void *addr); +typedef void (APIENTRYP PFNGLSETLOCALCONSTANTEXTPROC) (GLuint id, GLenum type, const void *addr); +typedef void (APIENTRYP PFNGLVARIANTBVEXTPROC) (GLuint id, const GLbyte *addr); +typedef void (APIENTRYP PFNGLVARIANTSVEXTPROC) (GLuint id, const GLshort *addr); +typedef void (APIENTRYP PFNGLVARIANTIVEXTPROC) (GLuint id, const GLint *addr); +typedef void (APIENTRYP PFNGLVARIANTFVEXTPROC) (GLuint id, const GLfloat *addr); +typedef void (APIENTRYP PFNGLVARIANTDVEXTPROC) (GLuint id, const GLdouble *addr); +typedef void (APIENTRYP PFNGLVARIANTUBVEXTPROC) (GLuint id, const GLubyte *addr); +typedef void (APIENTRYP PFNGLVARIANTUSVEXTPROC) (GLuint id, const GLushort *addr); +typedef void (APIENTRYP PFNGLVARIANTUIVEXTPROC) (GLuint id, const GLuint *addr); +typedef void (APIENTRYP PFNGLVARIANTPOINTEREXTPROC) (GLuint id, GLenum type, GLuint stride, const void *addr); +typedef void (APIENTRYP 
PFNGLENABLEVARIANTCLIENTSTATEEXTPROC) (GLuint id); +typedef void (APIENTRYP PFNGLDISABLEVARIANTCLIENTSTATEEXTPROC) (GLuint id); +typedef GLuint (APIENTRYP PFNGLBINDLIGHTPARAMETEREXTPROC) (GLenum light, GLenum value); +typedef GLuint (APIENTRYP PFNGLBINDMATERIALPARAMETEREXTPROC) (GLenum face, GLenum value); +typedef GLuint (APIENTRYP PFNGLBINDTEXGENPARAMETEREXTPROC) (GLenum unit, GLenum coord, GLenum value); +typedef GLuint (APIENTRYP PFNGLBINDTEXTUREUNITPARAMETEREXTPROC) (GLenum unit, GLenum value); +typedef GLuint (APIENTRYP PFNGLBINDPARAMETEREXTPROC) (GLenum value); +typedef GLboolean (APIENTRYP PFNGLISVARIANTENABLEDEXTPROC) (GLuint id, GLenum cap); +typedef void (APIENTRYP PFNGLGETVARIANTBOOLEANVEXTPROC) (GLuint id, GLenum value, GLboolean *data); +typedef void (APIENTRYP PFNGLGETVARIANTINTEGERVEXTPROC) (GLuint id, GLenum value, GLint *data); +typedef void (APIENTRYP PFNGLGETVARIANTFLOATVEXTPROC) (GLuint id, GLenum value, GLfloat *data); +typedef void (APIENTRYP PFNGLGETVARIANTPOINTERVEXTPROC) (GLuint id, GLenum value, void **data); +typedef void (APIENTRYP PFNGLGETINVARIANTBOOLEANVEXTPROC) (GLuint id, GLenum value, GLboolean *data); +typedef void (APIENTRYP PFNGLGETINVARIANTINTEGERVEXTPROC) (GLuint id, GLenum value, GLint *data); +typedef void (APIENTRYP PFNGLGETINVARIANTFLOATVEXTPROC) (GLuint id, GLenum value, GLfloat *data); +typedef void (APIENTRYP PFNGLGETLOCALCONSTANTBOOLEANVEXTPROC) (GLuint id, GLenum value, GLboolean *data); +typedef void (APIENTRYP PFNGLGETLOCALCONSTANTINTEGERVEXTPROC) (GLuint id, GLenum value, GLint *data); +typedef void (APIENTRYP PFNGLGETLOCALCONSTANTFLOATVEXTPROC) (GLuint id, GLenum value, GLfloat *data); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBeginVertexShaderEXT (void); +GLAPI void APIENTRY glEndVertexShaderEXT (void); +GLAPI void APIENTRY glBindVertexShaderEXT (GLuint id); +GLAPI GLuint APIENTRY glGenVertexShadersEXT (GLuint range); +GLAPI void APIENTRY glDeleteVertexShaderEXT (GLuint id); +GLAPI void APIENTRY glShaderOp1EXT (GLenum op, GLuint res, GLuint arg1); +GLAPI void APIENTRY glShaderOp2EXT (GLenum op, GLuint res, GLuint arg1, GLuint arg2); +GLAPI void APIENTRY glShaderOp3EXT (GLenum op, GLuint res, GLuint arg1, GLuint arg2, GLuint arg3); +GLAPI void APIENTRY glSwizzleEXT (GLuint res, GLuint in, GLenum outX, GLenum outY, GLenum outZ, GLenum outW); +GLAPI void APIENTRY glWriteMaskEXT (GLuint res, GLuint in, GLenum outX, GLenum outY, GLenum outZ, GLenum outW); +GLAPI void APIENTRY glInsertComponentEXT (GLuint res, GLuint src, GLuint num); +GLAPI void APIENTRY glExtractComponentEXT (GLuint res, GLuint src, GLuint num); +GLAPI GLuint APIENTRY glGenSymbolsEXT (GLenum datatype, GLenum storagetype, GLenum range, GLuint components); +GLAPI void APIENTRY glSetInvariantEXT (GLuint id, GLenum type, const void *addr); +GLAPI void APIENTRY glSetLocalConstantEXT (GLuint id, GLenum type, const void *addr); +GLAPI void APIENTRY glVariantbvEXT (GLuint id, const GLbyte *addr); +GLAPI void APIENTRY glVariantsvEXT (GLuint id, const GLshort *addr); +GLAPI void APIENTRY glVariantivEXT (GLuint id, const GLint *addr); +GLAPI void APIENTRY glVariantfvEXT (GLuint id, const GLfloat *addr); +GLAPI void APIENTRY glVariantdvEXT (GLuint id, const GLdouble *addr); +GLAPI void APIENTRY glVariantubvEXT (GLuint id, const GLubyte *addr); +GLAPI void APIENTRY glVariantusvEXT (GLuint id, const GLushort *addr); +GLAPI void APIENTRY glVariantuivEXT (GLuint id, const GLuint *addr); +GLAPI void APIENTRY glVariantPointerEXT (GLuint id, GLenum type, GLuint stride, const void 
*addr); +GLAPI void APIENTRY glEnableVariantClientStateEXT (GLuint id); +GLAPI void APIENTRY glDisableVariantClientStateEXT (GLuint id); +GLAPI GLuint APIENTRY glBindLightParameterEXT (GLenum light, GLenum value); +GLAPI GLuint APIENTRY glBindMaterialParameterEXT (GLenum face, GLenum value); +GLAPI GLuint APIENTRY glBindTexGenParameterEXT (GLenum unit, GLenum coord, GLenum value); +GLAPI GLuint APIENTRY glBindTextureUnitParameterEXT (GLenum unit, GLenum value); +GLAPI GLuint APIENTRY glBindParameterEXT (GLenum value); +GLAPI GLboolean APIENTRY glIsVariantEnabledEXT (GLuint id, GLenum cap); +GLAPI void APIENTRY glGetVariantBooleanvEXT (GLuint id, GLenum value, GLboolean *data); +GLAPI void APIENTRY glGetVariantIntegervEXT (GLuint id, GLenum value, GLint *data); +GLAPI void APIENTRY glGetVariantFloatvEXT (GLuint id, GLenum value, GLfloat *data); +GLAPI void APIENTRY glGetVariantPointervEXT (GLuint id, GLenum value, void **data); +GLAPI void APIENTRY glGetInvariantBooleanvEXT (GLuint id, GLenum value, GLboolean *data); +GLAPI void APIENTRY glGetInvariantIntegervEXT (GLuint id, GLenum value, GLint *data); +GLAPI void APIENTRY glGetInvariantFloatvEXT (GLuint id, GLenum value, GLfloat *data); +GLAPI void APIENTRY glGetLocalConstantBooleanvEXT (GLuint id, GLenum value, GLboolean *data); +GLAPI void APIENTRY glGetLocalConstantIntegervEXT (GLuint id, GLenum value, GLint *data); +GLAPI void APIENTRY glGetLocalConstantFloatvEXT (GLuint id, GLenum value, GLfloat *data); +#endif +#endif /* GL_EXT_vertex_shader */ + +#ifndef GL_EXT_vertex_weighting +#define GL_EXT_vertex_weighting 1 +#define GL_MODELVIEW0_STACK_DEPTH_EXT 0x0BA3 +#define GL_MODELVIEW1_STACK_DEPTH_EXT 0x8502 +#define GL_MODELVIEW0_MATRIX_EXT 0x0BA6 +#define GL_MODELVIEW1_MATRIX_EXT 0x8506 +#define GL_VERTEX_WEIGHTING_EXT 0x8509 +#define GL_MODELVIEW0_EXT 0x1700 +#define GL_MODELVIEW1_EXT 0x850A +#define GL_CURRENT_VERTEX_WEIGHT_EXT 0x850B +#define GL_VERTEX_WEIGHT_ARRAY_EXT 0x850C +#define GL_VERTEX_WEIGHT_ARRAY_SIZE_EXT 0x850D +#define GL_VERTEX_WEIGHT_ARRAY_TYPE_EXT 0x850E +#define GL_VERTEX_WEIGHT_ARRAY_STRIDE_EXT 0x850F +#define GL_VERTEX_WEIGHT_ARRAY_POINTER_EXT 0x8510 +typedef void (APIENTRYP PFNGLVERTEXWEIGHTFEXTPROC) (GLfloat weight); +typedef void (APIENTRYP PFNGLVERTEXWEIGHTFVEXTPROC) (const GLfloat *weight); +typedef void (APIENTRYP PFNGLVERTEXWEIGHTPOINTEREXTPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexWeightfEXT (GLfloat weight); +GLAPI void APIENTRY glVertexWeightfvEXT (const GLfloat *weight); +GLAPI void APIENTRY glVertexWeightPointerEXT (GLint size, GLenum type, GLsizei stride, const void *pointer); +#endif +#endif /* GL_EXT_vertex_weighting */ + +#ifndef GL_EXT_win32_keyed_mutex +#define GL_EXT_win32_keyed_mutex 1 +typedef GLboolean (APIENTRYP PFNGLACQUIREKEYEDMUTEXWIN32EXTPROC) (GLuint memory, GLuint64 key, GLuint timeout); +typedef GLboolean (APIENTRYP PFNGLRELEASEKEYEDMUTEXWIN32EXTPROC) (GLuint memory, GLuint64 key); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLboolean APIENTRY glAcquireKeyedMutexWin32EXT (GLuint memory, GLuint64 key, GLuint timeout); +GLAPI GLboolean APIENTRY glReleaseKeyedMutexWin32EXT (GLuint memory, GLuint64 key); +#endif +#endif /* GL_EXT_win32_keyed_mutex */ + +#ifndef GL_EXT_window_rectangles +#define GL_EXT_window_rectangles 1 +#define GL_INCLUSIVE_EXT 0x8F10 +#define GL_EXCLUSIVE_EXT 0x8F11 +#define GL_WINDOW_RECTANGLE_EXT 0x8F12 +#define GL_WINDOW_RECTANGLE_MODE_EXT 0x8F13 +#define GL_MAX_WINDOW_RECTANGLES_EXT 0x8F14 
+#define GL_NUM_WINDOW_RECTANGLES_EXT 0x8F15 +typedef void (APIENTRYP PFNGLWINDOWRECTANGLESEXTPROC) (GLenum mode, GLsizei count, const GLint *box); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glWindowRectanglesEXT (GLenum mode, GLsizei count, const GLint *box); +#endif +#endif /* GL_EXT_window_rectangles */ + +#ifndef GL_EXT_x11_sync_object +#define GL_EXT_x11_sync_object 1 +#define GL_SYNC_X11_FENCE_EXT 0x90E1 +typedef GLsync (APIENTRYP PFNGLIMPORTSYNCEXTPROC) (GLenum external_sync_type, GLintptr external_sync, GLbitfield flags); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLsync APIENTRY glImportSyncEXT (GLenum external_sync_type, GLintptr external_sync, GLbitfield flags); +#endif +#endif /* GL_EXT_x11_sync_object */ + +#ifndef GL_GREMEDY_frame_terminator +#define GL_GREMEDY_frame_terminator 1 +typedef void (APIENTRYP PFNGLFRAMETERMINATORGREMEDYPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFrameTerminatorGREMEDY (void); +#endif +#endif /* GL_GREMEDY_frame_terminator */ + +#ifndef GL_GREMEDY_string_marker +#define GL_GREMEDY_string_marker 1 +typedef void (APIENTRYP PFNGLSTRINGMARKERGREMEDYPROC) (GLsizei len, const void *string); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glStringMarkerGREMEDY (GLsizei len, const void *string); +#endif +#endif /* GL_GREMEDY_string_marker */ + +#ifndef GL_HP_convolution_border_modes +#define GL_HP_convolution_border_modes 1 +#define GL_IGNORE_BORDER_HP 0x8150 +#define GL_CONSTANT_BORDER_HP 0x8151 +#define GL_REPLICATE_BORDER_HP 0x8153 +#define GL_CONVOLUTION_BORDER_COLOR_HP 0x8154 +#endif /* GL_HP_convolution_border_modes */ + +#ifndef GL_HP_image_transform +#define GL_HP_image_transform 1 +#define GL_IMAGE_SCALE_X_HP 0x8155 +#define GL_IMAGE_SCALE_Y_HP 0x8156 +#define GL_IMAGE_TRANSLATE_X_HP 0x8157 +#define GL_IMAGE_TRANSLATE_Y_HP 0x8158 +#define GL_IMAGE_ROTATE_ANGLE_HP 0x8159 +#define GL_IMAGE_ROTATE_ORIGIN_X_HP 0x815A +#define GL_IMAGE_ROTATE_ORIGIN_Y_HP 0x815B +#define GL_IMAGE_MAG_FILTER_HP 0x815C +#define GL_IMAGE_MIN_FILTER_HP 0x815D +#define GL_IMAGE_CUBIC_WEIGHT_HP 0x815E +#define GL_CUBIC_HP 0x815F +#define GL_AVERAGE_HP 0x8160 +#define GL_IMAGE_TRANSFORM_2D_HP 0x8161 +#define GL_POST_IMAGE_TRANSFORM_COLOR_TABLE_HP 0x8162 +#define GL_PROXY_POST_IMAGE_TRANSFORM_COLOR_TABLE_HP 0x8163 +typedef void (APIENTRYP PFNGLIMAGETRANSFORMPARAMETERIHPPROC) (GLenum target, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLIMAGETRANSFORMPARAMETERFHPPROC) (GLenum target, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLIMAGETRANSFORMPARAMETERIVHPPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLIMAGETRANSFORMPARAMETERFVHPPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLGETIMAGETRANSFORMPARAMETERIVHPPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETIMAGETRANSFORMPARAMETERFVHPPROC) (GLenum target, GLenum pname, GLfloat *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glImageTransformParameteriHP (GLenum target, GLenum pname, GLint param); +GLAPI void APIENTRY glImageTransformParameterfHP (GLenum target, GLenum pname, GLfloat param); +GLAPI void APIENTRY glImageTransformParameterivHP (GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glImageTransformParameterfvHP (GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glGetImageTransformParameterivHP (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetImageTransformParameterfvHP (GLenum target, 
GLenum pname, GLfloat *params); +#endif +#endif /* GL_HP_image_transform */ + +#ifndef GL_HP_occlusion_test +#define GL_HP_occlusion_test 1 +#define GL_OCCLUSION_TEST_HP 0x8165 +#define GL_OCCLUSION_TEST_RESULT_HP 0x8166 +#endif /* GL_HP_occlusion_test */ + +#ifndef GL_HP_texture_lighting +#define GL_HP_texture_lighting 1 +#define GL_TEXTURE_LIGHTING_MODE_HP 0x8167 +#define GL_TEXTURE_POST_SPECULAR_HP 0x8168 +#define GL_TEXTURE_PRE_SPECULAR_HP 0x8169 +#endif /* GL_HP_texture_lighting */ + +#ifndef GL_IBM_cull_vertex +#define GL_IBM_cull_vertex 1 +#define GL_CULL_VERTEX_IBM 103050 +#endif /* GL_IBM_cull_vertex */ + +#ifndef GL_IBM_multimode_draw_arrays +#define GL_IBM_multimode_draw_arrays 1 +typedef void (APIENTRYP PFNGLMULTIMODEDRAWARRAYSIBMPROC) (const GLenum *mode, const GLint *first, const GLsizei *count, GLsizei primcount, GLint modestride); +typedef void (APIENTRYP PFNGLMULTIMODEDRAWELEMENTSIBMPROC) (const GLenum *mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount, GLint modestride); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMultiModeDrawArraysIBM (const GLenum *mode, const GLint *first, const GLsizei *count, GLsizei primcount, GLint modestride); +GLAPI void APIENTRY glMultiModeDrawElementsIBM (const GLenum *mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount, GLint modestride); +#endif +#endif /* GL_IBM_multimode_draw_arrays */ + +#ifndef GL_IBM_rasterpos_clip +#define GL_IBM_rasterpos_clip 1 +#define GL_RASTER_POSITION_UNCLIPPED_IBM 0x19262 +#endif /* GL_IBM_rasterpos_clip */ + +#ifndef GL_IBM_static_data +#define GL_IBM_static_data 1 +#define GL_ALL_STATIC_DATA_IBM 103060 +#define GL_STATIC_VERTEX_ARRAY_IBM 103061 +typedef void (APIENTRYP PFNGLFLUSHSTATICDATAIBMPROC) (GLenum target); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFlushStaticDataIBM (GLenum target); +#endif +#endif /* GL_IBM_static_data */ + +#ifndef GL_IBM_texture_mirrored_repeat +#define GL_IBM_texture_mirrored_repeat 1 +#define GL_MIRRORED_REPEAT_IBM 0x8370 +#endif /* GL_IBM_texture_mirrored_repeat */ + +#ifndef GL_IBM_vertex_array_lists +#define GL_IBM_vertex_array_lists 1 +#define GL_VERTEX_ARRAY_LIST_IBM 103070 +#define GL_NORMAL_ARRAY_LIST_IBM 103071 +#define GL_COLOR_ARRAY_LIST_IBM 103072 +#define GL_INDEX_ARRAY_LIST_IBM 103073 +#define GL_TEXTURE_COORD_ARRAY_LIST_IBM 103074 +#define GL_EDGE_FLAG_ARRAY_LIST_IBM 103075 +#define GL_FOG_COORDINATE_ARRAY_LIST_IBM 103076 +#define GL_SECONDARY_COLOR_ARRAY_LIST_IBM 103077 +#define GL_VERTEX_ARRAY_LIST_STRIDE_IBM 103080 +#define GL_NORMAL_ARRAY_LIST_STRIDE_IBM 103081 +#define GL_COLOR_ARRAY_LIST_STRIDE_IBM 103082 +#define GL_INDEX_ARRAY_LIST_STRIDE_IBM 103083 +#define GL_TEXTURE_COORD_ARRAY_LIST_STRIDE_IBM 103084 +#define GL_EDGE_FLAG_ARRAY_LIST_STRIDE_IBM 103085 +#define GL_FOG_COORDINATE_ARRAY_LIST_STRIDE_IBM 103086 +#define GL_SECONDARY_COLOR_ARRAY_LIST_STRIDE_IBM 103087 +typedef void (APIENTRYP PFNGLCOLORPOINTERLISTIBMPROC) (GLint size, GLenum type, GLint stride, const void **pointer, GLint ptrstride); +typedef void (APIENTRYP PFNGLSECONDARYCOLORPOINTERLISTIBMPROC) (GLint size, GLenum type, GLint stride, const void **pointer, GLint ptrstride); +typedef void (APIENTRYP PFNGLEDGEFLAGPOINTERLISTIBMPROC) (GLint stride, const GLboolean **pointer, GLint ptrstride); +typedef void (APIENTRYP PFNGLFOGCOORDPOINTERLISTIBMPROC) (GLenum type, GLint stride, const void **pointer, GLint ptrstride); +typedef void (APIENTRYP PFNGLINDEXPOINTERLISTIBMPROC) (GLenum type, GLint stride, const void 
**pointer, GLint ptrstride); +typedef void (APIENTRYP PFNGLNORMALPOINTERLISTIBMPROC) (GLenum type, GLint stride, const void **pointer, GLint ptrstride); +typedef void (APIENTRYP PFNGLTEXCOORDPOINTERLISTIBMPROC) (GLint size, GLenum type, GLint stride, const void **pointer, GLint ptrstride); +typedef void (APIENTRYP PFNGLVERTEXPOINTERLISTIBMPROC) (GLint size, GLenum type, GLint stride, const void **pointer, GLint ptrstride); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glColorPointerListIBM (GLint size, GLenum type, GLint stride, const void **pointer, GLint ptrstride); +GLAPI void APIENTRY glSecondaryColorPointerListIBM (GLint size, GLenum type, GLint stride, const void **pointer, GLint ptrstride); +GLAPI void APIENTRY glEdgeFlagPointerListIBM (GLint stride, const GLboolean **pointer, GLint ptrstride); +GLAPI void APIENTRY glFogCoordPointerListIBM (GLenum type, GLint stride, const void **pointer, GLint ptrstride); +GLAPI void APIENTRY glIndexPointerListIBM (GLenum type, GLint stride, const void **pointer, GLint ptrstride); +GLAPI void APIENTRY glNormalPointerListIBM (GLenum type, GLint stride, const void **pointer, GLint ptrstride); +GLAPI void APIENTRY glTexCoordPointerListIBM (GLint size, GLenum type, GLint stride, const void **pointer, GLint ptrstride); +GLAPI void APIENTRY glVertexPointerListIBM (GLint size, GLenum type, GLint stride, const void **pointer, GLint ptrstride); +#endif +#endif /* GL_IBM_vertex_array_lists */ + +#ifndef GL_INGR_blend_func_separate +#define GL_INGR_blend_func_separate 1 +typedef void (APIENTRYP PFNGLBLENDFUNCSEPARATEINGRPROC) (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendFuncSeparateINGR (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +#endif +#endif /* GL_INGR_blend_func_separate */ + +#ifndef GL_INGR_color_clamp +#define GL_INGR_color_clamp 1 +#define GL_RED_MIN_CLAMP_INGR 0x8560 +#define GL_GREEN_MIN_CLAMP_INGR 0x8561 +#define GL_BLUE_MIN_CLAMP_INGR 0x8562 +#define GL_ALPHA_MIN_CLAMP_INGR 0x8563 +#define GL_RED_MAX_CLAMP_INGR 0x8564 +#define GL_GREEN_MAX_CLAMP_INGR 0x8565 +#define GL_BLUE_MAX_CLAMP_INGR 0x8566 +#define GL_ALPHA_MAX_CLAMP_INGR 0x8567 +#endif /* GL_INGR_color_clamp */ + +#ifndef GL_INGR_interlace_read +#define GL_INGR_interlace_read 1 +#define GL_INTERLACE_READ_INGR 0x8568 +#endif /* GL_INGR_interlace_read */ + +#ifndef GL_INTEL_blackhole_render +#define GL_INTEL_blackhole_render 1 +#define GL_BLACKHOLE_RENDER_INTEL 0x83FC +#endif /* GL_INTEL_blackhole_render */ + +#ifndef GL_INTEL_conservative_rasterization +#define GL_INTEL_conservative_rasterization 1 +#define GL_CONSERVATIVE_RASTERIZATION_INTEL 0x83FE +#endif /* GL_INTEL_conservative_rasterization */ + +#ifndef GL_INTEL_fragment_shader_ordering +#define GL_INTEL_fragment_shader_ordering 1 +#endif /* GL_INTEL_fragment_shader_ordering */ + +#ifndef GL_INTEL_framebuffer_CMAA +#define GL_INTEL_framebuffer_CMAA 1 +typedef void (APIENTRYP PFNGLAPPLYFRAMEBUFFERATTACHMENTCMAAINTELPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glApplyFramebufferAttachmentCMAAINTEL (void); +#endif +#endif /* GL_INTEL_framebuffer_CMAA */ + +#ifndef GL_INTEL_map_texture +#define GL_INTEL_map_texture 1 +#define GL_TEXTURE_MEMORY_LAYOUT_INTEL 0x83FF +#define GL_LAYOUT_DEFAULT_INTEL 0 +#define GL_LAYOUT_LINEAR_INTEL 1 +#define GL_LAYOUT_LINEAR_CPU_CACHED_INTEL 2 +typedef void (APIENTRYP PFNGLSYNCTEXTUREINTELPROC) (GLuint texture); +typedef void (APIENTRYP 
PFNGLUNMAPTEXTURE2DINTELPROC) (GLuint texture, GLint level); +typedef void *(APIENTRYP PFNGLMAPTEXTURE2DINTELPROC) (GLuint texture, GLint level, GLbitfield access, GLint *stride, GLenum *layout); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSyncTextureINTEL (GLuint texture); +GLAPI void APIENTRY glUnmapTexture2DINTEL (GLuint texture, GLint level); +GLAPI void *APIENTRY glMapTexture2DINTEL (GLuint texture, GLint level, GLbitfield access, GLint *stride, GLenum *layout); +#endif +#endif /* GL_INTEL_map_texture */ + +#ifndef GL_INTEL_parallel_arrays +#define GL_INTEL_parallel_arrays 1 +#define GL_PARALLEL_ARRAYS_INTEL 0x83F4 +#define GL_VERTEX_ARRAY_PARALLEL_POINTERS_INTEL 0x83F5 +#define GL_NORMAL_ARRAY_PARALLEL_POINTERS_INTEL 0x83F6 +#define GL_COLOR_ARRAY_PARALLEL_POINTERS_INTEL 0x83F7 +#define GL_TEXTURE_COORD_ARRAY_PARALLEL_POINTERS_INTEL 0x83F8 +typedef void (APIENTRYP PFNGLVERTEXPOINTERVINTELPROC) (GLint size, GLenum type, const void **pointer); +typedef void (APIENTRYP PFNGLNORMALPOINTERVINTELPROC) (GLenum type, const void **pointer); +typedef void (APIENTRYP PFNGLCOLORPOINTERVINTELPROC) (GLint size, GLenum type, const void **pointer); +typedef void (APIENTRYP PFNGLTEXCOORDPOINTERVINTELPROC) (GLint size, GLenum type, const void **pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexPointervINTEL (GLint size, GLenum type, const void **pointer); +GLAPI void APIENTRY glNormalPointervINTEL (GLenum type, const void **pointer); +GLAPI void APIENTRY glColorPointervINTEL (GLint size, GLenum type, const void **pointer); +GLAPI void APIENTRY glTexCoordPointervINTEL (GLint size, GLenum type, const void **pointer); +#endif +#endif /* GL_INTEL_parallel_arrays */ + +#ifndef GL_INTEL_performance_query +#define GL_INTEL_performance_query 1 +#define GL_PERFQUERY_SINGLE_CONTEXT_INTEL 0x00000000 +#define GL_PERFQUERY_GLOBAL_CONTEXT_INTEL 0x00000001 +#define GL_PERFQUERY_WAIT_INTEL 0x83FB +#define GL_PERFQUERY_FLUSH_INTEL 0x83FA +#define GL_PERFQUERY_DONOT_FLUSH_INTEL 0x83F9 +#define GL_PERFQUERY_COUNTER_EVENT_INTEL 0x94F0 +#define GL_PERFQUERY_COUNTER_DURATION_NORM_INTEL 0x94F1 +#define GL_PERFQUERY_COUNTER_DURATION_RAW_INTEL 0x94F2 +#define GL_PERFQUERY_COUNTER_THROUGHPUT_INTEL 0x94F3 +#define GL_PERFQUERY_COUNTER_RAW_INTEL 0x94F4 +#define GL_PERFQUERY_COUNTER_TIMESTAMP_INTEL 0x94F5 +#define GL_PERFQUERY_COUNTER_DATA_UINT32_INTEL 0x94F8 +#define GL_PERFQUERY_COUNTER_DATA_UINT64_INTEL 0x94F9 +#define GL_PERFQUERY_COUNTER_DATA_FLOAT_INTEL 0x94FA +#define GL_PERFQUERY_COUNTER_DATA_DOUBLE_INTEL 0x94FB +#define GL_PERFQUERY_COUNTER_DATA_BOOL32_INTEL 0x94FC +#define GL_PERFQUERY_QUERY_NAME_LENGTH_MAX_INTEL 0x94FD +#define GL_PERFQUERY_COUNTER_NAME_LENGTH_MAX_INTEL 0x94FE +#define GL_PERFQUERY_COUNTER_DESC_LENGTH_MAX_INTEL 0x94FF +#define GL_PERFQUERY_GPA_EXTENDED_COUNTERS_INTEL 0x9500 +typedef void (APIENTRYP PFNGLBEGINPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (APIENTRYP PFNGLCREATEPERFQUERYINTELPROC) (GLuint queryId, GLuint *queryHandle); +typedef void (APIENTRYP PFNGLDELETEPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (APIENTRYP PFNGLENDPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (APIENTRYP PFNGLGETFIRSTPERFQUERYIDINTELPROC) (GLuint *queryId); +typedef void (APIENTRYP PFNGLGETNEXTPERFQUERYIDINTELPROC) (GLuint queryId, GLuint *nextQueryId); +typedef void (APIENTRYP PFNGLGETPERFCOUNTERINFOINTELPROC) (GLuint queryId, GLuint counterId, GLuint counterNameLength, GLchar *counterName, GLuint counterDescLength, GLchar *counterDesc, GLuint *counterOffset, 
GLuint *counterDataSize, GLuint *counterTypeEnum, GLuint *counterDataTypeEnum, GLuint64 *rawCounterMaxValue); +typedef void (APIENTRYP PFNGLGETPERFQUERYDATAINTELPROC) (GLuint queryHandle, GLuint flags, GLsizei dataSize, void *data, GLuint *bytesWritten); +typedef void (APIENTRYP PFNGLGETPERFQUERYIDBYNAMEINTELPROC) (GLchar *queryName, GLuint *queryId); +typedef void (APIENTRYP PFNGLGETPERFQUERYINFOINTELPROC) (GLuint queryId, GLuint queryNameLength, GLchar *queryName, GLuint *dataSize, GLuint *noCounters, GLuint *noInstances, GLuint *capsMask); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBeginPerfQueryINTEL (GLuint queryHandle); +GLAPI void APIENTRY glCreatePerfQueryINTEL (GLuint queryId, GLuint *queryHandle); +GLAPI void APIENTRY glDeletePerfQueryINTEL (GLuint queryHandle); +GLAPI void APIENTRY glEndPerfQueryINTEL (GLuint queryHandle); +GLAPI void APIENTRY glGetFirstPerfQueryIdINTEL (GLuint *queryId); +GLAPI void APIENTRY glGetNextPerfQueryIdINTEL (GLuint queryId, GLuint *nextQueryId); +GLAPI void APIENTRY glGetPerfCounterInfoINTEL (GLuint queryId, GLuint counterId, GLuint counterNameLength, GLchar *counterName, GLuint counterDescLength, GLchar *counterDesc, GLuint *counterOffset, GLuint *counterDataSize, GLuint *counterTypeEnum, GLuint *counterDataTypeEnum, GLuint64 *rawCounterMaxValue); +GLAPI void APIENTRY glGetPerfQueryDataINTEL (GLuint queryHandle, GLuint flags, GLsizei dataSize, void *data, GLuint *bytesWritten); +GLAPI void APIENTRY glGetPerfQueryIdByNameINTEL (GLchar *queryName, GLuint *queryId); +GLAPI void APIENTRY glGetPerfQueryInfoINTEL (GLuint queryId, GLuint queryNameLength, GLchar *queryName, GLuint *dataSize, GLuint *noCounters, GLuint *noInstances, GLuint *capsMask); +#endif +#endif /* GL_INTEL_performance_query */ + +#ifndef GL_MESAX_texture_stack +#define GL_MESAX_texture_stack 1 +#define GL_TEXTURE_1D_STACK_MESAX 0x8759 +#define GL_TEXTURE_2D_STACK_MESAX 0x875A +#define GL_PROXY_TEXTURE_1D_STACK_MESAX 0x875B +#define GL_PROXY_TEXTURE_2D_STACK_MESAX 0x875C +#define GL_TEXTURE_1D_STACK_BINDING_MESAX 0x875D +#define GL_TEXTURE_2D_STACK_BINDING_MESAX 0x875E +#endif /* GL_MESAX_texture_stack */ + +#ifndef GL_MESA_pack_invert +#define GL_MESA_pack_invert 1 +#define GL_PACK_INVERT_MESA 0x8758 +#endif /* GL_MESA_pack_invert */ + +#ifndef GL_MESA_program_binary_formats +#define GL_MESA_program_binary_formats 1 +#define GL_PROGRAM_BINARY_FORMAT_MESA 0x875F +#endif /* GL_MESA_program_binary_formats */ + +#ifndef GL_MESA_resize_buffers +#define GL_MESA_resize_buffers 1 +typedef void (APIENTRYP PFNGLRESIZEBUFFERSMESAPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glResizeBuffersMESA (void); +#endif +#endif /* GL_MESA_resize_buffers */ + +#ifndef GL_MESA_shader_integer_functions +#define GL_MESA_shader_integer_functions 1 +#endif /* GL_MESA_shader_integer_functions */ + +#ifndef GL_MESA_tile_raster_order +#define GL_MESA_tile_raster_order 1 +#define GL_TILE_RASTER_ORDER_FIXED_MESA 0x8BB8 +#define GL_TILE_RASTER_ORDER_INCREASING_X_MESA 0x8BB9 +#define GL_TILE_RASTER_ORDER_INCREASING_Y_MESA 0x8BBA +#endif /* GL_MESA_tile_raster_order */ + +#ifndef GL_MESA_window_pos +#define GL_MESA_window_pos 1 +typedef void (APIENTRYP PFNGLWINDOWPOS2DMESAPROC) (GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLWINDOWPOS2DVMESAPROC) (const GLdouble *v); +typedef void (APIENTRYP PFNGLWINDOWPOS2FMESAPROC) (GLfloat x, GLfloat y); +typedef void (APIENTRYP PFNGLWINDOWPOS2FVMESAPROC) (const GLfloat *v); +typedef void (APIENTRYP PFNGLWINDOWPOS2IMESAPROC) (GLint x, GLint y); +typedef 
void (APIENTRYP PFNGLWINDOWPOS2IVMESAPROC) (const GLint *v); +typedef void (APIENTRYP PFNGLWINDOWPOS2SMESAPROC) (GLshort x, GLshort y); +typedef void (APIENTRYP PFNGLWINDOWPOS2SVMESAPROC) (const GLshort *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3DMESAPROC) (GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLWINDOWPOS3DVMESAPROC) (const GLdouble *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3FMESAPROC) (GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLWINDOWPOS3FVMESAPROC) (const GLfloat *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3IMESAPROC) (GLint x, GLint y, GLint z); +typedef void (APIENTRYP PFNGLWINDOWPOS3IVMESAPROC) (const GLint *v); +typedef void (APIENTRYP PFNGLWINDOWPOS3SMESAPROC) (GLshort x, GLshort y, GLshort z); +typedef void (APIENTRYP PFNGLWINDOWPOS3SVMESAPROC) (const GLshort *v); +typedef void (APIENTRYP PFNGLWINDOWPOS4DMESAPROC) (GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLWINDOWPOS4DVMESAPROC) (const GLdouble *v); +typedef void (APIENTRYP PFNGLWINDOWPOS4FMESAPROC) (GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLWINDOWPOS4FVMESAPROC) (const GLfloat *v); +typedef void (APIENTRYP PFNGLWINDOWPOS4IMESAPROC) (GLint x, GLint y, GLint z, GLint w); +typedef void (APIENTRYP PFNGLWINDOWPOS4IVMESAPROC) (const GLint *v); +typedef void (APIENTRYP PFNGLWINDOWPOS4SMESAPROC) (GLshort x, GLshort y, GLshort z, GLshort w); +typedef void (APIENTRYP PFNGLWINDOWPOS4SVMESAPROC) (const GLshort *v); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glWindowPos2dMESA (GLdouble x, GLdouble y); +GLAPI void APIENTRY glWindowPos2dvMESA (const GLdouble *v); +GLAPI void APIENTRY glWindowPos2fMESA (GLfloat x, GLfloat y); +GLAPI void APIENTRY glWindowPos2fvMESA (const GLfloat *v); +GLAPI void APIENTRY glWindowPos2iMESA (GLint x, GLint y); +GLAPI void APIENTRY glWindowPos2ivMESA (const GLint *v); +GLAPI void APIENTRY glWindowPos2sMESA (GLshort x, GLshort y); +GLAPI void APIENTRY glWindowPos2svMESA (const GLshort *v); +GLAPI void APIENTRY glWindowPos3dMESA (GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glWindowPos3dvMESA (const GLdouble *v); +GLAPI void APIENTRY glWindowPos3fMESA (GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glWindowPos3fvMESA (const GLfloat *v); +GLAPI void APIENTRY glWindowPos3iMESA (GLint x, GLint y, GLint z); +GLAPI void APIENTRY glWindowPos3ivMESA (const GLint *v); +GLAPI void APIENTRY glWindowPos3sMESA (GLshort x, GLshort y, GLshort z); +GLAPI void APIENTRY glWindowPos3svMESA (const GLshort *v); +GLAPI void APIENTRY glWindowPos4dMESA (GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glWindowPos4dvMESA (const GLdouble *v); +GLAPI void APIENTRY glWindowPos4fMESA (GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glWindowPos4fvMESA (const GLfloat *v); +GLAPI void APIENTRY glWindowPos4iMESA (GLint x, GLint y, GLint z, GLint w); +GLAPI void APIENTRY glWindowPos4ivMESA (const GLint *v); +GLAPI void APIENTRY glWindowPos4sMESA (GLshort x, GLshort y, GLshort z, GLshort w); +GLAPI void APIENTRY glWindowPos4svMESA (const GLshort *v); +#endif +#endif /* GL_MESA_window_pos */ + +#ifndef GL_MESA_ycbcr_texture +#define GL_MESA_ycbcr_texture 1 +#define GL_UNSIGNED_SHORT_8_8_MESA 0x85BA +#define GL_UNSIGNED_SHORT_8_8_REV_MESA 0x85BB +#define GL_YCBCR_MESA 0x8757 +#endif /* GL_MESA_ycbcr_texture */ + +#ifndef GL_NVX_blend_equation_advanced_multi_draw_buffers +#define GL_NVX_blend_equation_advanced_multi_draw_buffers 1 +#endif /* 
GL_NVX_blend_equation_advanced_multi_draw_buffers */ + +#ifndef GL_NVX_conditional_render +#define GL_NVX_conditional_render 1 +typedef void (APIENTRYP PFNGLBEGINCONDITIONALRENDERNVXPROC) (GLuint id); +typedef void (APIENTRYP PFNGLENDCONDITIONALRENDERNVXPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBeginConditionalRenderNVX (GLuint id); +GLAPI void APIENTRY glEndConditionalRenderNVX (void); +#endif +#endif /* GL_NVX_conditional_render */ + +#ifndef GL_NVX_gpu_memory_info +#define GL_NVX_gpu_memory_info 1 +#define GL_GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX 0x9047 +#define GL_GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX 0x9048 +#define GL_GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX 0x9049 +#define GL_GPU_MEMORY_INFO_EVICTION_COUNT_NVX 0x904A +#define GL_GPU_MEMORY_INFO_EVICTED_MEMORY_NVX 0x904B +#endif /* GL_NVX_gpu_memory_info */ + +#ifndef GL_NVX_linked_gpu_multicast +#define GL_NVX_linked_gpu_multicast 1 +#define GL_LGPU_SEPARATE_STORAGE_BIT_NVX 0x0800 +#define GL_MAX_LGPU_GPUS_NVX 0x92BA +typedef void (APIENTRYP PFNGLLGPUNAMEDBUFFERSUBDATANVXPROC) (GLbitfield gpuMask, GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +typedef void (APIENTRYP PFNGLLGPUCOPYIMAGESUBDATANVXPROC) (GLuint sourceGpu, GLbitfield destinationGpuMask, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srxY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei width, GLsizei height, GLsizei depth); +typedef void (APIENTRYP PFNGLLGPUINTERLOCKNVXPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glLGPUNamedBufferSubDataNVX (GLbitfield gpuMask, GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +GLAPI void APIENTRY glLGPUCopyImageSubDataNVX (GLuint sourceGpu, GLbitfield destinationGpuMask, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srxY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei width, GLsizei height, GLsizei depth); +GLAPI void APIENTRY glLGPUInterlockNVX (void); +#endif +#endif /* GL_NVX_linked_gpu_multicast */ + +#ifndef GL_NV_alpha_to_coverage_dither_control +#define GL_NV_alpha_to_coverage_dither_control 1 +#define GL_ALPHA_TO_COVERAGE_DITHER_DEFAULT_NV 0x934D +#define GL_ALPHA_TO_COVERAGE_DITHER_ENABLE_NV 0x934E +#define GL_ALPHA_TO_COVERAGE_DITHER_DISABLE_NV 0x934F +#define GL_ALPHA_TO_COVERAGE_DITHER_MODE_NV 0x92BF +typedef void (APIENTRYP PFNGLALPHATOCOVERAGEDITHERCONTROLNVPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glAlphaToCoverageDitherControlNV (GLenum mode); +#endif +#endif /* GL_NV_alpha_to_coverage_dither_control */ + +#ifndef GL_NV_bindless_multi_draw_indirect +#define GL_NV_bindless_multi_draw_indirect 1 +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTBINDLESSNVPROC) (GLenum mode, const void *indirect, GLsizei drawCount, GLsizei stride, GLint vertexBufferCount); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTBINDLESSNVPROC) (GLenum mode, GLenum type, const void *indirect, GLsizei drawCount, GLsizei stride, GLint vertexBufferCount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMultiDrawArraysIndirectBindlessNV (GLenum mode, const void *indirect, GLsizei drawCount, GLsizei stride, GLint vertexBufferCount); +GLAPI void APIENTRY glMultiDrawElementsIndirectBindlessNV (GLenum mode, GLenum type, const void *indirect, GLsizei drawCount, GLsizei stride, GLint vertexBufferCount); +#endif +#endif /* GL_NV_bindless_multi_draw_indirect */ + +#ifndef 
GL_NV_bindless_multi_draw_indirect_count +#define GL_NV_bindless_multi_draw_indirect_count 1 +typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTBINDLESSCOUNTNVPROC) (GLenum mode, const void *indirect, GLsizei drawCount, GLsizei maxDrawCount, GLsizei stride, GLint vertexBufferCount); +typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTBINDLESSCOUNTNVPROC) (GLenum mode, GLenum type, const void *indirect, GLsizei drawCount, GLsizei maxDrawCount, GLsizei stride, GLint vertexBufferCount); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMultiDrawArraysIndirectBindlessCountNV (GLenum mode, const void *indirect, GLsizei drawCount, GLsizei maxDrawCount, GLsizei stride, GLint vertexBufferCount); +GLAPI void APIENTRY glMultiDrawElementsIndirectBindlessCountNV (GLenum mode, GLenum type, const void *indirect, GLsizei drawCount, GLsizei maxDrawCount, GLsizei stride, GLint vertexBufferCount); +#endif +#endif /* GL_NV_bindless_multi_draw_indirect_count */ + +#ifndef GL_NV_bindless_texture +#define GL_NV_bindless_texture 1 +typedef GLuint64 (APIENTRYP PFNGLGETTEXTUREHANDLENVPROC) (GLuint texture); +typedef GLuint64 (APIENTRYP PFNGLGETTEXTURESAMPLERHANDLENVPROC) (GLuint texture, GLuint sampler); +typedef void (APIENTRYP PFNGLMAKETEXTUREHANDLERESIDENTNVPROC) (GLuint64 handle); +typedef void (APIENTRYP PFNGLMAKETEXTUREHANDLENONRESIDENTNVPROC) (GLuint64 handle); +typedef GLuint64 (APIENTRYP PFNGLGETIMAGEHANDLENVPROC) (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +typedef void (APIENTRYP PFNGLMAKEIMAGEHANDLERESIDENTNVPROC) (GLuint64 handle, GLenum access); +typedef void (APIENTRYP PFNGLMAKEIMAGEHANDLENONRESIDENTNVPROC) (GLuint64 handle); +typedef void (APIENTRYP PFNGLUNIFORMHANDLEUI64NVPROC) (GLint location, GLuint64 value); +typedef void (APIENTRYP PFNGLUNIFORMHANDLEUI64VNVPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64NVPROC) (GLuint program, GLint location, GLuint64 value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +typedef GLboolean (APIENTRYP PFNGLISTEXTUREHANDLERESIDENTNVPROC) (GLuint64 handle); +typedef GLboolean (APIENTRYP PFNGLISIMAGEHANDLERESIDENTNVPROC) (GLuint64 handle); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLuint64 APIENTRY glGetTextureHandleNV (GLuint texture); +GLAPI GLuint64 APIENTRY glGetTextureSamplerHandleNV (GLuint texture, GLuint sampler); +GLAPI void APIENTRY glMakeTextureHandleResidentNV (GLuint64 handle); +GLAPI void APIENTRY glMakeTextureHandleNonResidentNV (GLuint64 handle); +GLAPI GLuint64 APIENTRY glGetImageHandleNV (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +GLAPI void APIENTRY glMakeImageHandleResidentNV (GLuint64 handle, GLenum access); +GLAPI void APIENTRY glMakeImageHandleNonResidentNV (GLuint64 handle); +GLAPI void APIENTRY glUniformHandleui64NV (GLint location, GLuint64 value); +GLAPI void APIENTRY glUniformHandleui64vNV (GLint location, GLsizei count, const GLuint64 *value); +GLAPI void APIENTRY glProgramUniformHandleui64NV (GLuint program, GLint location, GLuint64 value); +GLAPI void APIENTRY glProgramUniformHandleui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +GLAPI GLboolean APIENTRY glIsTextureHandleResidentNV (GLuint64 handle); +GLAPI GLboolean APIENTRY glIsImageHandleResidentNV (GLuint64 handle); +#endif +#endif /* GL_NV_bindless_texture */ + +#ifndef GL_NV_blend_equation_advanced +#define 
GL_NV_blend_equation_advanced 1 +#define GL_BLEND_OVERLAP_NV 0x9281 +#define GL_BLEND_PREMULTIPLIED_SRC_NV 0x9280 +#define GL_BLUE_NV 0x1905 +#define GL_COLORBURN_NV 0x929A +#define GL_COLORDODGE_NV 0x9299 +#define GL_CONJOINT_NV 0x9284 +#define GL_CONTRAST_NV 0x92A1 +#define GL_DARKEN_NV 0x9297 +#define GL_DIFFERENCE_NV 0x929E +#define GL_DISJOINT_NV 0x9283 +#define GL_DST_ATOP_NV 0x928F +#define GL_DST_IN_NV 0x928B +#define GL_DST_NV 0x9287 +#define GL_DST_OUT_NV 0x928D +#define GL_DST_OVER_NV 0x9289 +#define GL_EXCLUSION_NV 0x92A0 +#define GL_GREEN_NV 0x1904 +#define GL_HARDLIGHT_NV 0x929B +#define GL_HARDMIX_NV 0x92A9 +#define GL_HSL_COLOR_NV 0x92AF +#define GL_HSL_HUE_NV 0x92AD +#define GL_HSL_LUMINOSITY_NV 0x92B0 +#define GL_HSL_SATURATION_NV 0x92AE +#define GL_INVERT_OVG_NV 0x92B4 +#define GL_INVERT_RGB_NV 0x92A3 +#define GL_LIGHTEN_NV 0x9298 +#define GL_LINEARBURN_NV 0x92A5 +#define GL_LINEARDODGE_NV 0x92A4 +#define GL_LINEARLIGHT_NV 0x92A7 +#define GL_MINUS_CLAMPED_NV 0x92B3 +#define GL_MINUS_NV 0x929F +#define GL_MULTIPLY_NV 0x9294 +#define GL_OVERLAY_NV 0x9296 +#define GL_PINLIGHT_NV 0x92A8 +#define GL_PLUS_CLAMPED_ALPHA_NV 0x92B2 +#define GL_PLUS_CLAMPED_NV 0x92B1 +#define GL_PLUS_DARKER_NV 0x9292 +#define GL_PLUS_NV 0x9291 +#define GL_RED_NV 0x1903 +#define GL_SCREEN_NV 0x9295 +#define GL_SOFTLIGHT_NV 0x929C +#define GL_SRC_ATOP_NV 0x928E +#define GL_SRC_IN_NV 0x928A +#define GL_SRC_NV 0x9286 +#define GL_SRC_OUT_NV 0x928C +#define GL_SRC_OVER_NV 0x9288 +#define GL_UNCORRELATED_NV 0x9282 +#define GL_VIVIDLIGHT_NV 0x92A6 +#define GL_XOR_NV 0x1506 +typedef void (APIENTRYP PFNGLBLENDPARAMETERINVPROC) (GLenum pname, GLint value); +typedef void (APIENTRYP PFNGLBLENDBARRIERNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBlendParameteriNV (GLenum pname, GLint value); +GLAPI void APIENTRY glBlendBarrierNV (void); +#endif +#endif /* GL_NV_blend_equation_advanced */ + +#ifndef GL_NV_blend_equation_advanced_coherent +#define GL_NV_blend_equation_advanced_coherent 1 +#define GL_BLEND_ADVANCED_COHERENT_NV 0x9285 +#endif /* GL_NV_blend_equation_advanced_coherent */ + +#ifndef GL_NV_blend_minmax_factor +#define GL_NV_blend_minmax_factor 1 +#endif /* GL_NV_blend_minmax_factor */ + +#ifndef GL_NV_blend_square +#define GL_NV_blend_square 1 +#endif /* GL_NV_blend_square */ + +#ifndef GL_NV_clip_space_w_scaling +#define GL_NV_clip_space_w_scaling 1 +#define GL_VIEWPORT_POSITION_W_SCALE_NV 0x937C +#define GL_VIEWPORT_POSITION_W_SCALE_X_COEFF_NV 0x937D +#define GL_VIEWPORT_POSITION_W_SCALE_Y_COEFF_NV 0x937E +typedef void (APIENTRYP PFNGLVIEWPORTPOSITIONWSCALENVPROC) (GLuint index, GLfloat xcoeff, GLfloat ycoeff); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glViewportPositionWScaleNV (GLuint index, GLfloat xcoeff, GLfloat ycoeff); +#endif +#endif /* GL_NV_clip_space_w_scaling */ + +#ifndef GL_NV_command_list +#define GL_NV_command_list 1 +#define GL_TERMINATE_SEQUENCE_COMMAND_NV 0x0000 +#define GL_NOP_COMMAND_NV 0x0001 +#define GL_DRAW_ELEMENTS_COMMAND_NV 0x0002 +#define GL_DRAW_ARRAYS_COMMAND_NV 0x0003 +#define GL_DRAW_ELEMENTS_STRIP_COMMAND_NV 0x0004 +#define GL_DRAW_ARRAYS_STRIP_COMMAND_NV 0x0005 +#define GL_DRAW_ELEMENTS_INSTANCED_COMMAND_NV 0x0006 +#define GL_DRAW_ARRAYS_INSTANCED_COMMAND_NV 0x0007 +#define GL_ELEMENT_ADDRESS_COMMAND_NV 0x0008 +#define GL_ATTRIBUTE_ADDRESS_COMMAND_NV 0x0009 +#define GL_UNIFORM_ADDRESS_COMMAND_NV 0x000A +#define GL_BLEND_COLOR_COMMAND_NV 0x000B +#define GL_STENCIL_REF_COMMAND_NV 0x000C +#define GL_LINE_WIDTH_COMMAND_NV 0x000D +#define 
GL_POLYGON_OFFSET_COMMAND_NV 0x000E +#define GL_ALPHA_REF_COMMAND_NV 0x000F +#define GL_VIEWPORT_COMMAND_NV 0x0010 +#define GL_SCISSOR_COMMAND_NV 0x0011 +#define GL_FRONT_FACE_COMMAND_NV 0x0012 +typedef void (APIENTRYP PFNGLCREATESTATESNVPROC) (GLsizei n, GLuint *states); +typedef void (APIENTRYP PFNGLDELETESTATESNVPROC) (GLsizei n, const GLuint *states); +typedef GLboolean (APIENTRYP PFNGLISSTATENVPROC) (GLuint state); +typedef void (APIENTRYP PFNGLSTATECAPTURENVPROC) (GLuint state, GLenum mode); +typedef GLuint (APIENTRYP PFNGLGETCOMMANDHEADERNVPROC) (GLenum tokenID, GLuint size); +typedef GLushort (APIENTRYP PFNGLGETSTAGEINDEXNVPROC) (GLenum shadertype); +typedef void (APIENTRYP PFNGLDRAWCOMMANDSNVPROC) (GLenum primitiveMode, GLuint buffer, const GLintptr *indirects, const GLsizei *sizes, GLuint count); +typedef void (APIENTRYP PFNGLDRAWCOMMANDSADDRESSNVPROC) (GLenum primitiveMode, const GLuint64 *indirects, const GLsizei *sizes, GLuint count); +typedef void (APIENTRYP PFNGLDRAWCOMMANDSSTATESNVPROC) (GLuint buffer, const GLintptr *indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +typedef void (APIENTRYP PFNGLDRAWCOMMANDSSTATESADDRESSNVPROC) (const GLuint64 *indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +typedef void (APIENTRYP PFNGLCREATECOMMANDLISTSNVPROC) (GLsizei n, GLuint *lists); +typedef void (APIENTRYP PFNGLDELETECOMMANDLISTSNVPROC) (GLsizei n, const GLuint *lists); +typedef GLboolean (APIENTRYP PFNGLISCOMMANDLISTNVPROC) (GLuint list); +typedef void (APIENTRYP PFNGLLISTDRAWCOMMANDSSTATESCLIENTNVPROC) (GLuint list, GLuint segment, const void **indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +typedef void (APIENTRYP PFNGLCOMMANDLISTSEGMENTSNVPROC) (GLuint list, GLuint segments); +typedef void (APIENTRYP PFNGLCOMPILECOMMANDLISTNVPROC) (GLuint list); +typedef void (APIENTRYP PFNGLCALLCOMMANDLISTNVPROC) (GLuint list); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCreateStatesNV (GLsizei n, GLuint *states); +GLAPI void APIENTRY glDeleteStatesNV (GLsizei n, const GLuint *states); +GLAPI GLboolean APIENTRY glIsStateNV (GLuint state); +GLAPI void APIENTRY glStateCaptureNV (GLuint state, GLenum mode); +GLAPI GLuint APIENTRY glGetCommandHeaderNV (GLenum tokenID, GLuint size); +GLAPI GLushort APIENTRY glGetStageIndexNV (GLenum shadertype); +GLAPI void APIENTRY glDrawCommandsNV (GLenum primitiveMode, GLuint buffer, const GLintptr *indirects, const GLsizei *sizes, GLuint count); +GLAPI void APIENTRY glDrawCommandsAddressNV (GLenum primitiveMode, const GLuint64 *indirects, const GLsizei *sizes, GLuint count); +GLAPI void APIENTRY glDrawCommandsStatesNV (GLuint buffer, const GLintptr *indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +GLAPI void APIENTRY glDrawCommandsStatesAddressNV (const GLuint64 *indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +GLAPI void APIENTRY glCreateCommandListsNV (GLsizei n, GLuint *lists); +GLAPI void APIENTRY glDeleteCommandListsNV (GLsizei n, const GLuint *lists); +GLAPI GLboolean APIENTRY glIsCommandListNV (GLuint list); +GLAPI void APIENTRY glListDrawCommandsStatesClientNV (GLuint list, GLuint segment, const void **indirects, const GLsizei *sizes, const GLuint *states, const GLuint *fbos, GLuint count); +GLAPI void APIENTRY glCommandListSegmentsNV (GLuint list, GLuint segments); +GLAPI void APIENTRY glCompileCommandListNV (GLuint list); +GLAPI void 
APIENTRY glCallCommandListNV (GLuint list); +#endif +#endif /* GL_NV_command_list */ + +#ifndef GL_NV_compute_program5 +#define GL_NV_compute_program5 1 +#define GL_COMPUTE_PROGRAM_NV 0x90FB +#define GL_COMPUTE_PROGRAM_PARAMETER_BUFFER_NV 0x90FC +#endif /* GL_NV_compute_program5 */ + +#ifndef GL_NV_conditional_render +#define GL_NV_conditional_render 1 +#define GL_QUERY_WAIT_NV 0x8E13 +#define GL_QUERY_NO_WAIT_NV 0x8E14 +#define GL_QUERY_BY_REGION_WAIT_NV 0x8E15 +#define GL_QUERY_BY_REGION_NO_WAIT_NV 0x8E16 +typedef void (APIENTRYP PFNGLBEGINCONDITIONALRENDERNVPROC) (GLuint id, GLenum mode); +typedef void (APIENTRYP PFNGLENDCONDITIONALRENDERNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBeginConditionalRenderNV (GLuint id, GLenum mode); +GLAPI void APIENTRY glEndConditionalRenderNV (void); +#endif +#endif /* GL_NV_conditional_render */ + +#ifndef GL_NV_conservative_raster +#define GL_NV_conservative_raster 1 +#define GL_CONSERVATIVE_RASTERIZATION_NV 0x9346 +#define GL_SUBPIXEL_PRECISION_BIAS_X_BITS_NV 0x9347 +#define GL_SUBPIXEL_PRECISION_BIAS_Y_BITS_NV 0x9348 +#define GL_MAX_SUBPIXEL_PRECISION_BIAS_BITS_NV 0x9349 +typedef void (APIENTRYP PFNGLSUBPIXELPRECISIONBIASNVPROC) (GLuint xbits, GLuint ybits); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSubpixelPrecisionBiasNV (GLuint xbits, GLuint ybits); +#endif +#endif /* GL_NV_conservative_raster */ + +#ifndef GL_NV_conservative_raster_dilate +#define GL_NV_conservative_raster_dilate 1 +#define GL_CONSERVATIVE_RASTER_DILATE_NV 0x9379 +#define GL_CONSERVATIVE_RASTER_DILATE_RANGE_NV 0x937A +#define GL_CONSERVATIVE_RASTER_DILATE_GRANULARITY_NV 0x937B +typedef void (APIENTRYP PFNGLCONSERVATIVERASTERPARAMETERFNVPROC) (GLenum pname, GLfloat value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glConservativeRasterParameterfNV (GLenum pname, GLfloat value); +#endif +#endif /* GL_NV_conservative_raster_dilate */ + +#ifndef GL_NV_conservative_raster_pre_snap +#define GL_NV_conservative_raster_pre_snap 1 +#define GL_CONSERVATIVE_RASTER_MODE_PRE_SNAP_NV 0x9550 +#endif /* GL_NV_conservative_raster_pre_snap */ + +#ifndef GL_NV_conservative_raster_pre_snap_triangles +#define GL_NV_conservative_raster_pre_snap_triangles 1 +#define GL_CONSERVATIVE_RASTER_MODE_NV 0x954D +#define GL_CONSERVATIVE_RASTER_MODE_POST_SNAP_NV 0x954E +#define GL_CONSERVATIVE_RASTER_MODE_PRE_SNAP_TRIANGLES_NV 0x954F +typedef void (APIENTRYP PFNGLCONSERVATIVERASTERPARAMETERINVPROC) (GLenum pname, GLint param); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glConservativeRasterParameteriNV (GLenum pname, GLint param); +#endif +#endif /* GL_NV_conservative_raster_pre_snap_triangles */ + +#ifndef GL_NV_conservative_raster_underestimation +#define GL_NV_conservative_raster_underestimation 1 +#endif /* GL_NV_conservative_raster_underestimation */ + +#ifndef GL_NV_copy_depth_to_color +#define GL_NV_copy_depth_to_color 1 +#define GL_DEPTH_STENCIL_TO_RGBA_NV 0x886E +#define GL_DEPTH_STENCIL_TO_BGRA_NV 0x886F +#endif /* GL_NV_copy_depth_to_color */ + +#ifndef GL_NV_copy_image +#define GL_NV_copy_image 1 +typedef void (APIENTRYP PFNGLCOPYIMAGESUBDATANVPROC) (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei width, GLsizei height, GLsizei depth); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCopyImageSubDataNV (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint 
dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei width, GLsizei height, GLsizei depth); +#endif +#endif /* GL_NV_copy_image */ + +#ifndef GL_NV_deep_texture3D +#define GL_NV_deep_texture3D 1 +#define GL_MAX_DEEP_3D_TEXTURE_WIDTH_HEIGHT_NV 0x90D0 +#define GL_MAX_DEEP_3D_TEXTURE_DEPTH_NV 0x90D1 +#endif /* GL_NV_deep_texture3D */ + +#ifndef GL_NV_depth_buffer_float +#define GL_NV_depth_buffer_float 1 +#define GL_DEPTH_COMPONENT32F_NV 0x8DAB +#define GL_DEPTH32F_STENCIL8_NV 0x8DAC +#define GL_FLOAT_32_UNSIGNED_INT_24_8_REV_NV 0x8DAD +#define GL_DEPTH_BUFFER_FLOAT_MODE_NV 0x8DAF +typedef void (APIENTRYP PFNGLDEPTHRANGEDNVPROC) (GLdouble zNear, GLdouble zFar); +typedef void (APIENTRYP PFNGLCLEARDEPTHDNVPROC) (GLdouble depth); +typedef void (APIENTRYP PFNGLDEPTHBOUNDSDNVPROC) (GLdouble zmin, GLdouble zmax); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDepthRangedNV (GLdouble zNear, GLdouble zFar); +GLAPI void APIENTRY glClearDepthdNV (GLdouble depth); +GLAPI void APIENTRY glDepthBoundsdNV (GLdouble zmin, GLdouble zmax); +#endif +#endif /* GL_NV_depth_buffer_float */ + +#ifndef GL_NV_depth_clamp +#define GL_NV_depth_clamp 1 +#define GL_DEPTH_CLAMP_NV 0x864F +#endif /* GL_NV_depth_clamp */ + +#ifndef GL_NV_draw_texture +#define GL_NV_draw_texture 1 +typedef void (APIENTRYP PFNGLDRAWTEXTURENVPROC) (GLuint texture, GLuint sampler, GLfloat x0, GLfloat y0, GLfloat x1, GLfloat y1, GLfloat z, GLfloat s0, GLfloat t0, GLfloat s1, GLfloat t1); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawTextureNV (GLuint texture, GLuint sampler, GLfloat x0, GLfloat y0, GLfloat x1, GLfloat y1, GLfloat z, GLfloat s0, GLfloat t0, GLfloat s1, GLfloat t1); +#endif +#endif /* GL_NV_draw_texture */ + +#ifndef GL_NV_draw_vulkan_image +#define GL_NV_draw_vulkan_image 1 +typedef void (APIENTRY *GLVULKANPROCNV)(void); +typedef void (APIENTRYP PFNGLDRAWVKIMAGENVPROC) (GLuint64 vkImage, GLuint sampler, GLfloat x0, GLfloat y0, GLfloat x1, GLfloat y1, GLfloat z, GLfloat s0, GLfloat t0, GLfloat s1, GLfloat t1); +typedef GLVULKANPROCNV (APIENTRYP PFNGLGETVKPROCADDRNVPROC) (const GLchar *name); +typedef void (APIENTRYP PFNGLWAITVKSEMAPHORENVPROC) (GLuint64 vkSemaphore); +typedef void (APIENTRYP PFNGLSIGNALVKSEMAPHORENVPROC) (GLuint64 vkSemaphore); +typedef void (APIENTRYP PFNGLSIGNALVKFENCENVPROC) (GLuint64 vkFence); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawVkImageNV (GLuint64 vkImage, GLuint sampler, GLfloat x0, GLfloat y0, GLfloat x1, GLfloat y1, GLfloat z, GLfloat s0, GLfloat t0, GLfloat s1, GLfloat t1); +GLAPI GLVULKANPROCNV APIENTRY glGetVkProcAddrNV (const GLchar *name); +GLAPI void APIENTRY glWaitVkSemaphoreNV (GLuint64 vkSemaphore); +GLAPI void APIENTRY glSignalVkSemaphoreNV (GLuint64 vkSemaphore); +GLAPI void APIENTRY glSignalVkFenceNV (GLuint64 vkFence); +#endif +#endif /* GL_NV_draw_vulkan_image */ + +#ifndef GL_NV_evaluators +#define GL_NV_evaluators 1 +#define GL_EVAL_2D_NV 0x86C0 +#define GL_EVAL_TRIANGULAR_2D_NV 0x86C1 +#define GL_MAP_TESSELLATION_NV 0x86C2 +#define GL_MAP_ATTRIB_U_ORDER_NV 0x86C3 +#define GL_MAP_ATTRIB_V_ORDER_NV 0x86C4 +#define GL_EVAL_FRACTIONAL_TESSELLATION_NV 0x86C5 +#define GL_EVAL_VERTEX_ATTRIB0_NV 0x86C6 +#define GL_EVAL_VERTEX_ATTRIB1_NV 0x86C7 +#define GL_EVAL_VERTEX_ATTRIB2_NV 0x86C8 +#define GL_EVAL_VERTEX_ATTRIB3_NV 0x86C9 +#define GL_EVAL_VERTEX_ATTRIB4_NV 0x86CA +#define GL_EVAL_VERTEX_ATTRIB5_NV 0x86CB +#define GL_EVAL_VERTEX_ATTRIB6_NV 0x86CC +#define GL_EVAL_VERTEX_ATTRIB7_NV 0x86CD +#define GL_EVAL_VERTEX_ATTRIB8_NV 0x86CE +#define 
GL_EVAL_VERTEX_ATTRIB9_NV 0x86CF +#define GL_EVAL_VERTEX_ATTRIB10_NV 0x86D0 +#define GL_EVAL_VERTEX_ATTRIB11_NV 0x86D1 +#define GL_EVAL_VERTEX_ATTRIB12_NV 0x86D2 +#define GL_EVAL_VERTEX_ATTRIB13_NV 0x86D3 +#define GL_EVAL_VERTEX_ATTRIB14_NV 0x86D4 +#define GL_EVAL_VERTEX_ATTRIB15_NV 0x86D5 +#define GL_MAX_MAP_TESSELLATION_NV 0x86D6 +#define GL_MAX_RATIONAL_EVAL_ORDER_NV 0x86D7 +typedef void (APIENTRYP PFNGLMAPCONTROLPOINTSNVPROC) (GLenum target, GLuint index, GLenum type, GLsizei ustride, GLsizei vstride, GLint uorder, GLint vorder, GLboolean packed, const void *points); +typedef void (APIENTRYP PFNGLMAPPARAMETERIVNVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLMAPPARAMETERFVNVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLGETMAPCONTROLPOINTSNVPROC) (GLenum target, GLuint index, GLenum type, GLsizei ustride, GLsizei vstride, GLboolean packed, void *points); +typedef void (APIENTRYP PFNGLGETMAPPARAMETERIVNVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETMAPPARAMETERFVNVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETMAPATTRIBPARAMETERIVNVPROC) (GLenum target, GLuint index, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETMAPATTRIBPARAMETERFVNVPROC) (GLenum target, GLuint index, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLEVALMAPSNVPROC) (GLenum target, GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMapControlPointsNV (GLenum target, GLuint index, GLenum type, GLsizei ustride, GLsizei vstride, GLint uorder, GLint vorder, GLboolean packed, const void *points); +GLAPI void APIENTRY glMapParameterivNV (GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glMapParameterfvNV (GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glGetMapControlPointsNV (GLenum target, GLuint index, GLenum type, GLsizei ustride, GLsizei vstride, GLboolean packed, void *points); +GLAPI void APIENTRY glGetMapParameterivNV (GLenum target, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetMapParameterfvNV (GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetMapAttribParameterivNV (GLenum target, GLuint index, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetMapAttribParameterfvNV (GLenum target, GLuint index, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glEvalMapsNV (GLenum target, GLenum mode); +#endif +#endif /* GL_NV_evaluators */ + +#ifndef GL_NV_explicit_multisample +#define GL_NV_explicit_multisample 1 +#define GL_SAMPLE_POSITION_NV 0x8E50 +#define GL_SAMPLE_MASK_NV 0x8E51 +#define GL_SAMPLE_MASK_VALUE_NV 0x8E52 +#define GL_TEXTURE_BINDING_RENDERBUFFER_NV 0x8E53 +#define GL_TEXTURE_RENDERBUFFER_DATA_STORE_BINDING_NV 0x8E54 +#define GL_TEXTURE_RENDERBUFFER_NV 0x8E55 +#define GL_SAMPLER_RENDERBUFFER_NV 0x8E56 +#define GL_INT_SAMPLER_RENDERBUFFER_NV 0x8E57 +#define GL_UNSIGNED_INT_SAMPLER_RENDERBUFFER_NV 0x8E58 +#define GL_MAX_SAMPLE_MASK_WORDS_NV 0x8E59 +typedef void (APIENTRYP PFNGLGETMULTISAMPLEFVNVPROC) (GLenum pname, GLuint index, GLfloat *val); +typedef void (APIENTRYP PFNGLSAMPLEMASKINDEXEDNVPROC) (GLuint index, GLbitfield mask); +typedef void (APIENTRYP PFNGLTEXRENDERBUFFERNVPROC) (GLenum target, GLuint renderbuffer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGetMultisamplefvNV (GLenum pname, GLuint index, GLfloat *val); +GLAPI void APIENTRY glSampleMaskIndexedNV (GLuint index, GLbitfield mask); +GLAPI void 
APIENTRY glTexRenderbufferNV (GLenum target, GLuint renderbuffer); +#endif +#endif /* GL_NV_explicit_multisample */ + +#ifndef GL_NV_fence +#define GL_NV_fence 1 +#define GL_ALL_COMPLETED_NV 0x84F2 +#define GL_FENCE_STATUS_NV 0x84F3 +#define GL_FENCE_CONDITION_NV 0x84F4 +typedef void (APIENTRYP PFNGLDELETEFENCESNVPROC) (GLsizei n, const GLuint *fences); +typedef void (APIENTRYP PFNGLGENFENCESNVPROC) (GLsizei n, GLuint *fences); +typedef GLboolean (APIENTRYP PFNGLISFENCENVPROC) (GLuint fence); +typedef GLboolean (APIENTRYP PFNGLTESTFENCENVPROC) (GLuint fence); +typedef void (APIENTRYP PFNGLGETFENCEIVNVPROC) (GLuint fence, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLFINISHFENCENVPROC) (GLuint fence); +typedef void (APIENTRYP PFNGLSETFENCENVPROC) (GLuint fence, GLenum condition); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDeleteFencesNV (GLsizei n, const GLuint *fences); +GLAPI void APIENTRY glGenFencesNV (GLsizei n, GLuint *fences); +GLAPI GLboolean APIENTRY glIsFenceNV (GLuint fence); +GLAPI GLboolean APIENTRY glTestFenceNV (GLuint fence); +GLAPI void APIENTRY glGetFenceivNV (GLuint fence, GLenum pname, GLint *params); +GLAPI void APIENTRY glFinishFenceNV (GLuint fence); +GLAPI void APIENTRY glSetFenceNV (GLuint fence, GLenum condition); +#endif +#endif /* GL_NV_fence */ + +#ifndef GL_NV_fill_rectangle +#define GL_NV_fill_rectangle 1 +#define GL_FILL_RECTANGLE_NV 0x933C +#endif /* GL_NV_fill_rectangle */ + +#ifndef GL_NV_float_buffer +#define GL_NV_float_buffer 1 +#define GL_FLOAT_R_NV 0x8880 +#define GL_FLOAT_RG_NV 0x8881 +#define GL_FLOAT_RGB_NV 0x8882 +#define GL_FLOAT_RGBA_NV 0x8883 +#define GL_FLOAT_R16_NV 0x8884 +#define GL_FLOAT_R32_NV 0x8885 +#define GL_FLOAT_RG16_NV 0x8886 +#define GL_FLOAT_RG32_NV 0x8887 +#define GL_FLOAT_RGB16_NV 0x8888 +#define GL_FLOAT_RGB32_NV 0x8889 +#define GL_FLOAT_RGBA16_NV 0x888A +#define GL_FLOAT_RGBA32_NV 0x888B +#define GL_TEXTURE_FLOAT_COMPONENTS_NV 0x888C +#define GL_FLOAT_CLEAR_COLOR_VALUE_NV 0x888D +#define GL_FLOAT_RGBA_MODE_NV 0x888E +#endif /* GL_NV_float_buffer */ + +#ifndef GL_NV_fog_distance +#define GL_NV_fog_distance 1 +#define GL_FOG_DISTANCE_MODE_NV 0x855A +#define GL_EYE_RADIAL_NV 0x855B +#define GL_EYE_PLANE_ABSOLUTE_NV 0x855C +#endif /* GL_NV_fog_distance */ + +#ifndef GL_NV_fragment_coverage_to_color +#define GL_NV_fragment_coverage_to_color 1 +#define GL_FRAGMENT_COVERAGE_TO_COLOR_NV 0x92DD +#define GL_FRAGMENT_COVERAGE_COLOR_NV 0x92DE +typedef void (APIENTRYP PFNGLFRAGMENTCOVERAGECOLORNVPROC) (GLuint color); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFragmentCoverageColorNV (GLuint color); +#endif +#endif /* GL_NV_fragment_coverage_to_color */ + +#ifndef GL_NV_fragment_program +#define GL_NV_fragment_program 1 +#define GL_MAX_FRAGMENT_PROGRAM_LOCAL_PARAMETERS_NV 0x8868 +#define GL_FRAGMENT_PROGRAM_NV 0x8870 +#define GL_MAX_TEXTURE_COORDS_NV 0x8871 +#define GL_MAX_TEXTURE_IMAGE_UNITS_NV 0x8872 +#define GL_FRAGMENT_PROGRAM_BINDING_NV 0x8873 +#define GL_PROGRAM_ERROR_STRING_NV 0x8874 +typedef void (APIENTRYP PFNGLPROGRAMNAMEDPARAMETER4FNVPROC) (GLuint id, GLsizei len, const GLubyte *name, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLPROGRAMNAMEDPARAMETER4FVNVPROC) (GLuint id, GLsizei len, const GLubyte *name, const GLfloat *v); +typedef void (APIENTRYP PFNGLPROGRAMNAMEDPARAMETER4DNVPROC) (GLuint id, GLsizei len, const GLubyte *name, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLPROGRAMNAMEDPARAMETER4DVNVPROC) (GLuint id, GLsizei len, const 
GLubyte *name, const GLdouble *v); +typedef void (APIENTRYP PFNGLGETPROGRAMNAMEDPARAMETERFVNVPROC) (GLuint id, GLsizei len, const GLubyte *name, GLfloat *params); +typedef void (APIENTRYP PFNGLGETPROGRAMNAMEDPARAMETERDVNVPROC) (GLuint id, GLsizei len, const GLubyte *name, GLdouble *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glProgramNamedParameter4fNV (GLuint id, GLsizei len, const GLubyte *name, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glProgramNamedParameter4fvNV (GLuint id, GLsizei len, const GLubyte *name, const GLfloat *v); +GLAPI void APIENTRY glProgramNamedParameter4dNV (GLuint id, GLsizei len, const GLubyte *name, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glProgramNamedParameter4dvNV (GLuint id, GLsizei len, const GLubyte *name, const GLdouble *v); +GLAPI void APIENTRY glGetProgramNamedParameterfvNV (GLuint id, GLsizei len, const GLubyte *name, GLfloat *params); +GLAPI void APIENTRY glGetProgramNamedParameterdvNV (GLuint id, GLsizei len, const GLubyte *name, GLdouble *params); +#endif +#endif /* GL_NV_fragment_program */ + +#ifndef GL_NV_fragment_program2 +#define GL_NV_fragment_program2 1 +#define GL_MAX_PROGRAM_EXEC_INSTRUCTIONS_NV 0x88F4 +#define GL_MAX_PROGRAM_CALL_DEPTH_NV 0x88F5 +#define GL_MAX_PROGRAM_IF_DEPTH_NV 0x88F6 +#define GL_MAX_PROGRAM_LOOP_DEPTH_NV 0x88F7 +#define GL_MAX_PROGRAM_LOOP_COUNT_NV 0x88F8 +#endif /* GL_NV_fragment_program2 */ + +#ifndef GL_NV_fragment_program4 +#define GL_NV_fragment_program4 1 +#endif /* GL_NV_fragment_program4 */ + +#ifndef GL_NV_fragment_program_option +#define GL_NV_fragment_program_option 1 +#endif /* GL_NV_fragment_program_option */ + +#ifndef GL_NV_fragment_shader_interlock +#define GL_NV_fragment_shader_interlock 1 +#endif /* GL_NV_fragment_shader_interlock */ + +#ifndef GL_NV_framebuffer_mixed_samples +#define GL_NV_framebuffer_mixed_samples 1 +#define GL_COVERAGE_MODULATION_TABLE_NV 0x9331 +#define GL_COLOR_SAMPLES_NV 0x8E20 +#define GL_DEPTH_SAMPLES_NV 0x932D +#define GL_STENCIL_SAMPLES_NV 0x932E +#define GL_MIXED_DEPTH_SAMPLES_SUPPORTED_NV 0x932F +#define GL_MIXED_STENCIL_SAMPLES_SUPPORTED_NV 0x9330 +#define GL_COVERAGE_MODULATION_NV 0x9332 +#define GL_COVERAGE_MODULATION_TABLE_SIZE_NV 0x9333 +typedef void (APIENTRYP PFNGLCOVERAGEMODULATIONTABLENVPROC) (GLsizei n, const GLfloat *v); +typedef void (APIENTRYP PFNGLGETCOVERAGEMODULATIONTABLENVPROC) (GLsizei bufsize, GLfloat *v); +typedef void (APIENTRYP PFNGLCOVERAGEMODULATIONNVPROC) (GLenum components); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCoverageModulationTableNV (GLsizei n, const GLfloat *v); +GLAPI void APIENTRY glGetCoverageModulationTableNV (GLsizei bufsize, GLfloat *v); +GLAPI void APIENTRY glCoverageModulationNV (GLenum components); +#endif +#endif /* GL_NV_framebuffer_mixed_samples */ + +#ifndef GL_NV_framebuffer_multisample_coverage +#define GL_NV_framebuffer_multisample_coverage 1 +#define GL_RENDERBUFFER_COVERAGE_SAMPLES_NV 0x8CAB +#define GL_RENDERBUFFER_COLOR_SAMPLES_NV 0x8E10 +#define GL_MAX_MULTISAMPLE_COVERAGE_MODES_NV 0x8E11 +#define GL_MULTISAMPLE_COVERAGE_MODES_NV 0x8E12 +typedef void (APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLECOVERAGENVPROC) (GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLenum internalformat, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glRenderbufferStorageMultisampleCoverageNV (GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLenum internalformat, GLsizei width, GLsizei height); +#endif 
+#endif /* GL_NV_framebuffer_multisample_coverage */ + +#ifndef GL_NV_geometry_program4 +#define GL_NV_geometry_program4 1 +#define GL_GEOMETRY_PROGRAM_NV 0x8C26 +#define GL_MAX_PROGRAM_OUTPUT_VERTICES_NV 0x8C27 +#define GL_MAX_PROGRAM_TOTAL_OUTPUT_COMPONENTS_NV 0x8C28 +typedef void (APIENTRYP PFNGLPROGRAMVERTEXLIMITNVPROC) (GLenum target, GLint limit); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTUREEXTPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level); +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTUREFACEEXTPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLenum face); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glProgramVertexLimitNV (GLenum target, GLint limit); +GLAPI void APIENTRY glFramebufferTextureEXT (GLenum target, GLenum attachment, GLuint texture, GLint level); +GLAPI void APIENTRY glFramebufferTextureFaceEXT (GLenum target, GLenum attachment, GLuint texture, GLint level, GLenum face); +#endif +#endif /* GL_NV_geometry_program4 */ + +#ifndef GL_NV_geometry_shader4 +#define GL_NV_geometry_shader4 1 +#endif /* GL_NV_geometry_shader4 */ + +#ifndef GL_NV_geometry_shader_passthrough +#define GL_NV_geometry_shader_passthrough 1 +#endif /* GL_NV_geometry_shader_passthrough */ + +#ifndef GL_NV_gpu_multicast +#define GL_NV_gpu_multicast 1 +#define GL_PER_GPU_STORAGE_BIT_NV 0x0800 +#define GL_MULTICAST_GPUS_NV 0x92BA +#define GL_RENDER_GPU_MASK_NV 0x9558 +#define GL_PER_GPU_STORAGE_NV 0x9548 +#define GL_MULTICAST_PROGRAMMABLE_SAMPLE_LOCATION_NV 0x9549 +typedef void (APIENTRYP PFNGLRENDERGPUMASKNVPROC) (GLbitfield mask); +typedef void (APIENTRYP PFNGLMULTICASTBUFFERSUBDATANVPROC) (GLbitfield gpuMask, GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +typedef void (APIENTRYP PFNGLMULTICASTCOPYBUFFERSUBDATANVPROC) (GLuint readGpu, GLbitfield writeGpuMask, GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLMULTICASTCOPYIMAGESUBDATANVPROC) (GLuint srcGpu, GLbitfield dstGpuMask, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +typedef void (APIENTRYP PFNGLMULTICASTBLITFRAMEBUFFERNVPROC) (GLuint srcGpu, GLuint dstGpu, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef void (APIENTRYP PFNGLMULTICASTFRAMEBUFFERSAMPLELOCATIONSFVNVPROC) (GLuint gpu, GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLMULTICASTBARRIERNVPROC) (void); +typedef void (APIENTRYP PFNGLMULTICASTWAITSYNCNVPROC) (GLuint signalGpu, GLbitfield waitGpuMask); +typedef void (APIENTRYP PFNGLMULTICASTGETQUERYOBJECTIVNVPROC) (GLuint gpu, GLuint id, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLMULTICASTGETQUERYOBJECTUIVNVPROC) (GLuint gpu, GLuint id, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLMULTICASTGETQUERYOBJECTI64VNVPROC) (GLuint gpu, GLuint id, GLenum pname, GLint64 *params); +typedef void (APIENTRYP PFNGLMULTICASTGETQUERYOBJECTUI64VNVPROC) (GLuint gpu, GLuint id, GLenum pname, GLuint64 *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glRenderGpuMaskNV (GLbitfield mask); +GLAPI void APIENTRY glMulticastBufferSubDataNV (GLbitfield gpuMask, GLuint buffer, GLintptr offset, GLsizeiptr size, const void *data); +GLAPI void APIENTRY 
glMulticastCopyBufferSubDataNV (GLuint readGpu, GLbitfield writeGpuMask, GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GLAPI void APIENTRY glMulticastCopyImageSubDataNV (GLuint srcGpu, GLbitfield dstGpuMask, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +GLAPI void APIENTRY glMulticastBlitFramebufferNV (GLuint srcGpu, GLuint dstGpu, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GLAPI void APIENTRY glMulticastFramebufferSampleLocationsfvNV (GLuint gpu, GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glMulticastBarrierNV (void); +GLAPI void APIENTRY glMulticastWaitSyncNV (GLuint signalGpu, GLbitfield waitGpuMask); +GLAPI void APIENTRY glMulticastGetQueryObjectivNV (GLuint gpu, GLuint id, GLenum pname, GLint *params); +GLAPI void APIENTRY glMulticastGetQueryObjectuivNV (GLuint gpu, GLuint id, GLenum pname, GLuint *params); +GLAPI void APIENTRY glMulticastGetQueryObjecti64vNV (GLuint gpu, GLuint id, GLenum pname, GLint64 *params); +GLAPI void APIENTRY glMulticastGetQueryObjectui64vNV (GLuint gpu, GLuint id, GLenum pname, GLuint64 *params); +#endif +#endif /* GL_NV_gpu_multicast */ + +#ifndef GL_NV_gpu_program4 +#define GL_NV_gpu_program4 1 +#define GL_MIN_PROGRAM_TEXEL_OFFSET_NV 0x8904 +#define GL_MAX_PROGRAM_TEXEL_OFFSET_NV 0x8905 +#define GL_PROGRAM_ATTRIB_COMPONENTS_NV 0x8906 +#define GL_PROGRAM_RESULT_COMPONENTS_NV 0x8907 +#define GL_MAX_PROGRAM_ATTRIB_COMPONENTS_NV 0x8908 +#define GL_MAX_PROGRAM_RESULT_COMPONENTS_NV 0x8909 +#define GL_MAX_PROGRAM_GENERIC_ATTRIBS_NV 0x8DA5 +#define GL_MAX_PROGRAM_GENERIC_RESULTS_NV 0x8DA6 +typedef void (APIENTRYP PFNGLPROGRAMLOCALPARAMETERI4INVPROC) (GLenum target, GLuint index, GLint x, GLint y, GLint z, GLint w); +typedef void (APIENTRYP PFNGLPROGRAMLOCALPARAMETERI4IVNVPROC) (GLenum target, GLuint index, const GLint *params); +typedef void (APIENTRYP PFNGLPROGRAMLOCALPARAMETERSI4IVNVPROC) (GLenum target, GLuint index, GLsizei count, const GLint *params); +typedef void (APIENTRYP PFNGLPROGRAMLOCALPARAMETERI4UINVPROC) (GLenum target, GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (APIENTRYP PFNGLPROGRAMLOCALPARAMETERI4UIVNVPROC) (GLenum target, GLuint index, const GLuint *params); +typedef void (APIENTRYP PFNGLPROGRAMLOCALPARAMETERSI4UIVNVPROC) (GLenum target, GLuint index, GLsizei count, const GLuint *params); +typedef void (APIENTRYP PFNGLPROGRAMENVPARAMETERI4INVPROC) (GLenum target, GLuint index, GLint x, GLint y, GLint z, GLint w); +typedef void (APIENTRYP PFNGLPROGRAMENVPARAMETERI4IVNVPROC) (GLenum target, GLuint index, const GLint *params); +typedef void (APIENTRYP PFNGLPROGRAMENVPARAMETERSI4IVNVPROC) (GLenum target, GLuint index, GLsizei count, const GLint *params); +typedef void (APIENTRYP PFNGLPROGRAMENVPARAMETERI4UINVPROC) (GLenum target, GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (APIENTRYP PFNGLPROGRAMENVPARAMETERI4UIVNVPROC) (GLenum target, GLuint index, const GLuint *params); +typedef void (APIENTRYP PFNGLPROGRAMENVPARAMETERSI4UIVNVPROC) (GLenum target, GLuint index, GLsizei count, const GLuint *params); +typedef void (APIENTRYP PFNGLGETPROGRAMLOCALPARAMETERIIVNVPROC) (GLenum target, GLuint index, GLint *params); +typedef void 
(APIENTRYP PFNGLGETPROGRAMLOCALPARAMETERIUIVNVPROC) (GLenum target, GLuint index, GLuint *params); +typedef void (APIENTRYP PFNGLGETPROGRAMENVPARAMETERIIVNVPROC) (GLenum target, GLuint index, GLint *params); +typedef void (APIENTRYP PFNGLGETPROGRAMENVPARAMETERIUIVNVPROC) (GLenum target, GLuint index, GLuint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glProgramLocalParameterI4iNV (GLenum target, GLuint index, GLint x, GLint y, GLint z, GLint w); +GLAPI void APIENTRY glProgramLocalParameterI4ivNV (GLenum target, GLuint index, const GLint *params); +GLAPI void APIENTRY glProgramLocalParametersI4ivNV (GLenum target, GLuint index, GLsizei count, const GLint *params); +GLAPI void APIENTRY glProgramLocalParameterI4uiNV (GLenum target, GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GLAPI void APIENTRY glProgramLocalParameterI4uivNV (GLenum target, GLuint index, const GLuint *params); +GLAPI void APIENTRY glProgramLocalParametersI4uivNV (GLenum target, GLuint index, GLsizei count, const GLuint *params); +GLAPI void APIENTRY glProgramEnvParameterI4iNV (GLenum target, GLuint index, GLint x, GLint y, GLint z, GLint w); +GLAPI void APIENTRY glProgramEnvParameterI4ivNV (GLenum target, GLuint index, const GLint *params); +GLAPI void APIENTRY glProgramEnvParametersI4ivNV (GLenum target, GLuint index, GLsizei count, const GLint *params); +GLAPI void APIENTRY glProgramEnvParameterI4uiNV (GLenum target, GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GLAPI void APIENTRY glProgramEnvParameterI4uivNV (GLenum target, GLuint index, const GLuint *params); +GLAPI void APIENTRY glProgramEnvParametersI4uivNV (GLenum target, GLuint index, GLsizei count, const GLuint *params); +GLAPI void APIENTRY glGetProgramLocalParameterIivNV (GLenum target, GLuint index, GLint *params); +GLAPI void APIENTRY glGetProgramLocalParameterIuivNV (GLenum target, GLuint index, GLuint *params); +GLAPI void APIENTRY glGetProgramEnvParameterIivNV (GLenum target, GLuint index, GLint *params); +GLAPI void APIENTRY glGetProgramEnvParameterIuivNV (GLenum target, GLuint index, GLuint *params); +#endif +#endif /* GL_NV_gpu_program4 */ + +#ifndef GL_NV_gpu_program5 +#define GL_NV_gpu_program5 1 +#define GL_MAX_GEOMETRY_PROGRAM_INVOCATIONS_NV 0x8E5A +#define GL_MIN_FRAGMENT_INTERPOLATION_OFFSET_NV 0x8E5B +#define GL_MAX_FRAGMENT_INTERPOLATION_OFFSET_NV 0x8E5C +#define GL_FRAGMENT_PROGRAM_INTERPOLATION_OFFSET_BITS_NV 0x8E5D +#define GL_MIN_PROGRAM_TEXTURE_GATHER_OFFSET_NV 0x8E5E +#define GL_MAX_PROGRAM_TEXTURE_GATHER_OFFSET_NV 0x8E5F +#define GL_MAX_PROGRAM_SUBROUTINE_PARAMETERS_NV 0x8F44 +#define GL_MAX_PROGRAM_SUBROUTINE_NUM_NV 0x8F45 +typedef void (APIENTRYP PFNGLPROGRAMSUBROUTINEPARAMETERSUIVNVPROC) (GLenum target, GLsizei count, const GLuint *params); +typedef void (APIENTRYP PFNGLGETPROGRAMSUBROUTINEPARAMETERUIVNVPROC) (GLenum target, GLuint index, GLuint *param); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glProgramSubroutineParametersuivNV (GLenum target, GLsizei count, const GLuint *params); +GLAPI void APIENTRY glGetProgramSubroutineParameteruivNV (GLenum target, GLuint index, GLuint *param); +#endif +#endif /* GL_NV_gpu_program5 */ + +#ifndef GL_NV_gpu_program5_mem_extended +#define GL_NV_gpu_program5_mem_extended 1 +#endif /* GL_NV_gpu_program5_mem_extended */ + +#ifndef GL_NV_gpu_shader5 +#define GL_NV_gpu_shader5 1 +#endif /* GL_NV_gpu_shader5 */ + +#ifndef GL_NV_half_float +#define GL_NV_half_float 1 +typedef unsigned short GLhalfNV; +#define GL_HALF_FLOAT_NV 0x140B +typedef void (APIENTRYP 
PFNGLVERTEX2HNVPROC) (GLhalfNV x, GLhalfNV y); +typedef void (APIENTRYP PFNGLVERTEX2HVNVPROC) (const GLhalfNV *v); +typedef void (APIENTRYP PFNGLVERTEX3HNVPROC) (GLhalfNV x, GLhalfNV y, GLhalfNV z); +typedef void (APIENTRYP PFNGLVERTEX3HVNVPROC) (const GLhalfNV *v); +typedef void (APIENTRYP PFNGLVERTEX4HNVPROC) (GLhalfNV x, GLhalfNV y, GLhalfNV z, GLhalfNV w); +typedef void (APIENTRYP PFNGLVERTEX4HVNVPROC) (const GLhalfNV *v); +typedef void (APIENTRYP PFNGLNORMAL3HNVPROC) (GLhalfNV nx, GLhalfNV ny, GLhalfNV nz); +typedef void (APIENTRYP PFNGLNORMAL3HVNVPROC) (const GLhalfNV *v); +typedef void (APIENTRYP PFNGLCOLOR3HNVPROC) (GLhalfNV red, GLhalfNV green, GLhalfNV blue); +typedef void (APIENTRYP PFNGLCOLOR3HVNVPROC) (const GLhalfNV *v); +typedef void (APIENTRYP PFNGLCOLOR4HNVPROC) (GLhalfNV red, GLhalfNV green, GLhalfNV blue, GLhalfNV alpha); +typedef void (APIENTRYP PFNGLCOLOR4HVNVPROC) (const GLhalfNV *v); +typedef void (APIENTRYP PFNGLTEXCOORD1HNVPROC) (GLhalfNV s); +typedef void (APIENTRYP PFNGLTEXCOORD1HVNVPROC) (const GLhalfNV *v); +typedef void (APIENTRYP PFNGLTEXCOORD2HNVPROC) (GLhalfNV s, GLhalfNV t); +typedef void (APIENTRYP PFNGLTEXCOORD2HVNVPROC) (const GLhalfNV *v); +typedef void (APIENTRYP PFNGLTEXCOORD3HNVPROC) (GLhalfNV s, GLhalfNV t, GLhalfNV r); +typedef void (APIENTRYP PFNGLTEXCOORD3HVNVPROC) (const GLhalfNV *v); +typedef void (APIENTRYP PFNGLTEXCOORD4HNVPROC) (GLhalfNV s, GLhalfNV t, GLhalfNV r, GLhalfNV q); +typedef void (APIENTRYP PFNGLTEXCOORD4HVNVPROC) (const GLhalfNV *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1HNVPROC) (GLenum target, GLhalfNV s); +typedef void (APIENTRYP PFNGLMULTITEXCOORD1HVNVPROC) (GLenum target, const GLhalfNV *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2HNVPROC) (GLenum target, GLhalfNV s, GLhalfNV t); +typedef void (APIENTRYP PFNGLMULTITEXCOORD2HVNVPROC) (GLenum target, const GLhalfNV *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3HNVPROC) (GLenum target, GLhalfNV s, GLhalfNV t, GLhalfNV r); +typedef void (APIENTRYP PFNGLMULTITEXCOORD3HVNVPROC) (GLenum target, const GLhalfNV *v); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4HNVPROC) (GLenum target, GLhalfNV s, GLhalfNV t, GLhalfNV r, GLhalfNV q); +typedef void (APIENTRYP PFNGLMULTITEXCOORD4HVNVPROC) (GLenum target, const GLhalfNV *v); +typedef void (APIENTRYP PFNGLFOGCOORDHNVPROC) (GLhalfNV fog); +typedef void (APIENTRYP PFNGLFOGCOORDHVNVPROC) (const GLhalfNV *fog); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3HNVPROC) (GLhalfNV red, GLhalfNV green, GLhalfNV blue); +typedef void (APIENTRYP PFNGLSECONDARYCOLOR3HVNVPROC) (const GLhalfNV *v); +typedef void (APIENTRYP PFNGLVERTEXWEIGHTHNVPROC) (GLhalfNV weight); +typedef void (APIENTRYP PFNGLVERTEXWEIGHTHVNVPROC) (const GLhalfNV *weight); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1HNVPROC) (GLuint index, GLhalfNV x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1HVNVPROC) (GLuint index, const GLhalfNV *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2HNVPROC) (GLuint index, GLhalfNV x, GLhalfNV y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2HVNVPROC) (GLuint index, const GLhalfNV *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3HNVPROC) (GLuint index, GLhalfNV x, GLhalfNV y, GLhalfNV z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3HVNVPROC) (GLuint index, const GLhalfNV *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4HNVPROC) (GLuint index, GLhalfNV x, GLhalfNV y, GLhalfNV z, GLhalfNV w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4HVNVPROC) (GLuint index, const GLhalfNV *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS1HVNVPROC) (GLuint index, 
GLsizei n, const GLhalfNV *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS2HVNVPROC) (GLuint index, GLsizei n, const GLhalfNV *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS3HVNVPROC) (GLuint index, GLsizei n, const GLhalfNV *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS4HVNVPROC) (GLuint index, GLsizei n, const GLhalfNV *v); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertex2hNV (GLhalfNV x, GLhalfNV y); +GLAPI void APIENTRY glVertex2hvNV (const GLhalfNV *v); +GLAPI void APIENTRY glVertex3hNV (GLhalfNV x, GLhalfNV y, GLhalfNV z); +GLAPI void APIENTRY glVertex3hvNV (const GLhalfNV *v); +GLAPI void APIENTRY glVertex4hNV (GLhalfNV x, GLhalfNV y, GLhalfNV z, GLhalfNV w); +GLAPI void APIENTRY glVertex4hvNV (const GLhalfNV *v); +GLAPI void APIENTRY glNormal3hNV (GLhalfNV nx, GLhalfNV ny, GLhalfNV nz); +GLAPI void APIENTRY glNormal3hvNV (const GLhalfNV *v); +GLAPI void APIENTRY glColor3hNV (GLhalfNV red, GLhalfNV green, GLhalfNV blue); +GLAPI void APIENTRY glColor3hvNV (const GLhalfNV *v); +GLAPI void APIENTRY glColor4hNV (GLhalfNV red, GLhalfNV green, GLhalfNV blue, GLhalfNV alpha); +GLAPI void APIENTRY glColor4hvNV (const GLhalfNV *v); +GLAPI void APIENTRY glTexCoord1hNV (GLhalfNV s); +GLAPI void APIENTRY glTexCoord1hvNV (const GLhalfNV *v); +GLAPI void APIENTRY glTexCoord2hNV (GLhalfNV s, GLhalfNV t); +GLAPI void APIENTRY glTexCoord2hvNV (const GLhalfNV *v); +GLAPI void APIENTRY glTexCoord3hNV (GLhalfNV s, GLhalfNV t, GLhalfNV r); +GLAPI void APIENTRY glTexCoord3hvNV (const GLhalfNV *v); +GLAPI void APIENTRY glTexCoord4hNV (GLhalfNV s, GLhalfNV t, GLhalfNV r, GLhalfNV q); +GLAPI void APIENTRY glTexCoord4hvNV (const GLhalfNV *v); +GLAPI void APIENTRY glMultiTexCoord1hNV (GLenum target, GLhalfNV s); +GLAPI void APIENTRY glMultiTexCoord1hvNV (GLenum target, const GLhalfNV *v); +GLAPI void APIENTRY glMultiTexCoord2hNV (GLenum target, GLhalfNV s, GLhalfNV t); +GLAPI void APIENTRY glMultiTexCoord2hvNV (GLenum target, const GLhalfNV *v); +GLAPI void APIENTRY glMultiTexCoord3hNV (GLenum target, GLhalfNV s, GLhalfNV t, GLhalfNV r); +GLAPI void APIENTRY glMultiTexCoord3hvNV (GLenum target, const GLhalfNV *v); +GLAPI void APIENTRY glMultiTexCoord4hNV (GLenum target, GLhalfNV s, GLhalfNV t, GLhalfNV r, GLhalfNV q); +GLAPI void APIENTRY glMultiTexCoord4hvNV (GLenum target, const GLhalfNV *v); +GLAPI void APIENTRY glFogCoordhNV (GLhalfNV fog); +GLAPI void APIENTRY glFogCoordhvNV (const GLhalfNV *fog); +GLAPI void APIENTRY glSecondaryColor3hNV (GLhalfNV red, GLhalfNV green, GLhalfNV blue); +GLAPI void APIENTRY glSecondaryColor3hvNV (const GLhalfNV *v); +GLAPI void APIENTRY glVertexWeighthNV (GLhalfNV weight); +GLAPI void APIENTRY glVertexWeighthvNV (const GLhalfNV *weight); +GLAPI void APIENTRY glVertexAttrib1hNV (GLuint index, GLhalfNV x); +GLAPI void APIENTRY glVertexAttrib1hvNV (GLuint index, const GLhalfNV *v); +GLAPI void APIENTRY glVertexAttrib2hNV (GLuint index, GLhalfNV x, GLhalfNV y); +GLAPI void APIENTRY glVertexAttrib2hvNV (GLuint index, const GLhalfNV *v); +GLAPI void APIENTRY glVertexAttrib3hNV (GLuint index, GLhalfNV x, GLhalfNV y, GLhalfNV z); +GLAPI void APIENTRY glVertexAttrib3hvNV (GLuint index, const GLhalfNV *v); +GLAPI void APIENTRY glVertexAttrib4hNV (GLuint index, GLhalfNV x, GLhalfNV y, GLhalfNV z, GLhalfNV w); +GLAPI void APIENTRY glVertexAttrib4hvNV (GLuint index, const GLhalfNV *v); +GLAPI void APIENTRY glVertexAttribs1hvNV (GLuint index, GLsizei n, const GLhalfNV *v); +GLAPI void APIENTRY glVertexAttribs2hvNV (GLuint index, GLsizei n, const GLhalfNV *v); +GLAPI void 
APIENTRY glVertexAttribs3hvNV (GLuint index, GLsizei n, const GLhalfNV *v); +GLAPI void APIENTRY glVertexAttribs4hvNV (GLuint index, GLsizei n, const GLhalfNV *v); +#endif +#endif /* GL_NV_half_float */ + +#ifndef GL_NV_internalformat_sample_query +#define GL_NV_internalformat_sample_query 1 +#define GL_MULTISAMPLES_NV 0x9371 +#define GL_SUPERSAMPLE_SCALE_X_NV 0x9372 +#define GL_SUPERSAMPLE_SCALE_Y_NV 0x9373 +#define GL_CONFORMANT_NV 0x9374 +typedef void (APIENTRYP PFNGLGETINTERNALFORMATSAMPLEIVNVPROC) (GLenum target, GLenum internalformat, GLsizei samples, GLenum pname, GLsizei bufSize, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGetInternalformatSampleivNV (GLenum target, GLenum internalformat, GLsizei samples, GLenum pname, GLsizei bufSize, GLint *params); +#endif +#endif /* GL_NV_internalformat_sample_query */ + +#ifndef GL_NV_light_max_exponent +#define GL_NV_light_max_exponent 1 +#define GL_MAX_SHININESS_NV 0x8504 +#define GL_MAX_SPOT_EXPONENT_NV 0x8505 +#endif /* GL_NV_light_max_exponent */ + +#ifndef GL_NV_multisample_coverage +#define GL_NV_multisample_coverage 1 +#endif /* GL_NV_multisample_coverage */ + +#ifndef GL_NV_multisample_filter_hint +#define GL_NV_multisample_filter_hint 1 +#define GL_MULTISAMPLE_FILTER_HINT_NV 0x8534 +#endif /* GL_NV_multisample_filter_hint */ + +#ifndef GL_NV_occlusion_query +#define GL_NV_occlusion_query 1 +#define GL_PIXEL_COUNTER_BITS_NV 0x8864 +#define GL_CURRENT_OCCLUSION_QUERY_ID_NV 0x8865 +#define GL_PIXEL_COUNT_NV 0x8866 +#define GL_PIXEL_COUNT_AVAILABLE_NV 0x8867 +typedef void (APIENTRYP PFNGLGENOCCLUSIONQUERIESNVPROC) (GLsizei n, GLuint *ids); +typedef void (APIENTRYP PFNGLDELETEOCCLUSIONQUERIESNVPROC) (GLsizei n, const GLuint *ids); +typedef GLboolean (APIENTRYP PFNGLISOCCLUSIONQUERYNVPROC) (GLuint id); +typedef void (APIENTRYP PFNGLBEGINOCCLUSIONQUERYNVPROC) (GLuint id); +typedef void (APIENTRYP PFNGLENDOCCLUSIONQUERYNVPROC) (void); +typedef void (APIENTRYP PFNGLGETOCCLUSIONQUERYIVNVPROC) (GLuint id, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETOCCLUSIONQUERYUIVNVPROC) (GLuint id, GLenum pname, GLuint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGenOcclusionQueriesNV (GLsizei n, GLuint *ids); +GLAPI void APIENTRY glDeleteOcclusionQueriesNV (GLsizei n, const GLuint *ids); +GLAPI GLboolean APIENTRY glIsOcclusionQueryNV (GLuint id); +GLAPI void APIENTRY glBeginOcclusionQueryNV (GLuint id); +GLAPI void APIENTRY glEndOcclusionQueryNV (void); +GLAPI void APIENTRY glGetOcclusionQueryivNV (GLuint id, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetOcclusionQueryuivNV (GLuint id, GLenum pname, GLuint *params); +#endif +#endif /* GL_NV_occlusion_query */ + +#ifndef GL_NV_packed_depth_stencil +#define GL_NV_packed_depth_stencil 1 +#define GL_DEPTH_STENCIL_NV 0x84F9 +#define GL_UNSIGNED_INT_24_8_NV 0x84FA +#endif /* GL_NV_packed_depth_stencil */ + +#ifndef GL_NV_parameter_buffer_object +#define GL_NV_parameter_buffer_object 1 +#define GL_MAX_PROGRAM_PARAMETER_BUFFER_BINDINGS_NV 0x8DA0 +#define GL_MAX_PROGRAM_PARAMETER_BUFFER_SIZE_NV 0x8DA1 +#define GL_VERTEX_PROGRAM_PARAMETER_BUFFER_NV 0x8DA2 +#define GL_GEOMETRY_PROGRAM_PARAMETER_BUFFER_NV 0x8DA3 +#define GL_FRAGMENT_PROGRAM_PARAMETER_BUFFER_NV 0x8DA4 +typedef void (APIENTRYP PFNGLPROGRAMBUFFERPARAMETERSFVNVPROC) (GLenum target, GLuint bindingIndex, GLuint wordIndex, GLsizei count, const GLfloat *params); +typedef void (APIENTRYP PFNGLPROGRAMBUFFERPARAMETERSIIVNVPROC) (GLenum target, GLuint bindingIndex, GLuint wordIndex, GLsizei count, 
const GLint *params); +typedef void (APIENTRYP PFNGLPROGRAMBUFFERPARAMETERSIUIVNVPROC) (GLenum target, GLuint bindingIndex, GLuint wordIndex, GLsizei count, const GLuint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glProgramBufferParametersfvNV (GLenum target, GLuint bindingIndex, GLuint wordIndex, GLsizei count, const GLfloat *params); +GLAPI void APIENTRY glProgramBufferParametersIivNV (GLenum target, GLuint bindingIndex, GLuint wordIndex, GLsizei count, const GLint *params); +GLAPI void APIENTRY glProgramBufferParametersIuivNV (GLenum target, GLuint bindingIndex, GLuint wordIndex, GLsizei count, const GLuint *params); +#endif +#endif /* GL_NV_parameter_buffer_object */ + +#ifndef GL_NV_parameter_buffer_object2 +#define GL_NV_parameter_buffer_object2 1 +#endif /* GL_NV_parameter_buffer_object2 */ + +#ifndef GL_NV_path_rendering +#define GL_NV_path_rendering 1 +#define GL_PATH_FORMAT_SVG_NV 0x9070 +#define GL_PATH_FORMAT_PS_NV 0x9071 +#define GL_STANDARD_FONT_NAME_NV 0x9072 +#define GL_SYSTEM_FONT_NAME_NV 0x9073 +#define GL_FILE_NAME_NV 0x9074 +#define GL_PATH_STROKE_WIDTH_NV 0x9075 +#define GL_PATH_END_CAPS_NV 0x9076 +#define GL_PATH_INITIAL_END_CAP_NV 0x9077 +#define GL_PATH_TERMINAL_END_CAP_NV 0x9078 +#define GL_PATH_JOIN_STYLE_NV 0x9079 +#define GL_PATH_MITER_LIMIT_NV 0x907A +#define GL_PATH_DASH_CAPS_NV 0x907B +#define GL_PATH_INITIAL_DASH_CAP_NV 0x907C +#define GL_PATH_TERMINAL_DASH_CAP_NV 0x907D +#define GL_PATH_DASH_OFFSET_NV 0x907E +#define GL_PATH_CLIENT_LENGTH_NV 0x907F +#define GL_PATH_FILL_MODE_NV 0x9080 +#define GL_PATH_FILL_MASK_NV 0x9081 +#define GL_PATH_FILL_COVER_MODE_NV 0x9082 +#define GL_PATH_STROKE_COVER_MODE_NV 0x9083 +#define GL_PATH_STROKE_MASK_NV 0x9084 +#define GL_COUNT_UP_NV 0x9088 +#define GL_COUNT_DOWN_NV 0x9089 +#define GL_PATH_OBJECT_BOUNDING_BOX_NV 0x908A +#define GL_CONVEX_HULL_NV 0x908B +#define GL_BOUNDING_BOX_NV 0x908D +#define GL_TRANSLATE_X_NV 0x908E +#define GL_TRANSLATE_Y_NV 0x908F +#define GL_TRANSLATE_2D_NV 0x9090 +#define GL_TRANSLATE_3D_NV 0x9091 +#define GL_AFFINE_2D_NV 0x9092 +#define GL_AFFINE_3D_NV 0x9094 +#define GL_TRANSPOSE_AFFINE_2D_NV 0x9096 +#define GL_TRANSPOSE_AFFINE_3D_NV 0x9098 +#define GL_UTF8_NV 0x909A +#define GL_UTF16_NV 0x909B +#define GL_BOUNDING_BOX_OF_BOUNDING_BOXES_NV 0x909C +#define GL_PATH_COMMAND_COUNT_NV 0x909D +#define GL_PATH_COORD_COUNT_NV 0x909E +#define GL_PATH_DASH_ARRAY_COUNT_NV 0x909F +#define GL_PATH_COMPUTED_LENGTH_NV 0x90A0 +#define GL_PATH_FILL_BOUNDING_BOX_NV 0x90A1 +#define GL_PATH_STROKE_BOUNDING_BOX_NV 0x90A2 +#define GL_SQUARE_NV 0x90A3 +#define GL_ROUND_NV 0x90A4 +#define GL_TRIANGULAR_NV 0x90A5 +#define GL_BEVEL_NV 0x90A6 +#define GL_MITER_REVERT_NV 0x90A7 +#define GL_MITER_TRUNCATE_NV 0x90A8 +#define GL_SKIP_MISSING_GLYPH_NV 0x90A9 +#define GL_USE_MISSING_GLYPH_NV 0x90AA +#define GL_PATH_ERROR_POSITION_NV 0x90AB +#define GL_ACCUM_ADJACENT_PAIRS_NV 0x90AD +#define GL_ADJACENT_PAIRS_NV 0x90AE +#define GL_FIRST_TO_REST_NV 0x90AF +#define GL_PATH_GEN_MODE_NV 0x90B0 +#define GL_PATH_GEN_COEFF_NV 0x90B1 +#define GL_PATH_GEN_COMPONENTS_NV 0x90B3 +#define GL_PATH_STENCIL_FUNC_NV 0x90B7 +#define GL_PATH_STENCIL_REF_NV 0x90B8 +#define GL_PATH_STENCIL_VALUE_MASK_NV 0x90B9 +#define GL_PATH_STENCIL_DEPTH_OFFSET_FACTOR_NV 0x90BD +#define GL_PATH_STENCIL_DEPTH_OFFSET_UNITS_NV 0x90BE +#define GL_PATH_COVER_DEPTH_FUNC_NV 0x90BF +#define GL_PATH_DASH_OFFSET_RESET_NV 0x90B4 +#define GL_MOVE_TO_RESETS_NV 0x90B5 +#define GL_MOVE_TO_CONTINUES_NV 0x90B6 +#define GL_CLOSE_PATH_NV 0x00 +#define GL_MOVE_TO_NV 
0x02 +#define GL_RELATIVE_MOVE_TO_NV 0x03 +#define GL_LINE_TO_NV 0x04 +#define GL_RELATIVE_LINE_TO_NV 0x05 +#define GL_HORIZONTAL_LINE_TO_NV 0x06 +#define GL_RELATIVE_HORIZONTAL_LINE_TO_NV 0x07 +#define GL_VERTICAL_LINE_TO_NV 0x08 +#define GL_RELATIVE_VERTICAL_LINE_TO_NV 0x09 +#define GL_QUADRATIC_CURVE_TO_NV 0x0A +#define GL_RELATIVE_QUADRATIC_CURVE_TO_NV 0x0B +#define GL_CUBIC_CURVE_TO_NV 0x0C +#define GL_RELATIVE_CUBIC_CURVE_TO_NV 0x0D +#define GL_SMOOTH_QUADRATIC_CURVE_TO_NV 0x0E +#define GL_RELATIVE_SMOOTH_QUADRATIC_CURVE_TO_NV 0x0F +#define GL_SMOOTH_CUBIC_CURVE_TO_NV 0x10 +#define GL_RELATIVE_SMOOTH_CUBIC_CURVE_TO_NV 0x11 +#define GL_SMALL_CCW_ARC_TO_NV 0x12 +#define GL_RELATIVE_SMALL_CCW_ARC_TO_NV 0x13 +#define GL_SMALL_CW_ARC_TO_NV 0x14 +#define GL_RELATIVE_SMALL_CW_ARC_TO_NV 0x15 +#define GL_LARGE_CCW_ARC_TO_NV 0x16 +#define GL_RELATIVE_LARGE_CCW_ARC_TO_NV 0x17 +#define GL_LARGE_CW_ARC_TO_NV 0x18 +#define GL_RELATIVE_LARGE_CW_ARC_TO_NV 0x19 +#define GL_RESTART_PATH_NV 0xF0 +#define GL_DUP_FIRST_CUBIC_CURVE_TO_NV 0xF2 +#define GL_DUP_LAST_CUBIC_CURVE_TO_NV 0xF4 +#define GL_RECT_NV 0xF6 +#define GL_CIRCULAR_CCW_ARC_TO_NV 0xF8 +#define GL_CIRCULAR_CW_ARC_TO_NV 0xFA +#define GL_CIRCULAR_TANGENT_ARC_TO_NV 0xFC +#define GL_ARC_TO_NV 0xFE +#define GL_RELATIVE_ARC_TO_NV 0xFF +#define GL_BOLD_BIT_NV 0x01 +#define GL_ITALIC_BIT_NV 0x02 +#define GL_GLYPH_WIDTH_BIT_NV 0x01 +#define GL_GLYPH_HEIGHT_BIT_NV 0x02 +#define GL_GLYPH_HORIZONTAL_BEARING_X_BIT_NV 0x04 +#define GL_GLYPH_HORIZONTAL_BEARING_Y_BIT_NV 0x08 +#define GL_GLYPH_HORIZONTAL_BEARING_ADVANCE_BIT_NV 0x10 +#define GL_GLYPH_VERTICAL_BEARING_X_BIT_NV 0x20 +#define GL_GLYPH_VERTICAL_BEARING_Y_BIT_NV 0x40 +#define GL_GLYPH_VERTICAL_BEARING_ADVANCE_BIT_NV 0x80 +#define GL_GLYPH_HAS_KERNING_BIT_NV 0x100 +#define GL_FONT_X_MIN_BOUNDS_BIT_NV 0x00010000 +#define GL_FONT_Y_MIN_BOUNDS_BIT_NV 0x00020000 +#define GL_FONT_X_MAX_BOUNDS_BIT_NV 0x00040000 +#define GL_FONT_Y_MAX_BOUNDS_BIT_NV 0x00080000 +#define GL_FONT_UNITS_PER_EM_BIT_NV 0x00100000 +#define GL_FONT_ASCENDER_BIT_NV 0x00200000 +#define GL_FONT_DESCENDER_BIT_NV 0x00400000 +#define GL_FONT_HEIGHT_BIT_NV 0x00800000 +#define GL_FONT_MAX_ADVANCE_WIDTH_BIT_NV 0x01000000 +#define GL_FONT_MAX_ADVANCE_HEIGHT_BIT_NV 0x02000000 +#define GL_FONT_UNDERLINE_POSITION_BIT_NV 0x04000000 +#define GL_FONT_UNDERLINE_THICKNESS_BIT_NV 0x08000000 +#define GL_FONT_HAS_KERNING_BIT_NV 0x10000000 +#define GL_ROUNDED_RECT_NV 0xE8 +#define GL_RELATIVE_ROUNDED_RECT_NV 0xE9 +#define GL_ROUNDED_RECT2_NV 0xEA +#define GL_RELATIVE_ROUNDED_RECT2_NV 0xEB +#define GL_ROUNDED_RECT4_NV 0xEC +#define GL_RELATIVE_ROUNDED_RECT4_NV 0xED +#define GL_ROUNDED_RECT8_NV 0xEE +#define GL_RELATIVE_ROUNDED_RECT8_NV 0xEF +#define GL_RELATIVE_RECT_NV 0xF7 +#define GL_FONT_GLYPHS_AVAILABLE_NV 0x9368 +#define GL_FONT_TARGET_UNAVAILABLE_NV 0x9369 +#define GL_FONT_UNAVAILABLE_NV 0x936A +#define GL_FONT_UNINTELLIGIBLE_NV 0x936B +#define GL_CONIC_CURVE_TO_NV 0x1A +#define GL_RELATIVE_CONIC_CURVE_TO_NV 0x1B +#define GL_FONT_NUM_GLYPH_INDICES_BIT_NV 0x20000000 +#define GL_STANDARD_FONT_FORMAT_NV 0x936C +#define GL_2_BYTES_NV 0x1407 +#define GL_3_BYTES_NV 0x1408 +#define GL_4_BYTES_NV 0x1409 +#define GL_EYE_LINEAR_NV 0x2400 +#define GL_OBJECT_LINEAR_NV 0x2401 +#define GL_CONSTANT_NV 0x8576 +#define GL_PATH_FOG_GEN_MODE_NV 0x90AC +#define GL_PRIMARY_COLOR_NV 0x852C +#define GL_SECONDARY_COLOR_NV 0x852D +#define GL_PATH_GEN_COLOR_FORMAT_NV 0x90B2 +#define GL_PATH_PROJECTION_NV 0x1701 +#define GL_PATH_MODELVIEW_NV 0x1700 +#define 
GL_PATH_MODELVIEW_STACK_DEPTH_NV 0x0BA3 +#define GL_PATH_MODELVIEW_MATRIX_NV 0x0BA6 +#define GL_PATH_MAX_MODELVIEW_STACK_DEPTH_NV 0x0D36 +#define GL_PATH_TRANSPOSE_MODELVIEW_MATRIX_NV 0x84E3 +#define GL_PATH_PROJECTION_STACK_DEPTH_NV 0x0BA4 +#define GL_PATH_PROJECTION_MATRIX_NV 0x0BA7 +#define GL_PATH_MAX_PROJECTION_STACK_DEPTH_NV 0x0D38 +#define GL_PATH_TRANSPOSE_PROJECTION_MATRIX_NV 0x84E4 +#define GL_FRAGMENT_INPUT_NV 0x936D +typedef GLuint (APIENTRYP PFNGLGENPATHSNVPROC) (GLsizei range); +typedef void (APIENTRYP PFNGLDELETEPATHSNVPROC) (GLuint path, GLsizei range); +typedef GLboolean (APIENTRYP PFNGLISPATHNVPROC) (GLuint path); +typedef void (APIENTRYP PFNGLPATHCOMMANDSNVPROC) (GLuint path, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (APIENTRYP PFNGLPATHCOORDSNVPROC) (GLuint path, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (APIENTRYP PFNGLPATHSUBCOMMANDSNVPROC) (GLuint path, GLsizei commandStart, GLsizei commandsToDelete, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (APIENTRYP PFNGLPATHSUBCOORDSNVPROC) (GLuint path, GLsizei coordStart, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (APIENTRYP PFNGLPATHSTRINGNVPROC) (GLuint path, GLenum format, GLsizei length, const void *pathString); +typedef void (APIENTRYP PFNGLPATHGLYPHSNVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLsizei numGlyphs, GLenum type, const void *charcodes, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (APIENTRYP PFNGLPATHGLYPHRANGENVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyph, GLsizei numGlyphs, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (APIENTRYP PFNGLWEIGHTPATHSNVPROC) (GLuint resultPath, GLsizei numPaths, const GLuint *paths, const GLfloat *weights); +typedef void (APIENTRYP PFNGLCOPYPATHNVPROC) (GLuint resultPath, GLuint srcPath); +typedef void (APIENTRYP PFNGLINTERPOLATEPATHSNVPROC) (GLuint resultPath, GLuint pathA, GLuint pathB, GLfloat weight); +typedef void (APIENTRYP PFNGLTRANSFORMPATHNVPROC) (GLuint resultPath, GLuint srcPath, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP PFNGLPATHPARAMETERIVNVPROC) (GLuint path, GLenum pname, const GLint *value); +typedef void (APIENTRYP PFNGLPATHPARAMETERINVPROC) (GLuint path, GLenum pname, GLint value); +typedef void (APIENTRYP PFNGLPATHPARAMETERFVNVPROC) (GLuint path, GLenum pname, const GLfloat *value); +typedef void (APIENTRYP PFNGLPATHPARAMETERFNVPROC) (GLuint path, GLenum pname, GLfloat value); +typedef void (APIENTRYP PFNGLPATHDASHARRAYNVPROC) (GLuint path, GLsizei dashCount, const GLfloat *dashArray); +typedef void (APIENTRYP PFNGLPATHSTENCILFUNCNVPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (APIENTRYP PFNGLPATHSTENCILDEPTHOFFSETNVPROC) (GLfloat factor, GLfloat units); +typedef void (APIENTRYP PFNGLSTENCILFILLPATHNVPROC) (GLuint path, GLenum fillMode, GLuint mask); +typedef void (APIENTRYP PFNGLSTENCILSTROKEPATHNVPROC) (GLuint path, GLint reference, GLuint mask); +typedef void (APIENTRYP PFNGLSTENCILFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP 
PFNGLSTENCILSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP PFNGLPATHCOVERDEPTHFUNCNVPROC) (GLenum func); +typedef void (APIENTRYP PFNGLCOVERFILLPATHNVPROC) (GLuint path, GLenum coverMode); +typedef void (APIENTRYP PFNGLCOVERSTROKEPATHNVPROC) (GLuint path, GLenum coverMode); +typedef void (APIENTRYP PFNGLCOVERFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP PFNGLCOVERSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP PFNGLGETPATHPARAMETERIVNVPROC) (GLuint path, GLenum pname, GLint *value); +typedef void (APIENTRYP PFNGLGETPATHPARAMETERFVNVPROC) (GLuint path, GLenum pname, GLfloat *value); +typedef void (APIENTRYP PFNGLGETPATHCOMMANDSNVPROC) (GLuint path, GLubyte *commands); +typedef void (APIENTRYP PFNGLGETPATHCOORDSNVPROC) (GLuint path, GLfloat *coords); +typedef void (APIENTRYP PFNGLGETPATHDASHARRAYNVPROC) (GLuint path, GLfloat *dashArray); +typedef void (APIENTRYP PFNGLGETPATHMETRICSNVPROC) (GLbitfield metricQueryMask, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLsizei stride, GLfloat *metrics); +typedef void (APIENTRYP PFNGLGETPATHMETRICRANGENVPROC) (GLbitfield metricQueryMask, GLuint firstPathName, GLsizei numPaths, GLsizei stride, GLfloat *metrics); +typedef void (APIENTRYP PFNGLGETPATHSPACINGNVPROC) (GLenum pathListMode, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLfloat advanceScale, GLfloat kerningScale, GLenum transformType, GLfloat *returnedSpacing); +typedef GLboolean (APIENTRYP PFNGLISPOINTINFILLPATHNVPROC) (GLuint path, GLuint mask, GLfloat x, GLfloat y); +typedef GLboolean (APIENTRYP PFNGLISPOINTINSTROKEPATHNVPROC) (GLuint path, GLfloat x, GLfloat y); +typedef GLfloat (APIENTRYP PFNGLGETPATHLENGTHNVPROC) (GLuint path, GLsizei startSegment, GLsizei numSegments); +typedef GLboolean (APIENTRYP PFNGLPOINTALONGPATHNVPROC) (GLuint path, GLsizei startSegment, GLsizei numSegments, GLfloat distance, GLfloat *x, GLfloat *y, GLfloat *tangentX, GLfloat *tangentY); +typedef void (APIENTRYP PFNGLMATRIXLOAD3X2FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXLOAD3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXLOADTRANSPOSE3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXMULT3X2FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXMULT3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLMATRIXMULTTRANSPOSE3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (APIENTRYP PFNGLSTENCILTHENCOVERFILLPATHNVPROC) (GLuint path, GLenum fillMode, GLuint mask, GLenum coverMode); +typedef void (APIENTRYP PFNGLSTENCILTHENCOVERSTROKEPATHNVPROC) (GLuint path, GLint reference, GLuint mask, GLenum coverMode); +typedef void (APIENTRYP PFNGLSTENCILTHENCOVERFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (APIENTRYP 
PFNGLSTENCILTHENCOVERSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef GLenum (APIENTRYP PFNGLPATHGLYPHINDEXRANGENVPROC) (GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint pathParameterTemplate, GLfloat emScale, GLuint baseAndCount[2]); +typedef GLenum (APIENTRYP PFNGLPATHGLYPHINDEXARRAYNVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef GLenum (APIENTRYP PFNGLPATHMEMORYGLYPHINDEXARRAYNVPROC) (GLuint firstPathName, GLenum fontTarget, GLsizeiptr fontSize, const void *fontData, GLsizei faceIndex, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (APIENTRYP PFNGLPROGRAMPATHFRAGMENTINPUTGENNVPROC) (GLuint program, GLint location, GLenum genMode, GLint components, const GLfloat *coeffs); +typedef void (APIENTRYP PFNGLGETPROGRAMRESOURCEFVNVPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLfloat *params); +typedef void (APIENTRYP PFNGLPATHCOLORGENNVPROC) (GLenum color, GLenum genMode, GLenum colorFormat, const GLfloat *coeffs); +typedef void (APIENTRYP PFNGLPATHTEXGENNVPROC) (GLenum texCoordSet, GLenum genMode, GLint components, const GLfloat *coeffs); +typedef void (APIENTRYP PFNGLPATHFOGGENNVPROC) (GLenum genMode); +typedef void (APIENTRYP PFNGLGETPATHCOLORGENIVNVPROC) (GLenum color, GLenum pname, GLint *value); +typedef void (APIENTRYP PFNGLGETPATHCOLORGENFVNVPROC) (GLenum color, GLenum pname, GLfloat *value); +typedef void (APIENTRYP PFNGLGETPATHTEXGENIVNVPROC) (GLenum texCoordSet, GLenum pname, GLint *value); +typedef void (APIENTRYP PFNGLGETPATHTEXGENFVNVPROC) (GLenum texCoordSet, GLenum pname, GLfloat *value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLuint APIENTRY glGenPathsNV (GLsizei range); +GLAPI void APIENTRY glDeletePathsNV (GLuint path, GLsizei range); +GLAPI GLboolean APIENTRY glIsPathNV (GLuint path); +GLAPI void APIENTRY glPathCommandsNV (GLuint path, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +GLAPI void APIENTRY glPathCoordsNV (GLuint path, GLsizei numCoords, GLenum coordType, const void *coords); +GLAPI void APIENTRY glPathSubCommandsNV (GLuint path, GLsizei commandStart, GLsizei commandsToDelete, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +GLAPI void APIENTRY glPathSubCoordsNV (GLuint path, GLsizei coordStart, GLsizei numCoords, GLenum coordType, const void *coords); +GLAPI void APIENTRY glPathStringNV (GLuint path, GLenum format, GLsizei length, const void *pathString); +GLAPI void APIENTRY glPathGlyphsNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLsizei numGlyphs, GLenum type, const void *charcodes, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GLAPI void APIENTRY glPathGlyphRangeNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyph, GLsizei numGlyphs, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GLAPI void APIENTRY glWeightPathsNV (GLuint resultPath, GLsizei numPaths, const GLuint *paths, const GLfloat *weights); +GLAPI void APIENTRY 
glCopyPathNV (GLuint resultPath, GLuint srcPath); +GLAPI void APIENTRY glInterpolatePathsNV (GLuint resultPath, GLuint pathA, GLuint pathB, GLfloat weight); +GLAPI void APIENTRY glTransformPathNV (GLuint resultPath, GLuint srcPath, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glPathParameterivNV (GLuint path, GLenum pname, const GLint *value); +GLAPI void APIENTRY glPathParameteriNV (GLuint path, GLenum pname, GLint value); +GLAPI void APIENTRY glPathParameterfvNV (GLuint path, GLenum pname, const GLfloat *value); +GLAPI void APIENTRY glPathParameterfNV (GLuint path, GLenum pname, GLfloat value); +GLAPI void APIENTRY glPathDashArrayNV (GLuint path, GLsizei dashCount, const GLfloat *dashArray); +GLAPI void APIENTRY glPathStencilFuncNV (GLenum func, GLint ref, GLuint mask); +GLAPI void APIENTRY glPathStencilDepthOffsetNV (GLfloat factor, GLfloat units); +GLAPI void APIENTRY glStencilFillPathNV (GLuint path, GLenum fillMode, GLuint mask); +GLAPI void APIENTRY glStencilStrokePathNV (GLuint path, GLint reference, GLuint mask); +GLAPI void APIENTRY glStencilFillPathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glStencilStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glPathCoverDepthFuncNV (GLenum func); +GLAPI void APIENTRY glCoverFillPathNV (GLuint path, GLenum coverMode); +GLAPI void APIENTRY glCoverStrokePathNV (GLuint path, GLenum coverMode); +GLAPI void APIENTRY glCoverFillPathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glCoverStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glGetPathParameterivNV (GLuint path, GLenum pname, GLint *value); +GLAPI void APIENTRY glGetPathParameterfvNV (GLuint path, GLenum pname, GLfloat *value); +GLAPI void APIENTRY glGetPathCommandsNV (GLuint path, GLubyte *commands); +GLAPI void APIENTRY glGetPathCoordsNV (GLuint path, GLfloat *coords); +GLAPI void APIENTRY glGetPathDashArrayNV (GLuint path, GLfloat *dashArray); +GLAPI void APIENTRY glGetPathMetricsNV (GLbitfield metricQueryMask, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLsizei stride, GLfloat *metrics); +GLAPI void APIENTRY glGetPathMetricRangeNV (GLbitfield metricQueryMask, GLuint firstPathName, GLsizei numPaths, GLsizei stride, GLfloat *metrics); +GLAPI void APIENTRY glGetPathSpacingNV (GLenum pathListMode, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLfloat advanceScale, GLfloat kerningScale, GLenum transformType, GLfloat *returnedSpacing); +GLAPI GLboolean APIENTRY glIsPointInFillPathNV (GLuint path, GLuint mask, GLfloat x, GLfloat y); +GLAPI GLboolean APIENTRY glIsPointInStrokePathNV (GLuint path, GLfloat x, GLfloat y); +GLAPI GLfloat APIENTRY glGetPathLengthNV (GLuint path, GLsizei startSegment, GLsizei numSegments); +GLAPI GLboolean APIENTRY glPointAlongPathNV (GLuint path, GLsizei startSegment, GLsizei numSegments, GLfloat distance, GLfloat *x, GLfloat *y, GLfloat *tangentX, GLfloat *tangentY); +GLAPI void APIENTRY 
glMatrixLoad3x2fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glMatrixLoad3x3fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glMatrixLoadTranspose3x3fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glMatrixMult3x2fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glMatrixMult3x3fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glMatrixMultTranspose3x3fNV (GLenum matrixMode, const GLfloat *m); +GLAPI void APIENTRY glStencilThenCoverFillPathNV (GLuint path, GLenum fillMode, GLuint mask, GLenum coverMode); +GLAPI void APIENTRY glStencilThenCoverStrokePathNV (GLuint path, GLint reference, GLuint mask, GLenum coverMode); +GLAPI void APIENTRY glStencilThenCoverFillPathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GLAPI void APIENTRY glStencilThenCoverStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GLAPI GLenum APIENTRY glPathGlyphIndexRangeNV (GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint pathParameterTemplate, GLfloat emScale, GLuint baseAndCount[2]); +GLAPI GLenum APIENTRY glPathGlyphIndexArrayNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GLAPI GLenum APIENTRY glPathMemoryGlyphIndexArrayNV (GLuint firstPathName, GLenum fontTarget, GLsizeiptr fontSize, const void *fontData, GLsizei faceIndex, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GLAPI void APIENTRY glProgramPathFragmentInputGenNV (GLuint program, GLint location, GLenum genMode, GLint components, const GLfloat *coeffs); +GLAPI void APIENTRY glGetProgramResourcefvNV (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLfloat *params); +GLAPI void APIENTRY glPathColorGenNV (GLenum color, GLenum genMode, GLenum colorFormat, const GLfloat *coeffs); +GLAPI void APIENTRY glPathTexGenNV (GLenum texCoordSet, GLenum genMode, GLint components, const GLfloat *coeffs); +GLAPI void APIENTRY glPathFogGenNV (GLenum genMode); +GLAPI void APIENTRY glGetPathColorGenivNV (GLenum color, GLenum pname, GLint *value); +GLAPI void APIENTRY glGetPathColorGenfvNV (GLenum color, GLenum pname, GLfloat *value); +GLAPI void APIENTRY glGetPathTexGenivNV (GLenum texCoordSet, GLenum pname, GLint *value); +GLAPI void APIENTRY glGetPathTexGenfvNV (GLenum texCoordSet, GLenum pname, GLfloat *value); +#endif +#endif /* GL_NV_path_rendering */ + +#ifndef GL_NV_path_rendering_shared_edge +#define GL_NV_path_rendering_shared_edge 1 +#define GL_SHARED_EDGE_NV 0xC0 +#endif /* GL_NV_path_rendering_shared_edge */ + +#ifndef GL_NV_pixel_data_range +#define GL_NV_pixel_data_range 1 +#define GL_WRITE_PIXEL_DATA_RANGE_NV 0x8878 +#define GL_READ_PIXEL_DATA_RANGE_NV 0x8879 +#define GL_WRITE_PIXEL_DATA_RANGE_LENGTH_NV 0x887A +#define GL_READ_PIXEL_DATA_RANGE_LENGTH_NV 0x887B +#define GL_WRITE_PIXEL_DATA_RANGE_POINTER_NV 0x887C +#define GL_READ_PIXEL_DATA_RANGE_POINTER_NV 0x887D +typedef void (APIENTRYP PFNGLPIXELDATARANGENVPROC) (GLenum target, GLsizei length, const void *pointer); +typedef void (APIENTRYP PFNGLFLUSHPIXELDATARANGENVPROC) 
(GLenum target); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPixelDataRangeNV (GLenum target, GLsizei length, const void *pointer); +GLAPI void APIENTRY glFlushPixelDataRangeNV (GLenum target); +#endif +#endif /* GL_NV_pixel_data_range */ + +#ifndef GL_NV_point_sprite +#define GL_NV_point_sprite 1 +#define GL_POINT_SPRITE_NV 0x8861 +#define GL_COORD_REPLACE_NV 0x8862 +#define GL_POINT_SPRITE_R_MODE_NV 0x8863 +typedef void (APIENTRYP PFNGLPOINTPARAMETERINVPROC) (GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLPOINTPARAMETERIVNVPROC) (GLenum pname, const GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPointParameteriNV (GLenum pname, GLint param); +GLAPI void APIENTRY glPointParameterivNV (GLenum pname, const GLint *params); +#endif +#endif /* GL_NV_point_sprite */ + +#ifndef GL_NV_present_video +#define GL_NV_present_video 1 +#define GL_FRAME_NV 0x8E26 +#define GL_FIELDS_NV 0x8E27 +#define GL_CURRENT_TIME_NV 0x8E28 +#define GL_NUM_FILL_STREAMS_NV 0x8E29 +#define GL_PRESENT_TIME_NV 0x8E2A +#define GL_PRESENT_DURATION_NV 0x8E2B +typedef void (APIENTRYP PFNGLPRESENTFRAMEKEYEDNVPROC) (GLuint video_slot, GLuint64EXT minPresentTime, GLuint beginPresentTimeId, GLuint presentDurationId, GLenum type, GLenum target0, GLuint fill0, GLuint key0, GLenum target1, GLuint fill1, GLuint key1); +typedef void (APIENTRYP PFNGLPRESENTFRAMEDUALFILLNVPROC) (GLuint video_slot, GLuint64EXT minPresentTime, GLuint beginPresentTimeId, GLuint presentDurationId, GLenum type, GLenum target0, GLuint fill0, GLenum target1, GLuint fill1, GLenum target2, GLuint fill2, GLenum target3, GLuint fill3); +typedef void (APIENTRYP PFNGLGETVIDEOIVNVPROC) (GLuint video_slot, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETVIDEOUIVNVPROC) (GLuint video_slot, GLenum pname, GLuint *params); +typedef void (APIENTRYP PFNGLGETVIDEOI64VNVPROC) (GLuint video_slot, GLenum pname, GLint64EXT *params); +typedef void (APIENTRYP PFNGLGETVIDEOUI64VNVPROC) (GLuint video_slot, GLenum pname, GLuint64EXT *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPresentFrameKeyedNV (GLuint video_slot, GLuint64EXT minPresentTime, GLuint beginPresentTimeId, GLuint presentDurationId, GLenum type, GLenum target0, GLuint fill0, GLuint key0, GLenum target1, GLuint fill1, GLuint key1); +GLAPI void APIENTRY glPresentFrameDualFillNV (GLuint video_slot, GLuint64EXT minPresentTime, GLuint beginPresentTimeId, GLuint presentDurationId, GLenum type, GLenum target0, GLuint fill0, GLenum target1, GLuint fill1, GLenum target2, GLuint fill2, GLenum target3, GLuint fill3); +GLAPI void APIENTRY glGetVideoivNV (GLuint video_slot, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetVideouivNV (GLuint video_slot, GLenum pname, GLuint *params); +GLAPI void APIENTRY glGetVideoi64vNV (GLuint video_slot, GLenum pname, GLint64EXT *params); +GLAPI void APIENTRY glGetVideoui64vNV (GLuint video_slot, GLenum pname, GLuint64EXT *params); +#endif +#endif /* GL_NV_present_video */ + +#ifndef GL_NV_primitive_restart +#define GL_NV_primitive_restart 1 +#define GL_PRIMITIVE_RESTART_NV 0x8558 +#define GL_PRIMITIVE_RESTART_INDEX_NV 0x8559 +typedef void (APIENTRYP PFNGLPRIMITIVERESTARTNVPROC) (void); +typedef void (APIENTRYP PFNGLPRIMITIVERESTARTINDEXNVPROC) (GLuint index); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPrimitiveRestartNV (void); +GLAPI void APIENTRY glPrimitiveRestartIndexNV (GLuint index); +#endif +#endif /* GL_NV_primitive_restart */ + +#ifndef GL_NV_query_resource +#define GL_NV_query_resource 1 +#define 
GL_QUERY_RESOURCE_TYPE_VIDMEM_ALLOC_NV 0x9540 +#define GL_QUERY_RESOURCE_MEMTYPE_VIDMEM_NV 0x9542 +#define GL_QUERY_RESOURCE_SYS_RESERVED_NV 0x9544 +#define GL_QUERY_RESOURCE_TEXTURE_NV 0x9545 +#define GL_QUERY_RESOURCE_RENDERBUFFER_NV 0x9546 +#define GL_QUERY_RESOURCE_BUFFEROBJECT_NV 0x9547 +typedef GLint (APIENTRYP PFNGLQUERYRESOURCENVPROC) (GLenum queryType, GLint tagId, GLuint bufSize, GLint *buffer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLint APIENTRY glQueryResourceNV (GLenum queryType, GLint tagId, GLuint bufSize, GLint *buffer); +#endif +#endif /* GL_NV_query_resource */ + +#ifndef GL_NV_query_resource_tag +#define GL_NV_query_resource_tag 1 +typedef void (APIENTRYP PFNGLGENQUERYRESOURCETAGNVPROC) (GLsizei n, GLint *tagIds); +typedef void (APIENTRYP PFNGLDELETEQUERYRESOURCETAGNVPROC) (GLsizei n, const GLint *tagIds); +typedef void (APIENTRYP PFNGLQUERYRESOURCETAGNVPROC) (GLint tagId, const GLchar *tagString); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGenQueryResourceTagNV (GLsizei n, GLint *tagIds); +GLAPI void APIENTRY glDeleteQueryResourceTagNV (GLsizei n, const GLint *tagIds); +GLAPI void APIENTRY glQueryResourceTagNV (GLint tagId, const GLchar *tagString); +#endif +#endif /* GL_NV_query_resource_tag */ + +#ifndef GL_NV_register_combiners +#define GL_NV_register_combiners 1 +#define GL_REGISTER_COMBINERS_NV 0x8522 +#define GL_VARIABLE_A_NV 0x8523 +#define GL_VARIABLE_B_NV 0x8524 +#define GL_VARIABLE_C_NV 0x8525 +#define GL_VARIABLE_D_NV 0x8526 +#define GL_VARIABLE_E_NV 0x8527 +#define GL_VARIABLE_F_NV 0x8528 +#define GL_VARIABLE_G_NV 0x8529 +#define GL_CONSTANT_COLOR0_NV 0x852A +#define GL_CONSTANT_COLOR1_NV 0x852B +#define GL_SPARE0_NV 0x852E +#define GL_SPARE1_NV 0x852F +#define GL_DISCARD_NV 0x8530 +#define GL_E_TIMES_F_NV 0x8531 +#define GL_SPARE0_PLUS_SECONDARY_COLOR_NV 0x8532 +#define GL_UNSIGNED_IDENTITY_NV 0x8536 +#define GL_UNSIGNED_INVERT_NV 0x8537 +#define GL_EXPAND_NORMAL_NV 0x8538 +#define GL_EXPAND_NEGATE_NV 0x8539 +#define GL_HALF_BIAS_NORMAL_NV 0x853A +#define GL_HALF_BIAS_NEGATE_NV 0x853B +#define GL_SIGNED_IDENTITY_NV 0x853C +#define GL_SIGNED_NEGATE_NV 0x853D +#define GL_SCALE_BY_TWO_NV 0x853E +#define GL_SCALE_BY_FOUR_NV 0x853F +#define GL_SCALE_BY_ONE_HALF_NV 0x8540 +#define GL_BIAS_BY_NEGATIVE_ONE_HALF_NV 0x8541 +#define GL_COMBINER_INPUT_NV 0x8542 +#define GL_COMBINER_MAPPING_NV 0x8543 +#define GL_COMBINER_COMPONENT_USAGE_NV 0x8544 +#define GL_COMBINER_AB_DOT_PRODUCT_NV 0x8545 +#define GL_COMBINER_CD_DOT_PRODUCT_NV 0x8546 +#define GL_COMBINER_MUX_SUM_NV 0x8547 +#define GL_COMBINER_SCALE_NV 0x8548 +#define GL_COMBINER_BIAS_NV 0x8549 +#define GL_COMBINER_AB_OUTPUT_NV 0x854A +#define GL_COMBINER_CD_OUTPUT_NV 0x854B +#define GL_COMBINER_SUM_OUTPUT_NV 0x854C +#define GL_MAX_GENERAL_COMBINERS_NV 0x854D +#define GL_NUM_GENERAL_COMBINERS_NV 0x854E +#define GL_COLOR_SUM_CLAMP_NV 0x854F +#define GL_COMBINER0_NV 0x8550 +#define GL_COMBINER1_NV 0x8551 +#define GL_COMBINER2_NV 0x8552 +#define GL_COMBINER3_NV 0x8553 +#define GL_COMBINER4_NV 0x8554 +#define GL_COMBINER5_NV 0x8555 +#define GL_COMBINER6_NV 0x8556 +#define GL_COMBINER7_NV 0x8557 +typedef void (APIENTRYP PFNGLCOMBINERPARAMETERFVNVPROC) (GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLCOMBINERPARAMETERFNVPROC) (GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLCOMBINERPARAMETERIVNVPROC) (GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLCOMBINERPARAMETERINVPROC) (GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLCOMBINERINPUTNVPROC) (GLenum stage, 
GLenum portion, GLenum variable, GLenum input, GLenum mapping, GLenum componentUsage); +typedef void (APIENTRYP PFNGLCOMBINEROUTPUTNVPROC) (GLenum stage, GLenum portion, GLenum abOutput, GLenum cdOutput, GLenum sumOutput, GLenum scale, GLenum bias, GLboolean abDotProduct, GLboolean cdDotProduct, GLboolean muxSum); +typedef void (APIENTRYP PFNGLFINALCOMBINERINPUTNVPROC) (GLenum variable, GLenum input, GLenum mapping, GLenum componentUsage); +typedef void (APIENTRYP PFNGLGETCOMBINERINPUTPARAMETERFVNVPROC) (GLenum stage, GLenum portion, GLenum variable, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETCOMBINERINPUTPARAMETERIVNVPROC) (GLenum stage, GLenum portion, GLenum variable, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETCOMBINEROUTPUTPARAMETERFVNVPROC) (GLenum stage, GLenum portion, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETCOMBINEROUTPUTPARAMETERIVNVPROC) (GLenum stage, GLenum portion, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETFINALCOMBINERINPUTPARAMETERFVNVPROC) (GLenum variable, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETFINALCOMBINERINPUTPARAMETERIVNVPROC) (GLenum variable, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCombinerParameterfvNV (GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glCombinerParameterfNV (GLenum pname, GLfloat param); +GLAPI void APIENTRY glCombinerParameterivNV (GLenum pname, const GLint *params); +GLAPI void APIENTRY glCombinerParameteriNV (GLenum pname, GLint param); +GLAPI void APIENTRY glCombinerInputNV (GLenum stage, GLenum portion, GLenum variable, GLenum input, GLenum mapping, GLenum componentUsage); +GLAPI void APIENTRY glCombinerOutputNV (GLenum stage, GLenum portion, GLenum abOutput, GLenum cdOutput, GLenum sumOutput, GLenum scale, GLenum bias, GLboolean abDotProduct, GLboolean cdDotProduct, GLboolean muxSum); +GLAPI void APIENTRY glFinalCombinerInputNV (GLenum variable, GLenum input, GLenum mapping, GLenum componentUsage); +GLAPI void APIENTRY glGetCombinerInputParameterfvNV (GLenum stage, GLenum portion, GLenum variable, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetCombinerInputParameterivNV (GLenum stage, GLenum portion, GLenum variable, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetCombinerOutputParameterfvNV (GLenum stage, GLenum portion, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetCombinerOutputParameterivNV (GLenum stage, GLenum portion, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetFinalCombinerInputParameterfvNV (GLenum variable, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetFinalCombinerInputParameterivNV (GLenum variable, GLenum pname, GLint *params); +#endif +#endif /* GL_NV_register_combiners */ + +#ifndef GL_NV_register_combiners2 +#define GL_NV_register_combiners2 1 +#define GL_PER_STAGE_CONSTANTS_NV 0x8535 +typedef void (APIENTRYP PFNGLCOMBINERSTAGEPARAMETERFVNVPROC) (GLenum stage, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLGETCOMBINERSTAGEPARAMETERFVNVPROC) (GLenum stage, GLenum pname, GLfloat *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glCombinerStageParameterfvNV (GLenum stage, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glGetCombinerStageParameterfvNV (GLenum stage, GLenum pname, GLfloat *params); +#endif +#endif /* GL_NV_register_combiners2 */ + +#ifndef GL_NV_robustness_video_memory_purge +#define GL_NV_robustness_video_memory_purge 1 +#define GL_PURGED_CONTEXT_RESET_NV 
0x92BB +#endif /* GL_NV_robustness_video_memory_purge */ + +#ifndef GL_NV_sample_locations +#define GL_NV_sample_locations 1 +#define GL_SAMPLE_LOCATION_SUBPIXEL_BITS_NV 0x933D +#define GL_SAMPLE_LOCATION_PIXEL_GRID_WIDTH_NV 0x933E +#define GL_SAMPLE_LOCATION_PIXEL_GRID_HEIGHT_NV 0x933F +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_TABLE_SIZE_NV 0x9340 +#define GL_SAMPLE_LOCATION_NV 0x8E50 +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_NV 0x9341 +#define GL_FRAMEBUFFER_PROGRAMMABLE_SAMPLE_LOCATIONS_NV 0x9342 +#define GL_FRAMEBUFFER_SAMPLE_LOCATION_PIXEL_GRID_NV 0x9343 +typedef void (APIENTRYP PFNGLFRAMEBUFFERSAMPLELOCATIONSFVNVPROC) (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLNAMEDFRAMEBUFFERSAMPLELOCATIONSFVNVPROC) (GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLRESOLVEDEPTHVALUESNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFramebufferSampleLocationsfvNV (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glNamedFramebufferSampleLocationsfvNV (GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glResolveDepthValuesNV (void); +#endif +#endif /* GL_NV_sample_locations */ + +#ifndef GL_NV_sample_mask_override_coverage +#define GL_NV_sample_mask_override_coverage 1 +#endif /* GL_NV_sample_mask_override_coverage */ + +#ifndef GL_NV_shader_atomic_counters +#define GL_NV_shader_atomic_counters 1 +#endif /* GL_NV_shader_atomic_counters */ + +#ifndef GL_NV_shader_atomic_float +#define GL_NV_shader_atomic_float 1 +#endif /* GL_NV_shader_atomic_float */ + +#ifndef GL_NV_shader_atomic_float64 +#define GL_NV_shader_atomic_float64 1 +#endif /* GL_NV_shader_atomic_float64 */ + +#ifndef GL_NV_shader_atomic_fp16_vector +#define GL_NV_shader_atomic_fp16_vector 1 +#endif /* GL_NV_shader_atomic_fp16_vector */ + +#ifndef GL_NV_shader_atomic_int64 +#define GL_NV_shader_atomic_int64 1 +#endif /* GL_NV_shader_atomic_int64 */ + +#ifndef GL_NV_shader_buffer_load +#define GL_NV_shader_buffer_load 1 +#define GL_BUFFER_GPU_ADDRESS_NV 0x8F1D +#define GL_GPU_ADDRESS_NV 0x8F34 +#define GL_MAX_SHADER_BUFFER_ADDRESS_NV 0x8F35 +typedef void (APIENTRYP PFNGLMAKEBUFFERRESIDENTNVPROC) (GLenum target, GLenum access); +typedef void (APIENTRYP PFNGLMAKEBUFFERNONRESIDENTNVPROC) (GLenum target); +typedef GLboolean (APIENTRYP PFNGLISBUFFERRESIDENTNVPROC) (GLenum target); +typedef void (APIENTRYP PFNGLMAKENAMEDBUFFERRESIDENTNVPROC) (GLuint buffer, GLenum access); +typedef void (APIENTRYP PFNGLMAKENAMEDBUFFERNONRESIDENTNVPROC) (GLuint buffer); +typedef GLboolean (APIENTRYP PFNGLISNAMEDBUFFERRESIDENTNVPROC) (GLuint buffer); +typedef void (APIENTRYP PFNGLGETBUFFERPARAMETERUI64VNVPROC) (GLenum target, GLenum pname, GLuint64EXT *params); +typedef void (APIENTRYP PFNGLGETNAMEDBUFFERPARAMETERUI64VNVPROC) (GLuint buffer, GLenum pname, GLuint64EXT *params); +typedef void (APIENTRYP PFNGLGETINTEGERUI64VNVPROC) (GLenum value, GLuint64EXT *result); +typedef void (APIENTRYP PFNGLUNIFORMUI64NVPROC) (GLint location, GLuint64EXT value); +typedef void (APIENTRYP PFNGLUNIFORMUI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMUI64NVPROC) (GLuint program, GLint location, GLuint64EXT value); +typedef void (APIENTRYP PFNGLPROGRAMUNIFORMUI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glMakeBufferResidentNV (GLenum target, 
GLenum access); +GLAPI void APIENTRY glMakeBufferNonResidentNV (GLenum target); +GLAPI GLboolean APIENTRY glIsBufferResidentNV (GLenum target); +GLAPI void APIENTRY glMakeNamedBufferResidentNV (GLuint buffer, GLenum access); +GLAPI void APIENTRY glMakeNamedBufferNonResidentNV (GLuint buffer); +GLAPI GLboolean APIENTRY glIsNamedBufferResidentNV (GLuint buffer); +GLAPI void APIENTRY glGetBufferParameterui64vNV (GLenum target, GLenum pname, GLuint64EXT *params); +GLAPI void APIENTRY glGetNamedBufferParameterui64vNV (GLuint buffer, GLenum pname, GLuint64EXT *params); +GLAPI void APIENTRY glGetIntegerui64vNV (GLenum value, GLuint64EXT *result); +GLAPI void APIENTRY glUniformui64NV (GLint location, GLuint64EXT value); +GLAPI void APIENTRY glUniformui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GLAPI void APIENTRY glProgramUniformui64NV (GLuint program, GLint location, GLuint64EXT value); +GLAPI void APIENTRY glProgramUniformui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#endif +#endif /* GL_NV_shader_buffer_load */ + +#ifndef GL_NV_shader_buffer_store +#define GL_NV_shader_buffer_store 1 +#define GL_SHADER_GLOBAL_ACCESS_BARRIER_BIT_NV 0x00000010 +#endif /* GL_NV_shader_buffer_store */ + +#ifndef GL_NV_shader_storage_buffer_object +#define GL_NV_shader_storage_buffer_object 1 +#endif /* GL_NV_shader_storage_buffer_object */ + +#ifndef GL_NV_shader_thread_group +#define GL_NV_shader_thread_group 1 +#define GL_WARP_SIZE_NV 0x9339 +#define GL_WARPS_PER_SM_NV 0x933A +#define GL_SM_COUNT_NV 0x933B +#endif /* GL_NV_shader_thread_group */ + +#ifndef GL_NV_shader_thread_shuffle +#define GL_NV_shader_thread_shuffle 1 +#endif /* GL_NV_shader_thread_shuffle */ + +#ifndef GL_NV_stereo_view_rendering +#define GL_NV_stereo_view_rendering 1 +#endif /* GL_NV_stereo_view_rendering */ + +#ifndef GL_NV_tessellation_program5 +#define GL_NV_tessellation_program5 1 +#define GL_MAX_PROGRAM_PATCH_ATTRIBS_NV 0x86D8 +#define GL_TESS_CONTROL_PROGRAM_NV 0x891E +#define GL_TESS_EVALUATION_PROGRAM_NV 0x891F +#define GL_TESS_CONTROL_PROGRAM_PARAMETER_BUFFER_NV 0x8C74 +#define GL_TESS_EVALUATION_PROGRAM_PARAMETER_BUFFER_NV 0x8C75 +#endif /* GL_NV_tessellation_program5 */ + +#ifndef GL_NV_texgen_emboss +#define GL_NV_texgen_emboss 1 +#define GL_EMBOSS_LIGHT_NV 0x855D +#define GL_EMBOSS_CONSTANT_NV 0x855E +#define GL_EMBOSS_MAP_NV 0x855F +#endif /* GL_NV_texgen_emboss */ + +#ifndef GL_NV_texgen_reflection +#define GL_NV_texgen_reflection 1 +#define GL_NORMAL_MAP_NV 0x8511 +#define GL_REFLECTION_MAP_NV 0x8512 +#endif /* GL_NV_texgen_reflection */ + +#ifndef GL_NV_texture_barrier +#define GL_NV_texture_barrier 1 +typedef void (APIENTRYP PFNGLTEXTUREBARRIERNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTextureBarrierNV (void); +#endif +#endif /* GL_NV_texture_barrier */ + +#ifndef GL_NV_texture_compression_vtc +#define GL_NV_texture_compression_vtc 1 +#endif /* GL_NV_texture_compression_vtc */ + +#ifndef GL_NV_texture_env_combine4 +#define GL_NV_texture_env_combine4 1 +#define GL_COMBINE4_NV 0x8503 +#define GL_SOURCE3_RGB_NV 0x8583 +#define GL_SOURCE3_ALPHA_NV 0x858B +#define GL_OPERAND3_RGB_NV 0x8593 +#define GL_OPERAND3_ALPHA_NV 0x859B +#endif /* GL_NV_texture_env_combine4 */ + +#ifndef GL_NV_texture_expand_normal +#define GL_NV_texture_expand_normal 1 +#define GL_TEXTURE_UNSIGNED_REMAP_MODE_NV 0x888F +#endif /* GL_NV_texture_expand_normal */ + +#ifndef GL_NV_texture_multisample +#define GL_NV_texture_multisample 1 +#define GL_TEXTURE_COVERAGE_SAMPLES_NV 
0x9045 +#define GL_TEXTURE_COLOR_SAMPLES_NV 0x9046 +typedef void (APIENTRYP PFNGLTEXIMAGE2DMULTISAMPLECOVERAGENVPROC) (GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLint internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations); +typedef void (APIENTRYP PFNGLTEXIMAGE3DMULTISAMPLECOVERAGENVPROC) (GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLint internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations); +typedef void (APIENTRYP PFNGLTEXTUREIMAGE2DMULTISAMPLENVPROC) (GLuint texture, GLenum target, GLsizei samples, GLint internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations); +typedef void (APIENTRYP PFNGLTEXTUREIMAGE3DMULTISAMPLENVPROC) (GLuint texture, GLenum target, GLsizei samples, GLint internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations); +typedef void (APIENTRYP PFNGLTEXTUREIMAGE2DMULTISAMPLECOVERAGENVPROC) (GLuint texture, GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLint internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations); +typedef void (APIENTRYP PFNGLTEXTUREIMAGE3DMULTISAMPLECOVERAGENVPROC) (GLuint texture, GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLint internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexImage2DMultisampleCoverageNV (GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLint internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations); +GLAPI void APIENTRY glTexImage3DMultisampleCoverageNV (GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLint internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations); +GLAPI void APIENTRY glTextureImage2DMultisampleNV (GLuint texture, GLenum target, GLsizei samples, GLint internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations); +GLAPI void APIENTRY glTextureImage3DMultisampleNV (GLuint texture, GLenum target, GLsizei samples, GLint internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations); +GLAPI void APIENTRY glTextureImage2DMultisampleCoverageNV (GLuint texture, GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLint internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations); +GLAPI void APIENTRY glTextureImage3DMultisampleCoverageNV (GLuint texture, GLenum target, GLsizei coverageSamples, GLsizei colorSamples, GLint internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations); +#endif +#endif /* GL_NV_texture_multisample */ + +#ifndef GL_NV_texture_rectangle +#define GL_NV_texture_rectangle 1 +#define GL_TEXTURE_RECTANGLE_NV 0x84F5 +#define GL_TEXTURE_BINDING_RECTANGLE_NV 0x84F6 +#define GL_PROXY_TEXTURE_RECTANGLE_NV 0x84F7 +#define GL_MAX_RECTANGLE_TEXTURE_SIZE_NV 0x84F8 +#endif /* GL_NV_texture_rectangle */ + +#ifndef GL_NV_texture_rectangle_compressed +#define GL_NV_texture_rectangle_compressed 1 +#endif /* GL_NV_texture_rectangle_compressed */ + +#ifndef GL_NV_texture_shader +#define GL_NV_texture_shader 1 +#define GL_OFFSET_TEXTURE_RECTANGLE_NV 0x864C +#define GL_OFFSET_TEXTURE_RECTANGLE_SCALE_NV 0x864D +#define GL_DOT_PRODUCT_TEXTURE_RECTANGLE_NV 0x864E +#define GL_RGBA_UNSIGNED_DOT_PRODUCT_MAPPING_NV 0x86D9 +#define GL_UNSIGNED_INT_S8_S8_8_8_NV 0x86DA +#define GL_UNSIGNED_INT_8_8_S8_S8_REV_NV 0x86DB +#define 
GL_DSDT_MAG_INTENSITY_NV 0x86DC +#define GL_SHADER_CONSISTENT_NV 0x86DD +#define GL_TEXTURE_SHADER_NV 0x86DE +#define GL_SHADER_OPERATION_NV 0x86DF +#define GL_CULL_MODES_NV 0x86E0 +#define GL_OFFSET_TEXTURE_MATRIX_NV 0x86E1 +#define GL_OFFSET_TEXTURE_SCALE_NV 0x86E2 +#define GL_OFFSET_TEXTURE_BIAS_NV 0x86E3 +#define GL_OFFSET_TEXTURE_2D_MATRIX_NV 0x86E1 +#define GL_OFFSET_TEXTURE_2D_SCALE_NV 0x86E2 +#define GL_OFFSET_TEXTURE_2D_BIAS_NV 0x86E3 +#define GL_PREVIOUS_TEXTURE_INPUT_NV 0x86E4 +#define GL_CONST_EYE_NV 0x86E5 +#define GL_PASS_THROUGH_NV 0x86E6 +#define GL_CULL_FRAGMENT_NV 0x86E7 +#define GL_OFFSET_TEXTURE_2D_NV 0x86E8 +#define GL_DEPENDENT_AR_TEXTURE_2D_NV 0x86E9 +#define GL_DEPENDENT_GB_TEXTURE_2D_NV 0x86EA +#define GL_DOT_PRODUCT_NV 0x86EC +#define GL_DOT_PRODUCT_DEPTH_REPLACE_NV 0x86ED +#define GL_DOT_PRODUCT_TEXTURE_2D_NV 0x86EE +#define GL_DOT_PRODUCT_TEXTURE_CUBE_MAP_NV 0x86F0 +#define GL_DOT_PRODUCT_DIFFUSE_CUBE_MAP_NV 0x86F1 +#define GL_DOT_PRODUCT_REFLECT_CUBE_MAP_NV 0x86F2 +#define GL_DOT_PRODUCT_CONST_EYE_REFLECT_CUBE_MAP_NV 0x86F3 +#define GL_HILO_NV 0x86F4 +#define GL_DSDT_NV 0x86F5 +#define GL_DSDT_MAG_NV 0x86F6 +#define GL_DSDT_MAG_VIB_NV 0x86F7 +#define GL_HILO16_NV 0x86F8 +#define GL_SIGNED_HILO_NV 0x86F9 +#define GL_SIGNED_HILO16_NV 0x86FA +#define GL_SIGNED_RGBA_NV 0x86FB +#define GL_SIGNED_RGBA8_NV 0x86FC +#define GL_SIGNED_RGB_NV 0x86FE +#define GL_SIGNED_RGB8_NV 0x86FF +#define GL_SIGNED_LUMINANCE_NV 0x8701 +#define GL_SIGNED_LUMINANCE8_NV 0x8702 +#define GL_SIGNED_LUMINANCE_ALPHA_NV 0x8703 +#define GL_SIGNED_LUMINANCE8_ALPHA8_NV 0x8704 +#define GL_SIGNED_ALPHA_NV 0x8705 +#define GL_SIGNED_ALPHA8_NV 0x8706 +#define GL_SIGNED_INTENSITY_NV 0x8707 +#define GL_SIGNED_INTENSITY8_NV 0x8708 +#define GL_DSDT8_NV 0x8709 +#define GL_DSDT8_MAG8_NV 0x870A +#define GL_DSDT8_MAG8_INTENSITY8_NV 0x870B +#define GL_SIGNED_RGB_UNSIGNED_ALPHA_NV 0x870C +#define GL_SIGNED_RGB8_UNSIGNED_ALPHA8_NV 0x870D +#define GL_HI_SCALE_NV 0x870E +#define GL_LO_SCALE_NV 0x870F +#define GL_DS_SCALE_NV 0x8710 +#define GL_DT_SCALE_NV 0x8711 +#define GL_MAGNITUDE_SCALE_NV 0x8712 +#define GL_VIBRANCE_SCALE_NV 0x8713 +#define GL_HI_BIAS_NV 0x8714 +#define GL_LO_BIAS_NV 0x8715 +#define GL_DS_BIAS_NV 0x8716 +#define GL_DT_BIAS_NV 0x8717 +#define GL_MAGNITUDE_BIAS_NV 0x8718 +#define GL_VIBRANCE_BIAS_NV 0x8719 +#define GL_TEXTURE_BORDER_VALUES_NV 0x871A +#define GL_TEXTURE_HI_SIZE_NV 0x871B +#define GL_TEXTURE_LO_SIZE_NV 0x871C +#define GL_TEXTURE_DS_SIZE_NV 0x871D +#define GL_TEXTURE_DT_SIZE_NV 0x871E +#define GL_TEXTURE_MAG_SIZE_NV 0x871F +#endif /* GL_NV_texture_shader */ + +#ifndef GL_NV_texture_shader2 +#define GL_NV_texture_shader2 1 +#define GL_DOT_PRODUCT_TEXTURE_3D_NV 0x86EF +#endif /* GL_NV_texture_shader2 */ + +#ifndef GL_NV_texture_shader3 +#define GL_NV_texture_shader3 1 +#define GL_OFFSET_PROJECTIVE_TEXTURE_2D_NV 0x8850 +#define GL_OFFSET_PROJECTIVE_TEXTURE_2D_SCALE_NV 0x8851 +#define GL_OFFSET_PROJECTIVE_TEXTURE_RECTANGLE_NV 0x8852 +#define GL_OFFSET_PROJECTIVE_TEXTURE_RECTANGLE_SCALE_NV 0x8853 +#define GL_OFFSET_HILO_TEXTURE_2D_NV 0x8854 +#define GL_OFFSET_HILO_TEXTURE_RECTANGLE_NV 0x8855 +#define GL_OFFSET_HILO_PROJECTIVE_TEXTURE_2D_NV 0x8856 +#define GL_OFFSET_HILO_PROJECTIVE_TEXTURE_RECTANGLE_NV 0x8857 +#define GL_DEPENDENT_HILO_TEXTURE_2D_NV 0x8858 +#define GL_DEPENDENT_RGB_TEXTURE_3D_NV 0x8859 +#define GL_DEPENDENT_RGB_TEXTURE_CUBE_MAP_NV 0x885A +#define GL_DOT_PRODUCT_PASS_THROUGH_NV 0x885B +#define GL_DOT_PRODUCT_TEXTURE_1D_NV 0x885C +#define 
GL_DOT_PRODUCT_AFFINE_DEPTH_REPLACE_NV 0x885D +#define GL_HILO8_NV 0x885E +#define GL_SIGNED_HILO8_NV 0x885F +#define GL_FORCE_BLUE_TO_ONE_NV 0x8860 +#endif /* GL_NV_texture_shader3 */ + +#ifndef GL_NV_transform_feedback +#define GL_NV_transform_feedback 1 +#define GL_BACK_PRIMARY_COLOR_NV 0x8C77 +#define GL_BACK_SECONDARY_COLOR_NV 0x8C78 +#define GL_TEXTURE_COORD_NV 0x8C79 +#define GL_CLIP_DISTANCE_NV 0x8C7A +#define GL_VERTEX_ID_NV 0x8C7B +#define GL_PRIMITIVE_ID_NV 0x8C7C +#define GL_GENERIC_ATTRIB_NV 0x8C7D +#define GL_TRANSFORM_FEEDBACK_ATTRIBS_NV 0x8C7E +#define GL_TRANSFORM_FEEDBACK_BUFFER_MODE_NV 0x8C7F +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS_NV 0x8C80 +#define GL_ACTIVE_VARYINGS_NV 0x8C81 +#define GL_ACTIVE_VARYING_MAX_LENGTH_NV 0x8C82 +#define GL_TRANSFORM_FEEDBACK_VARYINGS_NV 0x8C83 +#define GL_TRANSFORM_FEEDBACK_BUFFER_START_NV 0x8C84 +#define GL_TRANSFORM_FEEDBACK_BUFFER_SIZE_NV 0x8C85 +#define GL_TRANSFORM_FEEDBACK_RECORD_NV 0x8C86 +#define GL_PRIMITIVES_GENERATED_NV 0x8C87 +#define GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN_NV 0x8C88 +#define GL_RASTERIZER_DISCARD_NV 0x8C89 +#define GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS_NV 0x8C8A +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS_NV 0x8C8B +#define GL_INTERLEAVED_ATTRIBS_NV 0x8C8C +#define GL_SEPARATE_ATTRIBS_NV 0x8C8D +#define GL_TRANSFORM_FEEDBACK_BUFFER_NV 0x8C8E +#define GL_TRANSFORM_FEEDBACK_BUFFER_BINDING_NV 0x8C8F +#define GL_LAYER_NV 0x8DAA +#define GL_NEXT_BUFFER_NV -2 +#define GL_SKIP_COMPONENTS4_NV -3 +#define GL_SKIP_COMPONENTS3_NV -4 +#define GL_SKIP_COMPONENTS2_NV -5 +#define GL_SKIP_COMPONENTS1_NV -6 +typedef void (APIENTRYP PFNGLBEGINTRANSFORMFEEDBACKNVPROC) (GLenum primitiveMode); +typedef void (APIENTRYP PFNGLENDTRANSFORMFEEDBACKNVPROC) (void); +typedef void (APIENTRYP PFNGLTRANSFORMFEEDBACKATTRIBSNVPROC) (GLsizei count, const GLint *attribs, GLenum bufferMode); +typedef void (APIENTRYP PFNGLBINDBUFFERRANGENVPROC) (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (APIENTRYP PFNGLBINDBUFFEROFFSETNVPROC) (GLenum target, GLuint index, GLuint buffer, GLintptr offset); +typedef void (APIENTRYP PFNGLBINDBUFFERBASENVPROC) (GLenum target, GLuint index, GLuint buffer); +typedef void (APIENTRYP PFNGLTRANSFORMFEEDBACKVARYINGSNVPROC) (GLuint program, GLsizei count, const GLint *locations, GLenum bufferMode); +typedef void (APIENTRYP PFNGLACTIVEVARYINGNVPROC) (GLuint program, const GLchar *name); +typedef GLint (APIENTRYP PFNGLGETVARYINGLOCATIONNVPROC) (GLuint program, const GLchar *name); +typedef void (APIENTRYP PFNGLGETACTIVEVARYINGNVPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +typedef void (APIENTRYP PFNGLGETTRANSFORMFEEDBACKVARYINGNVPROC) (GLuint program, GLuint index, GLint *location); +typedef void (APIENTRYP PFNGLTRANSFORMFEEDBACKSTREAMATTRIBSNVPROC) (GLsizei count, const GLint *attribs, GLsizei nbuffers, const GLint *bufstreams, GLenum bufferMode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBeginTransformFeedbackNV (GLenum primitiveMode); +GLAPI void APIENTRY glEndTransformFeedbackNV (void); +GLAPI void APIENTRY glTransformFeedbackAttribsNV (GLsizei count, const GLint *attribs, GLenum bufferMode); +GLAPI void APIENTRY glBindBufferRangeNV (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +GLAPI void APIENTRY glBindBufferOffsetNV (GLenum target, GLuint index, GLuint buffer, GLintptr offset); +GLAPI void APIENTRY glBindBufferBaseNV (GLenum 
target, GLuint index, GLuint buffer); +GLAPI void APIENTRY glTransformFeedbackVaryingsNV (GLuint program, GLsizei count, const GLint *locations, GLenum bufferMode); +GLAPI void APIENTRY glActiveVaryingNV (GLuint program, const GLchar *name); +GLAPI GLint APIENTRY glGetVaryingLocationNV (GLuint program, const GLchar *name); +GLAPI void APIENTRY glGetActiveVaryingNV (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +GLAPI void APIENTRY glGetTransformFeedbackVaryingNV (GLuint program, GLuint index, GLint *location); +GLAPI void APIENTRY glTransformFeedbackStreamAttribsNV (GLsizei count, const GLint *attribs, GLsizei nbuffers, const GLint *bufstreams, GLenum bufferMode); +#endif +#endif /* GL_NV_transform_feedback */ + +#ifndef GL_NV_transform_feedback2 +#define GL_NV_transform_feedback2 1 +#define GL_TRANSFORM_FEEDBACK_NV 0x8E22 +#define GL_TRANSFORM_FEEDBACK_BUFFER_PAUSED_NV 0x8E23 +#define GL_TRANSFORM_FEEDBACK_BUFFER_ACTIVE_NV 0x8E24 +#define GL_TRANSFORM_FEEDBACK_BINDING_NV 0x8E25 +typedef void (APIENTRYP PFNGLBINDTRANSFORMFEEDBACKNVPROC) (GLenum target, GLuint id); +typedef void (APIENTRYP PFNGLDELETETRANSFORMFEEDBACKSNVPROC) (GLsizei n, const GLuint *ids); +typedef void (APIENTRYP PFNGLGENTRANSFORMFEEDBACKSNVPROC) (GLsizei n, GLuint *ids); +typedef GLboolean (APIENTRYP PFNGLISTRANSFORMFEEDBACKNVPROC) (GLuint id); +typedef void (APIENTRYP PFNGLPAUSETRANSFORMFEEDBACKNVPROC) (void); +typedef void (APIENTRYP PFNGLRESUMETRANSFORMFEEDBACKNVPROC) (void); +typedef void (APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKNVPROC) (GLenum mode, GLuint id); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBindTransformFeedbackNV (GLenum target, GLuint id); +GLAPI void APIENTRY glDeleteTransformFeedbacksNV (GLsizei n, const GLuint *ids); +GLAPI void APIENTRY glGenTransformFeedbacksNV (GLsizei n, GLuint *ids); +GLAPI GLboolean APIENTRY glIsTransformFeedbackNV (GLuint id); +GLAPI void APIENTRY glPauseTransformFeedbackNV (void); +GLAPI void APIENTRY glResumeTransformFeedbackNV (void); +GLAPI void APIENTRY glDrawTransformFeedbackNV (GLenum mode, GLuint id); +#endif +#endif /* GL_NV_transform_feedback2 */ + +#ifndef GL_NV_uniform_buffer_unified_memory +#define GL_NV_uniform_buffer_unified_memory 1 +#define GL_UNIFORM_BUFFER_UNIFIED_NV 0x936E +#define GL_UNIFORM_BUFFER_ADDRESS_NV 0x936F +#define GL_UNIFORM_BUFFER_LENGTH_NV 0x9370 +#endif /* GL_NV_uniform_buffer_unified_memory */ + +#ifndef GL_NV_vdpau_interop +#define GL_NV_vdpau_interop 1 +typedef GLintptr GLvdpauSurfaceNV; +#define GL_SURFACE_STATE_NV 0x86EB +#define GL_SURFACE_REGISTERED_NV 0x86FD +#define GL_SURFACE_MAPPED_NV 0x8700 +#define GL_WRITE_DISCARD_NV 0x88BE +typedef void (APIENTRYP PFNGLVDPAUINITNVPROC) (const void *vdpDevice, const void *getProcAddress); +typedef void (APIENTRYP PFNGLVDPAUFININVPROC) (void); +typedef GLvdpauSurfaceNV (APIENTRYP PFNGLVDPAUREGISTERVIDEOSURFACENVPROC) (const void *vdpSurface, GLenum target, GLsizei numTextureNames, const GLuint *textureNames); +typedef GLvdpauSurfaceNV (APIENTRYP PFNGLVDPAUREGISTEROUTPUTSURFACENVPROC) (const void *vdpSurface, GLenum target, GLsizei numTextureNames, const GLuint *textureNames); +typedef GLboolean (APIENTRYP PFNGLVDPAUISSURFACENVPROC) (GLvdpauSurfaceNV surface); +typedef void (APIENTRYP PFNGLVDPAUUNREGISTERSURFACENVPROC) (GLvdpauSurfaceNV surface); +typedef void (APIENTRYP PFNGLVDPAUGETSURFACEIVNVPROC) (GLvdpauSurfaceNV surface, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +typedef void (APIENTRYP 
PFNGLVDPAUSURFACEACCESSNVPROC) (GLvdpauSurfaceNV surface, GLenum access); +typedef void (APIENTRYP PFNGLVDPAUMAPSURFACESNVPROC) (GLsizei numSurfaces, const GLvdpauSurfaceNV *surfaces); +typedef void (APIENTRYP PFNGLVDPAUUNMAPSURFACESNVPROC) (GLsizei numSurface, const GLvdpauSurfaceNV *surfaces); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVDPAUInitNV (const void *vdpDevice, const void *getProcAddress); +GLAPI void APIENTRY glVDPAUFiniNV (void); +GLAPI GLvdpauSurfaceNV APIENTRY glVDPAURegisterVideoSurfaceNV (const void *vdpSurface, GLenum target, GLsizei numTextureNames, const GLuint *textureNames); +GLAPI GLvdpauSurfaceNV APIENTRY glVDPAURegisterOutputSurfaceNV (const void *vdpSurface, GLenum target, GLsizei numTextureNames, const GLuint *textureNames); +GLAPI GLboolean APIENTRY glVDPAUIsSurfaceNV (GLvdpauSurfaceNV surface); +GLAPI void APIENTRY glVDPAUUnregisterSurfaceNV (GLvdpauSurfaceNV surface); +GLAPI void APIENTRY glVDPAUGetSurfaceivNV (GLvdpauSurfaceNV surface, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +GLAPI void APIENTRY glVDPAUSurfaceAccessNV (GLvdpauSurfaceNV surface, GLenum access); +GLAPI void APIENTRY glVDPAUMapSurfacesNV (GLsizei numSurfaces, const GLvdpauSurfaceNV *surfaces); +GLAPI void APIENTRY glVDPAUUnmapSurfacesNV (GLsizei numSurface, const GLvdpauSurfaceNV *surfaces); +#endif +#endif /* GL_NV_vdpau_interop */ + +#ifndef GL_NV_vertex_array_range +#define GL_NV_vertex_array_range 1 +#define GL_VERTEX_ARRAY_RANGE_NV 0x851D +#define GL_VERTEX_ARRAY_RANGE_LENGTH_NV 0x851E +#define GL_VERTEX_ARRAY_RANGE_VALID_NV 0x851F +#define GL_MAX_VERTEX_ARRAY_RANGE_ELEMENT_NV 0x8520 +#define GL_VERTEX_ARRAY_RANGE_POINTER_NV 0x8521 +typedef void (APIENTRYP PFNGLFLUSHVERTEXARRAYRANGENVPROC) (void); +typedef void (APIENTRYP PFNGLVERTEXARRAYRANGENVPROC) (GLsizei length, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFlushVertexArrayRangeNV (void); +GLAPI void APIENTRY glVertexArrayRangeNV (GLsizei length, const void *pointer); +#endif +#endif /* GL_NV_vertex_array_range */ + +#ifndef GL_NV_vertex_array_range2 +#define GL_NV_vertex_array_range2 1 +#define GL_VERTEX_ARRAY_RANGE_WITHOUT_FLUSH_NV 0x8533 +#endif /* GL_NV_vertex_array_range2 */ + +#ifndef GL_NV_vertex_attrib_integer_64bit +#define GL_NV_vertex_attrib_integer_64bit 1 +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1I64NVPROC) (GLuint index, GLint64EXT x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2I64NVPROC) (GLuint index, GLint64EXT x, GLint64EXT y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3I64NVPROC) (GLuint index, GLint64EXT x, GLint64EXT y, GLint64EXT z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4I64NVPROC) (GLuint index, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1I64VNVPROC) (GLuint index, const GLint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2I64VNVPROC) (GLuint index, const GLint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3I64VNVPROC) (GLuint index, const GLint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4I64VNVPROC) (GLuint index, const GLint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1UI64NVPROC) (GLuint index, GLuint64EXT x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2UI64NVPROC) (GLuint index, GLuint64EXT x, GLuint64EXT y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3UI64NVPROC) (GLuint index, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4UI64NVPROC) (GLuint index, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT 
w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL1UI64VNVPROC) (GLuint index, const GLuint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL2UI64VNVPROC) (GLuint index, const GLuint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL3UI64VNVPROC) (GLuint index, const GLuint64EXT *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBL4UI64VNVPROC) (GLuint index, const GLuint64EXT *v); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBLI64VNVPROC) (GLuint index, GLenum pname, GLint64EXT *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBLUI64VNVPROC) (GLuint index, GLenum pname, GLuint64EXT *params); +typedef void (APIENTRYP PFNGLVERTEXATTRIBLFORMATNVPROC) (GLuint index, GLint size, GLenum type, GLsizei stride); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexAttribL1i64NV (GLuint index, GLint64EXT x); +GLAPI void APIENTRY glVertexAttribL2i64NV (GLuint index, GLint64EXT x, GLint64EXT y); +GLAPI void APIENTRY glVertexAttribL3i64NV (GLuint index, GLint64EXT x, GLint64EXT y, GLint64EXT z); +GLAPI void APIENTRY glVertexAttribL4i64NV (GLuint index, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +GLAPI void APIENTRY glVertexAttribL1i64vNV (GLuint index, const GLint64EXT *v); +GLAPI void APIENTRY glVertexAttribL2i64vNV (GLuint index, const GLint64EXT *v); +GLAPI void APIENTRY glVertexAttribL3i64vNV (GLuint index, const GLint64EXT *v); +GLAPI void APIENTRY glVertexAttribL4i64vNV (GLuint index, const GLint64EXT *v); +GLAPI void APIENTRY glVertexAttribL1ui64NV (GLuint index, GLuint64EXT x); +GLAPI void APIENTRY glVertexAttribL2ui64NV (GLuint index, GLuint64EXT x, GLuint64EXT y); +GLAPI void APIENTRY glVertexAttribL3ui64NV (GLuint index, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +GLAPI void APIENTRY glVertexAttribL4ui64NV (GLuint index, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +GLAPI void APIENTRY glVertexAttribL1ui64vNV (GLuint index, const GLuint64EXT *v); +GLAPI void APIENTRY glVertexAttribL2ui64vNV (GLuint index, const GLuint64EXT *v); +GLAPI void APIENTRY glVertexAttribL3ui64vNV (GLuint index, const GLuint64EXT *v); +GLAPI void APIENTRY glVertexAttribL4ui64vNV (GLuint index, const GLuint64EXT *v); +GLAPI void APIENTRY glGetVertexAttribLi64vNV (GLuint index, GLenum pname, GLint64EXT *params); +GLAPI void APIENTRY glGetVertexAttribLui64vNV (GLuint index, GLenum pname, GLuint64EXT *params); +GLAPI void APIENTRY glVertexAttribLFormatNV (GLuint index, GLint size, GLenum type, GLsizei stride); +#endif +#endif /* GL_NV_vertex_attrib_integer_64bit */ + +#ifndef GL_NV_vertex_buffer_unified_memory +#define GL_NV_vertex_buffer_unified_memory 1 +#define GL_VERTEX_ATTRIB_ARRAY_UNIFIED_NV 0x8F1E +#define GL_ELEMENT_ARRAY_UNIFIED_NV 0x8F1F +#define GL_VERTEX_ATTRIB_ARRAY_ADDRESS_NV 0x8F20 +#define GL_VERTEX_ARRAY_ADDRESS_NV 0x8F21 +#define GL_NORMAL_ARRAY_ADDRESS_NV 0x8F22 +#define GL_COLOR_ARRAY_ADDRESS_NV 0x8F23 +#define GL_INDEX_ARRAY_ADDRESS_NV 0x8F24 +#define GL_TEXTURE_COORD_ARRAY_ADDRESS_NV 0x8F25 +#define GL_EDGE_FLAG_ARRAY_ADDRESS_NV 0x8F26 +#define GL_SECONDARY_COLOR_ARRAY_ADDRESS_NV 0x8F27 +#define GL_FOG_COORD_ARRAY_ADDRESS_NV 0x8F28 +#define GL_ELEMENT_ARRAY_ADDRESS_NV 0x8F29 +#define GL_VERTEX_ATTRIB_ARRAY_LENGTH_NV 0x8F2A +#define GL_VERTEX_ARRAY_LENGTH_NV 0x8F2B +#define GL_NORMAL_ARRAY_LENGTH_NV 0x8F2C +#define GL_COLOR_ARRAY_LENGTH_NV 0x8F2D +#define GL_INDEX_ARRAY_LENGTH_NV 0x8F2E +#define GL_TEXTURE_COORD_ARRAY_LENGTH_NV 0x8F2F +#define GL_EDGE_FLAG_ARRAY_LENGTH_NV 0x8F30 +#define GL_SECONDARY_COLOR_ARRAY_LENGTH_NV 0x8F31 +#define 
GL_FOG_COORD_ARRAY_LENGTH_NV 0x8F32 +#define GL_ELEMENT_ARRAY_LENGTH_NV 0x8F33 +#define GL_DRAW_INDIRECT_UNIFIED_NV 0x8F40 +#define GL_DRAW_INDIRECT_ADDRESS_NV 0x8F41 +#define GL_DRAW_INDIRECT_LENGTH_NV 0x8F42 +typedef void (APIENTRYP PFNGLBUFFERADDRESSRANGENVPROC) (GLenum pname, GLuint index, GLuint64EXT address, GLsizeiptr length); +typedef void (APIENTRYP PFNGLVERTEXFORMATNVPROC) (GLint size, GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLNORMALFORMATNVPROC) (GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLCOLORFORMATNVPROC) (GLint size, GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLINDEXFORMATNVPROC) (GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLTEXCOORDFORMATNVPROC) (GLint size, GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLEDGEFLAGFORMATNVPROC) (GLsizei stride); +typedef void (APIENTRYP PFNGLSECONDARYCOLORFORMATNVPROC) (GLint size, GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLFOGCOORDFORMATNVPROC) (GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLVERTEXATTRIBFORMATNVPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride); +typedef void (APIENTRYP PFNGLVERTEXATTRIBIFORMATNVPROC) (GLuint index, GLint size, GLenum type, GLsizei stride); +typedef void (APIENTRYP PFNGLGETINTEGERUI64I_VNVPROC) (GLenum value, GLuint index, GLuint64EXT *result); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBufferAddressRangeNV (GLenum pname, GLuint index, GLuint64EXT address, GLsizeiptr length); +GLAPI void APIENTRY glVertexFormatNV (GLint size, GLenum type, GLsizei stride); +GLAPI void APIENTRY glNormalFormatNV (GLenum type, GLsizei stride); +GLAPI void APIENTRY glColorFormatNV (GLint size, GLenum type, GLsizei stride); +GLAPI void APIENTRY glIndexFormatNV (GLenum type, GLsizei stride); +GLAPI void APIENTRY glTexCoordFormatNV (GLint size, GLenum type, GLsizei stride); +GLAPI void APIENTRY glEdgeFlagFormatNV (GLsizei stride); +GLAPI void APIENTRY glSecondaryColorFormatNV (GLint size, GLenum type, GLsizei stride); +GLAPI void APIENTRY glFogCoordFormatNV (GLenum type, GLsizei stride); +GLAPI void APIENTRY glVertexAttribFormatNV (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride); +GLAPI void APIENTRY glVertexAttribIFormatNV (GLuint index, GLint size, GLenum type, GLsizei stride); +GLAPI void APIENTRY glGetIntegerui64i_vNV (GLenum value, GLuint index, GLuint64EXT *result); +#endif +#endif /* GL_NV_vertex_buffer_unified_memory */ + +#ifndef GL_NV_vertex_program +#define GL_NV_vertex_program 1 +#define GL_VERTEX_PROGRAM_NV 0x8620 +#define GL_VERTEX_STATE_PROGRAM_NV 0x8621 +#define GL_ATTRIB_ARRAY_SIZE_NV 0x8623 +#define GL_ATTRIB_ARRAY_STRIDE_NV 0x8624 +#define GL_ATTRIB_ARRAY_TYPE_NV 0x8625 +#define GL_CURRENT_ATTRIB_NV 0x8626 +#define GL_PROGRAM_LENGTH_NV 0x8627 +#define GL_PROGRAM_STRING_NV 0x8628 +#define GL_MODELVIEW_PROJECTION_NV 0x8629 +#define GL_IDENTITY_NV 0x862A +#define GL_INVERSE_NV 0x862B +#define GL_TRANSPOSE_NV 0x862C +#define GL_INVERSE_TRANSPOSE_NV 0x862D +#define GL_MAX_TRACK_MATRIX_STACK_DEPTH_NV 0x862E +#define GL_MAX_TRACK_MATRICES_NV 0x862F +#define GL_MATRIX0_NV 0x8630 +#define GL_MATRIX1_NV 0x8631 +#define GL_MATRIX2_NV 0x8632 +#define GL_MATRIX3_NV 0x8633 +#define GL_MATRIX4_NV 0x8634 +#define GL_MATRIX5_NV 0x8635 +#define GL_MATRIX6_NV 0x8636 +#define GL_MATRIX7_NV 0x8637 +#define GL_CURRENT_MATRIX_STACK_DEPTH_NV 0x8640 +#define GL_CURRENT_MATRIX_NV 0x8641 +#define GL_VERTEX_PROGRAM_POINT_SIZE_NV 0x8642 +#define 
GL_VERTEX_PROGRAM_TWO_SIDE_NV 0x8643 +#define GL_PROGRAM_PARAMETER_NV 0x8644 +#define GL_ATTRIB_ARRAY_POINTER_NV 0x8645 +#define GL_PROGRAM_TARGET_NV 0x8646 +#define GL_PROGRAM_RESIDENT_NV 0x8647 +#define GL_TRACK_MATRIX_NV 0x8648 +#define GL_TRACK_MATRIX_TRANSFORM_NV 0x8649 +#define GL_VERTEX_PROGRAM_BINDING_NV 0x864A +#define GL_PROGRAM_ERROR_POSITION_NV 0x864B +#define GL_VERTEX_ATTRIB_ARRAY0_NV 0x8650 +#define GL_VERTEX_ATTRIB_ARRAY1_NV 0x8651 +#define GL_VERTEX_ATTRIB_ARRAY2_NV 0x8652 +#define GL_VERTEX_ATTRIB_ARRAY3_NV 0x8653 +#define GL_VERTEX_ATTRIB_ARRAY4_NV 0x8654 +#define GL_VERTEX_ATTRIB_ARRAY5_NV 0x8655 +#define GL_VERTEX_ATTRIB_ARRAY6_NV 0x8656 +#define GL_VERTEX_ATTRIB_ARRAY7_NV 0x8657 +#define GL_VERTEX_ATTRIB_ARRAY8_NV 0x8658 +#define GL_VERTEX_ATTRIB_ARRAY9_NV 0x8659 +#define GL_VERTEX_ATTRIB_ARRAY10_NV 0x865A +#define GL_VERTEX_ATTRIB_ARRAY11_NV 0x865B +#define GL_VERTEX_ATTRIB_ARRAY12_NV 0x865C +#define GL_VERTEX_ATTRIB_ARRAY13_NV 0x865D +#define GL_VERTEX_ATTRIB_ARRAY14_NV 0x865E +#define GL_VERTEX_ATTRIB_ARRAY15_NV 0x865F +#define GL_MAP1_VERTEX_ATTRIB0_4_NV 0x8660 +#define GL_MAP1_VERTEX_ATTRIB1_4_NV 0x8661 +#define GL_MAP1_VERTEX_ATTRIB2_4_NV 0x8662 +#define GL_MAP1_VERTEX_ATTRIB3_4_NV 0x8663 +#define GL_MAP1_VERTEX_ATTRIB4_4_NV 0x8664 +#define GL_MAP1_VERTEX_ATTRIB5_4_NV 0x8665 +#define GL_MAP1_VERTEX_ATTRIB6_4_NV 0x8666 +#define GL_MAP1_VERTEX_ATTRIB7_4_NV 0x8667 +#define GL_MAP1_VERTEX_ATTRIB8_4_NV 0x8668 +#define GL_MAP1_VERTEX_ATTRIB9_4_NV 0x8669 +#define GL_MAP1_VERTEX_ATTRIB10_4_NV 0x866A +#define GL_MAP1_VERTEX_ATTRIB11_4_NV 0x866B +#define GL_MAP1_VERTEX_ATTRIB12_4_NV 0x866C +#define GL_MAP1_VERTEX_ATTRIB13_4_NV 0x866D +#define GL_MAP1_VERTEX_ATTRIB14_4_NV 0x866E +#define GL_MAP1_VERTEX_ATTRIB15_4_NV 0x866F +#define GL_MAP2_VERTEX_ATTRIB0_4_NV 0x8670 +#define GL_MAP2_VERTEX_ATTRIB1_4_NV 0x8671 +#define GL_MAP2_VERTEX_ATTRIB2_4_NV 0x8672 +#define GL_MAP2_VERTEX_ATTRIB3_4_NV 0x8673 +#define GL_MAP2_VERTEX_ATTRIB4_4_NV 0x8674 +#define GL_MAP2_VERTEX_ATTRIB5_4_NV 0x8675 +#define GL_MAP2_VERTEX_ATTRIB6_4_NV 0x8676 +#define GL_MAP2_VERTEX_ATTRIB7_4_NV 0x8677 +#define GL_MAP2_VERTEX_ATTRIB8_4_NV 0x8678 +#define GL_MAP2_VERTEX_ATTRIB9_4_NV 0x8679 +#define GL_MAP2_VERTEX_ATTRIB10_4_NV 0x867A +#define GL_MAP2_VERTEX_ATTRIB11_4_NV 0x867B +#define GL_MAP2_VERTEX_ATTRIB12_4_NV 0x867C +#define GL_MAP2_VERTEX_ATTRIB13_4_NV 0x867D +#define GL_MAP2_VERTEX_ATTRIB14_4_NV 0x867E +#define GL_MAP2_VERTEX_ATTRIB15_4_NV 0x867F +typedef GLboolean (APIENTRYP PFNGLAREPROGRAMSRESIDENTNVPROC) (GLsizei n, const GLuint *programs, GLboolean *residences); +typedef void (APIENTRYP PFNGLBINDPROGRAMNVPROC) (GLenum target, GLuint id); +typedef void (APIENTRYP PFNGLDELETEPROGRAMSNVPROC) (GLsizei n, const GLuint *programs); +typedef void (APIENTRYP PFNGLEXECUTEPROGRAMNVPROC) (GLenum target, GLuint id, const GLfloat *params); +typedef void (APIENTRYP PFNGLGENPROGRAMSNVPROC) (GLsizei n, GLuint *programs); +typedef void (APIENTRYP PFNGLGETPROGRAMPARAMETERDVNVPROC) (GLenum target, GLuint index, GLenum pname, GLdouble *params); +typedef void (APIENTRYP PFNGLGETPROGRAMPARAMETERFVNVPROC) (GLenum target, GLuint index, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETPROGRAMIVNVPROC) (GLuint id, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETPROGRAMSTRINGNVPROC) (GLuint id, GLenum pname, GLubyte *program); +typedef void (APIENTRYP PFNGLGETTRACKMATRIXIVNVPROC) (GLenum target, GLuint address, GLenum pname, GLint *params); +typedef void (APIENTRYP 
PFNGLGETVERTEXATTRIBDVNVPROC) (GLuint index, GLenum pname, GLdouble *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBFVNVPROC) (GLuint index, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIVNVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVNVPROC) (GLuint index, GLenum pname, void **pointer); +typedef GLboolean (APIENTRYP PFNGLISPROGRAMNVPROC) (GLuint id); +typedef void (APIENTRYP PFNGLLOADPROGRAMNVPROC) (GLenum target, GLuint id, GLsizei len, const GLubyte *program); +typedef void (APIENTRYP PFNGLPROGRAMPARAMETER4DNVPROC) (GLenum target, GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLPROGRAMPARAMETER4DVNVPROC) (GLenum target, GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLPROGRAMPARAMETER4FNVPROC) (GLenum target, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLPROGRAMPARAMETER4FVNVPROC) (GLenum target, GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLPROGRAMPARAMETERS4DVNVPROC) (GLenum target, GLuint index, GLsizei count, const GLdouble *v); +typedef void (APIENTRYP PFNGLPROGRAMPARAMETERS4FVNVPROC) (GLenum target, GLuint index, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLREQUESTRESIDENTPROGRAMSNVPROC) (GLsizei n, const GLuint *programs); +typedef void (APIENTRYP PFNGLTRACKMATRIXNVPROC) (GLenum target, GLuint address, GLenum matrix, GLenum transform); +typedef void (APIENTRYP PFNGLVERTEXATTRIBPOINTERNVPROC) (GLuint index, GLint fsize, GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1DNVPROC) (GLuint index, GLdouble x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1DVNVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1FNVPROC) (GLuint index, GLfloat x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1FVNVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1SNVPROC) (GLuint index, GLshort x); +typedef void (APIENTRYP PFNGLVERTEXATTRIB1SVNVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2DNVPROC) (GLuint index, GLdouble x, GLdouble y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2DVNVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2FNVPROC) (GLuint index, GLfloat x, GLfloat y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2FVNVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2SNVPROC) (GLuint index, GLshort x, GLshort y); +typedef void (APIENTRYP PFNGLVERTEXATTRIB2SVNVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3DNVPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3DVNVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3FNVPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3FVNVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3SNVPROC) (GLuint index, GLshort x, GLshort y, GLshort z); +typedef void (APIENTRYP PFNGLVERTEXATTRIB3SVNVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4DNVPROC) (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4DVNVPROC) (GLuint index, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4FNVPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat 
w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4FVNVPROC) (GLuint index, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4SNVPROC) (GLuint index, GLshort x, GLshort y, GLshort z, GLshort w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4SVNVPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4UBNVPROC) (GLuint index, GLubyte x, GLubyte y, GLubyte z, GLubyte w); +typedef void (APIENTRYP PFNGLVERTEXATTRIB4UBVNVPROC) (GLuint index, const GLubyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS1DVNVPROC) (GLuint index, GLsizei count, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS1FVNVPROC) (GLuint index, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS1SVNVPROC) (GLuint index, GLsizei count, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS2DVNVPROC) (GLuint index, GLsizei count, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS2FVNVPROC) (GLuint index, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS2SVNVPROC) (GLuint index, GLsizei count, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS3DVNVPROC) (GLuint index, GLsizei count, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS3FVNVPROC) (GLuint index, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS3SVNVPROC) (GLuint index, GLsizei count, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS4DVNVPROC) (GLuint index, GLsizei count, const GLdouble *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS4FVNVPROC) (GLuint index, GLsizei count, const GLfloat *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS4SVNVPROC) (GLuint index, GLsizei count, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBS4UBVNVPROC) (GLuint index, GLsizei count, const GLubyte *v); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLboolean APIENTRY glAreProgramsResidentNV (GLsizei n, const GLuint *programs, GLboolean *residences); +GLAPI void APIENTRY glBindProgramNV (GLenum target, GLuint id); +GLAPI void APIENTRY glDeleteProgramsNV (GLsizei n, const GLuint *programs); +GLAPI void APIENTRY glExecuteProgramNV (GLenum target, GLuint id, const GLfloat *params); +GLAPI void APIENTRY glGenProgramsNV (GLsizei n, GLuint *programs); +GLAPI void APIENTRY glGetProgramParameterdvNV (GLenum target, GLuint index, GLenum pname, GLdouble *params); +GLAPI void APIENTRY glGetProgramParameterfvNV (GLenum target, GLuint index, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetProgramivNV (GLuint id, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetProgramStringNV (GLuint id, GLenum pname, GLubyte *program); +GLAPI void APIENTRY glGetTrackMatrixivNV (GLenum target, GLuint address, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetVertexAttribdvNV (GLuint index, GLenum pname, GLdouble *params); +GLAPI void APIENTRY glGetVertexAttribfvNV (GLuint index, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetVertexAttribivNV (GLuint index, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetVertexAttribPointervNV (GLuint index, GLenum pname, void **pointer); +GLAPI GLboolean APIENTRY glIsProgramNV (GLuint id); +GLAPI void APIENTRY glLoadProgramNV (GLenum target, GLuint id, GLsizei len, const GLubyte *program); +GLAPI void APIENTRY glProgramParameter4dNV (GLenum target, GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glProgramParameter4dvNV (GLenum target, GLuint index, const GLdouble *v); +GLAPI void APIENTRY glProgramParameter4fNV (GLenum 
target, GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glProgramParameter4fvNV (GLenum target, GLuint index, const GLfloat *v); +GLAPI void APIENTRY glProgramParameters4dvNV (GLenum target, GLuint index, GLsizei count, const GLdouble *v); +GLAPI void APIENTRY glProgramParameters4fvNV (GLenum target, GLuint index, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glRequestResidentProgramsNV (GLsizei n, const GLuint *programs); +GLAPI void APIENTRY glTrackMatrixNV (GLenum target, GLuint address, GLenum matrix, GLenum transform); +GLAPI void APIENTRY glVertexAttribPointerNV (GLuint index, GLint fsize, GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glVertexAttrib1dNV (GLuint index, GLdouble x); +GLAPI void APIENTRY glVertexAttrib1dvNV (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib1fNV (GLuint index, GLfloat x); +GLAPI void APIENTRY glVertexAttrib1fvNV (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib1sNV (GLuint index, GLshort x); +GLAPI void APIENTRY glVertexAttrib1svNV (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib2dNV (GLuint index, GLdouble x, GLdouble y); +GLAPI void APIENTRY glVertexAttrib2dvNV (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib2fNV (GLuint index, GLfloat x, GLfloat y); +GLAPI void APIENTRY glVertexAttrib2fvNV (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib2sNV (GLuint index, GLshort x, GLshort y); +GLAPI void APIENTRY glVertexAttrib2svNV (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib3dNV (GLuint index, GLdouble x, GLdouble y, GLdouble z); +GLAPI void APIENTRY glVertexAttrib3dvNV (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib3fNV (GLuint index, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glVertexAttrib3fvNV (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib3sNV (GLuint index, GLshort x, GLshort y, GLshort z); +GLAPI void APIENTRY glVertexAttrib3svNV (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib4dNV (GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w); +GLAPI void APIENTRY glVertexAttrib4dvNV (GLuint index, const GLdouble *v); +GLAPI void APIENTRY glVertexAttrib4fNV (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glVertexAttrib4fvNV (GLuint index, const GLfloat *v); +GLAPI void APIENTRY glVertexAttrib4sNV (GLuint index, GLshort x, GLshort y, GLshort z, GLshort w); +GLAPI void APIENTRY glVertexAttrib4svNV (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttrib4ubNV (GLuint index, GLubyte x, GLubyte y, GLubyte z, GLubyte w); +GLAPI void APIENTRY glVertexAttrib4ubvNV (GLuint index, const GLubyte *v); +GLAPI void APIENTRY glVertexAttribs1dvNV (GLuint index, GLsizei count, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribs1fvNV (GLuint index, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glVertexAttribs1svNV (GLuint index, GLsizei count, const GLshort *v); +GLAPI void APIENTRY glVertexAttribs2dvNV (GLuint index, GLsizei count, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribs2fvNV (GLuint index, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glVertexAttribs2svNV (GLuint index, GLsizei count, const GLshort *v); +GLAPI void APIENTRY glVertexAttribs3dvNV (GLuint index, GLsizei count, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribs3fvNV (GLuint index, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY 
glVertexAttribs3svNV (GLuint index, GLsizei count, const GLshort *v); +GLAPI void APIENTRY glVertexAttribs4dvNV (GLuint index, GLsizei count, const GLdouble *v); +GLAPI void APIENTRY glVertexAttribs4fvNV (GLuint index, GLsizei count, const GLfloat *v); +GLAPI void APIENTRY glVertexAttribs4svNV (GLuint index, GLsizei count, const GLshort *v); +GLAPI void APIENTRY glVertexAttribs4ubvNV (GLuint index, GLsizei count, const GLubyte *v); +#endif +#endif /* GL_NV_vertex_program */ + +#ifndef GL_NV_vertex_program1_1 +#define GL_NV_vertex_program1_1 1 +#endif /* GL_NV_vertex_program1_1 */ + +#ifndef GL_NV_vertex_program2 +#define GL_NV_vertex_program2 1 +#endif /* GL_NV_vertex_program2 */ + +#ifndef GL_NV_vertex_program2_option +#define GL_NV_vertex_program2_option 1 +#endif /* GL_NV_vertex_program2_option */ + +#ifndef GL_NV_vertex_program3 +#define GL_NV_vertex_program3 1 +#endif /* GL_NV_vertex_program3 */ + +#ifndef GL_NV_vertex_program4 +#define GL_NV_vertex_program4 1 +#define GL_VERTEX_ATTRIB_ARRAY_INTEGER_NV 0x88FD +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1IEXTPROC) (GLuint index, GLint x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2IEXTPROC) (GLuint index, GLint x, GLint y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3IEXTPROC) (GLuint index, GLint x, GLint y, GLint z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4IEXTPROC) (GLuint index, GLint x, GLint y, GLint z, GLint w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1UIEXTPROC) (GLuint index, GLuint x); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2UIEXTPROC) (GLuint index, GLuint x, GLuint y); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3UIEXTPROC) (GLuint index, GLuint x, GLuint y, GLuint z); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UIEXTPROC) (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1IVEXTPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2IVEXTPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3IVEXTPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4IVEXTPROC) (GLuint index, const GLint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI1UIVEXTPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI2UIVEXTPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI3UIVEXTPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UIVEXTPROC) (GLuint index, const GLuint *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4BVEXTPROC) (GLuint index, const GLbyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4SVEXTPROC) (GLuint index, const GLshort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UBVEXTPROC) (GLuint index, const GLubyte *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBI4USVEXTPROC) (GLuint index, const GLushort *v); +typedef void (APIENTRYP PFNGLVERTEXATTRIBIPOINTEREXTPROC) (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIIVEXTPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIUIVEXTPROC) (GLuint index, GLenum pname, GLuint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glVertexAttribI1iEXT (GLuint index, GLint x); +GLAPI void APIENTRY glVertexAttribI2iEXT (GLuint index, GLint x, GLint y); +GLAPI void APIENTRY glVertexAttribI3iEXT (GLuint index, GLint x, GLint y, GLint z); +GLAPI void APIENTRY glVertexAttribI4iEXT (GLuint index, GLint x, GLint y, GLint z, GLint w); +GLAPI void 
APIENTRY glVertexAttribI1uiEXT (GLuint index, GLuint x); +GLAPI void APIENTRY glVertexAttribI2uiEXT (GLuint index, GLuint x, GLuint y); +GLAPI void APIENTRY glVertexAttribI3uiEXT (GLuint index, GLuint x, GLuint y, GLuint z); +GLAPI void APIENTRY glVertexAttribI4uiEXT (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GLAPI void APIENTRY glVertexAttribI1ivEXT (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI2ivEXT (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI3ivEXT (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI4ivEXT (GLuint index, const GLint *v); +GLAPI void APIENTRY glVertexAttribI1uivEXT (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI2uivEXT (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI3uivEXT (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI4uivEXT (GLuint index, const GLuint *v); +GLAPI void APIENTRY glVertexAttribI4bvEXT (GLuint index, const GLbyte *v); +GLAPI void APIENTRY glVertexAttribI4svEXT (GLuint index, const GLshort *v); +GLAPI void APIENTRY glVertexAttribI4ubvEXT (GLuint index, const GLubyte *v); +GLAPI void APIENTRY glVertexAttribI4usvEXT (GLuint index, const GLushort *v); +GLAPI void APIENTRY glVertexAttribIPointerEXT (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +GLAPI void APIENTRY glGetVertexAttribIivEXT (GLuint index, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetVertexAttribIuivEXT (GLuint index, GLenum pname, GLuint *params); +#endif +#endif /* GL_NV_vertex_program4 */ + +#ifndef GL_NV_video_capture +#define GL_NV_video_capture 1 +#define GL_VIDEO_BUFFER_NV 0x9020 +#define GL_VIDEO_BUFFER_BINDING_NV 0x9021 +#define GL_FIELD_UPPER_NV 0x9022 +#define GL_FIELD_LOWER_NV 0x9023 +#define GL_NUM_VIDEO_CAPTURE_STREAMS_NV 0x9024 +#define GL_NEXT_VIDEO_CAPTURE_BUFFER_STATUS_NV 0x9025 +#define GL_VIDEO_CAPTURE_TO_422_SUPPORTED_NV 0x9026 +#define GL_LAST_VIDEO_CAPTURE_STATUS_NV 0x9027 +#define GL_VIDEO_BUFFER_PITCH_NV 0x9028 +#define GL_VIDEO_COLOR_CONVERSION_MATRIX_NV 0x9029 +#define GL_VIDEO_COLOR_CONVERSION_MAX_NV 0x902A +#define GL_VIDEO_COLOR_CONVERSION_MIN_NV 0x902B +#define GL_VIDEO_COLOR_CONVERSION_OFFSET_NV 0x902C +#define GL_VIDEO_BUFFER_INTERNAL_FORMAT_NV 0x902D +#define GL_PARTIAL_SUCCESS_NV 0x902E +#define GL_SUCCESS_NV 0x902F +#define GL_FAILURE_NV 0x9030 +#define GL_YCBYCR8_422_NV 0x9031 +#define GL_YCBAYCR8A_4224_NV 0x9032 +#define GL_Z6Y10Z6CB10Z6Y10Z6CR10_422_NV 0x9033 +#define GL_Z6Y10Z6CB10Z6A10Z6Y10Z6CR10Z6A10_4224_NV 0x9034 +#define GL_Z4Y12Z4CB12Z4Y12Z4CR12_422_NV 0x9035 +#define GL_Z4Y12Z4CB12Z4A12Z4Y12Z4CR12Z4A12_4224_NV 0x9036 +#define GL_Z4Y12Z4CB12Z4CR12_444_NV 0x9037 +#define GL_VIDEO_CAPTURE_FRAME_WIDTH_NV 0x9038 +#define GL_VIDEO_CAPTURE_FRAME_HEIGHT_NV 0x9039 +#define GL_VIDEO_CAPTURE_FIELD_UPPER_HEIGHT_NV 0x903A +#define GL_VIDEO_CAPTURE_FIELD_LOWER_HEIGHT_NV 0x903B +#define GL_VIDEO_CAPTURE_SURFACE_ORIGIN_NV 0x903C +typedef void (APIENTRYP PFNGLBEGINVIDEOCAPTURENVPROC) (GLuint video_capture_slot); +typedef void (APIENTRYP PFNGLBINDVIDEOCAPTURESTREAMBUFFERNVPROC) (GLuint video_capture_slot, GLuint stream, GLenum frame_region, GLintptrARB offset); +typedef void (APIENTRYP PFNGLBINDVIDEOCAPTURESTREAMTEXTURENVPROC) (GLuint video_capture_slot, GLuint stream, GLenum frame_region, GLenum target, GLuint texture); +typedef void (APIENTRYP PFNGLENDVIDEOCAPTURENVPROC) (GLuint video_capture_slot); +typedef void (APIENTRYP PFNGLGETVIDEOCAPTUREIVNVPROC) (GLuint video_capture_slot, 
GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETVIDEOCAPTURESTREAMIVNVPROC) (GLuint video_capture_slot, GLuint stream, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETVIDEOCAPTURESTREAMFVNVPROC) (GLuint video_capture_slot, GLuint stream, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETVIDEOCAPTURESTREAMDVNVPROC) (GLuint video_capture_slot, GLuint stream, GLenum pname, GLdouble *params); +typedef GLenum (APIENTRYP PFNGLVIDEOCAPTURENVPROC) (GLuint video_capture_slot, GLuint *sequence_num, GLuint64EXT *capture_time); +typedef void (APIENTRYP PFNGLVIDEOCAPTURESTREAMPARAMETERIVNVPROC) (GLuint video_capture_slot, GLuint stream, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLVIDEOCAPTURESTREAMPARAMETERFVNVPROC) (GLuint video_capture_slot, GLuint stream, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLVIDEOCAPTURESTREAMPARAMETERDVNVPROC) (GLuint video_capture_slot, GLuint stream, GLenum pname, const GLdouble *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glBeginVideoCaptureNV (GLuint video_capture_slot); +GLAPI void APIENTRY glBindVideoCaptureStreamBufferNV (GLuint video_capture_slot, GLuint stream, GLenum frame_region, GLintptrARB offset); +GLAPI void APIENTRY glBindVideoCaptureStreamTextureNV (GLuint video_capture_slot, GLuint stream, GLenum frame_region, GLenum target, GLuint texture); +GLAPI void APIENTRY glEndVideoCaptureNV (GLuint video_capture_slot); +GLAPI void APIENTRY glGetVideoCaptureivNV (GLuint video_capture_slot, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetVideoCaptureStreamivNV (GLuint video_capture_slot, GLuint stream, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetVideoCaptureStreamfvNV (GLuint video_capture_slot, GLuint stream, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetVideoCaptureStreamdvNV (GLuint video_capture_slot, GLuint stream, GLenum pname, GLdouble *params); +GLAPI GLenum APIENTRY glVideoCaptureNV (GLuint video_capture_slot, GLuint *sequence_num, GLuint64EXT *capture_time); +GLAPI void APIENTRY glVideoCaptureStreamParameterivNV (GLuint video_capture_slot, GLuint stream, GLenum pname, const GLint *params); +GLAPI void APIENTRY glVideoCaptureStreamParameterfvNV (GLuint video_capture_slot, GLuint stream, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glVideoCaptureStreamParameterdvNV (GLuint video_capture_slot, GLuint stream, GLenum pname, const GLdouble *params); +#endif +#endif /* GL_NV_video_capture */ + +#ifndef GL_NV_viewport_array2 +#define GL_NV_viewport_array2 1 +#endif /* GL_NV_viewport_array2 */ + +#ifndef GL_NV_viewport_swizzle +#define GL_NV_viewport_swizzle 1 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_X_NV 0x9350 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_X_NV 0x9351 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_Y_NV 0x9352 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_Y_NV 0x9353 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_Z_NV 0x9354 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_Z_NV 0x9355 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_W_NV 0x9356 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_W_NV 0x9357 +#define GL_VIEWPORT_SWIZZLE_X_NV 0x9358 +#define GL_VIEWPORT_SWIZZLE_Y_NV 0x9359 +#define GL_VIEWPORT_SWIZZLE_Z_NV 0x935A +#define GL_VIEWPORT_SWIZZLE_W_NV 0x935B +typedef void (APIENTRYP PFNGLVIEWPORTSWIZZLENVPROC) (GLuint index, GLenum swizzlex, GLenum swizzley, GLenum swizzlez, GLenum swizzlew); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glViewportSwizzleNV (GLuint index, GLenum swizzlex, GLenum swizzley, GLenum swizzlez, GLenum swizzlew); +#endif +#endif /* 
GL_NV_viewport_swizzle */ + +#ifndef GL_OML_interlace +#define GL_OML_interlace 1 +#define GL_INTERLACE_OML 0x8980 +#define GL_INTERLACE_READ_OML 0x8981 +#endif /* GL_OML_interlace */ + +#ifndef GL_OML_resample +#define GL_OML_resample 1 +#define GL_PACK_RESAMPLE_OML 0x8984 +#define GL_UNPACK_RESAMPLE_OML 0x8985 +#define GL_RESAMPLE_REPLICATE_OML 0x8986 +#define GL_RESAMPLE_ZERO_FILL_OML 0x8987 +#define GL_RESAMPLE_AVERAGE_OML 0x8988 +#define GL_RESAMPLE_DECIMATE_OML 0x8989 +#endif /* GL_OML_resample */ + +#ifndef GL_OML_subsample +#define GL_OML_subsample 1 +#define GL_FORMAT_SUBSAMPLE_24_24_OML 0x8982 +#define GL_FORMAT_SUBSAMPLE_244_244_OML 0x8983 +#endif /* GL_OML_subsample */ + +#ifndef GL_OVR_multiview +#define GL_OVR_multiview 1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_NUM_VIEWS_OVR 0x9630 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_BASE_VIEW_INDEX_OVR 0x9632 +#define GL_MAX_VIEWS_OVR 0x9631 +#define GL_FRAMEBUFFER_INCOMPLETE_VIEW_TARGETS_OVR 0x9633 +typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint baseViewIndex, GLsizei numViews); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFramebufferTextureMultiviewOVR (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint baseViewIndex, GLsizei numViews); +#endif +#endif /* GL_OVR_multiview */ + +#ifndef GL_OVR_multiview2 +#define GL_OVR_multiview2 1 +#endif /* GL_OVR_multiview2 */ + +#ifndef GL_PGI_misc_hints +#define GL_PGI_misc_hints 1 +#define GL_PREFER_DOUBLEBUFFER_HINT_PGI 0x1A1F8 +#define GL_CONSERVE_MEMORY_HINT_PGI 0x1A1FD +#define GL_RECLAIM_MEMORY_HINT_PGI 0x1A1FE +#define GL_NATIVE_GRAPHICS_HANDLE_PGI 0x1A202 +#define GL_NATIVE_GRAPHICS_BEGIN_HINT_PGI 0x1A203 +#define GL_NATIVE_GRAPHICS_END_HINT_PGI 0x1A204 +#define GL_ALWAYS_FAST_HINT_PGI 0x1A20C +#define GL_ALWAYS_SOFT_HINT_PGI 0x1A20D +#define GL_ALLOW_DRAW_OBJ_HINT_PGI 0x1A20E +#define GL_ALLOW_DRAW_WIN_HINT_PGI 0x1A20F +#define GL_ALLOW_DRAW_FRG_HINT_PGI 0x1A210 +#define GL_ALLOW_DRAW_MEM_HINT_PGI 0x1A211 +#define GL_STRICT_DEPTHFUNC_HINT_PGI 0x1A216 +#define GL_STRICT_LIGHTING_HINT_PGI 0x1A217 +#define GL_STRICT_SCISSOR_HINT_PGI 0x1A218 +#define GL_FULL_STIPPLE_HINT_PGI 0x1A219 +#define GL_CLIP_NEAR_HINT_PGI 0x1A220 +#define GL_CLIP_FAR_HINT_PGI 0x1A221 +#define GL_WIDE_LINE_HINT_PGI 0x1A222 +#define GL_BACK_NORMALS_HINT_PGI 0x1A223 +typedef void (APIENTRYP PFNGLHINTPGIPROC) (GLenum target, GLint mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glHintPGI (GLenum target, GLint mode); +#endif +#endif /* GL_PGI_misc_hints */ + +#ifndef GL_PGI_vertex_hints +#define GL_PGI_vertex_hints 1 +#define GL_VERTEX_DATA_HINT_PGI 0x1A22A +#define GL_VERTEX_CONSISTENT_HINT_PGI 0x1A22B +#define GL_MATERIAL_SIDE_HINT_PGI 0x1A22C +#define GL_MAX_VERTEX_HINT_PGI 0x1A22D +#define GL_COLOR3_BIT_PGI 0x00010000 +#define GL_COLOR4_BIT_PGI 0x00020000 +#define GL_EDGEFLAG_BIT_PGI 0x00040000 +#define GL_INDEX_BIT_PGI 0x00080000 +#define GL_MAT_AMBIENT_BIT_PGI 0x00100000 +#define GL_MAT_AMBIENT_AND_DIFFUSE_BIT_PGI 0x00200000 +#define GL_MAT_DIFFUSE_BIT_PGI 0x00400000 +#define GL_MAT_EMISSION_BIT_PGI 0x00800000 +#define GL_MAT_COLOR_INDEXES_BIT_PGI 0x01000000 +#define GL_MAT_SHININESS_BIT_PGI 0x02000000 +#define GL_MAT_SPECULAR_BIT_PGI 0x04000000 +#define GL_NORMAL_BIT_PGI 0x08000000 +#define GL_TEXCOORD1_BIT_PGI 0x10000000 +#define GL_TEXCOORD2_BIT_PGI 0x20000000 +#define GL_TEXCOORD3_BIT_PGI 0x40000000 +#define GL_TEXCOORD4_BIT_PGI 0x80000000 +#define GL_VERTEX23_BIT_PGI 0x00000004 +#define 
GL_VERTEX4_BIT_PGI 0x00000008 +#endif /* GL_PGI_vertex_hints */ + +#ifndef GL_REND_screen_coordinates +#define GL_REND_screen_coordinates 1 +#define GL_SCREEN_COORDINATES_REND 0x8490 +#define GL_INVERTED_SCREEN_W_REND 0x8491 +#endif /* GL_REND_screen_coordinates */ + +#ifndef GL_S3_s3tc +#define GL_S3_s3tc 1 +#define GL_RGB_S3TC 0x83A0 +#define GL_RGB4_S3TC 0x83A1 +#define GL_RGBA_S3TC 0x83A2 +#define GL_RGBA4_S3TC 0x83A3 +#define GL_RGBA_DXT5_S3TC 0x83A4 +#define GL_RGBA4_DXT5_S3TC 0x83A5 +#endif /* GL_S3_s3tc */ + +#ifndef GL_SGIS_detail_texture +#define GL_SGIS_detail_texture 1 +#define GL_DETAIL_TEXTURE_2D_SGIS 0x8095 +#define GL_DETAIL_TEXTURE_2D_BINDING_SGIS 0x8096 +#define GL_LINEAR_DETAIL_SGIS 0x8097 +#define GL_LINEAR_DETAIL_ALPHA_SGIS 0x8098 +#define GL_LINEAR_DETAIL_COLOR_SGIS 0x8099 +#define GL_DETAIL_TEXTURE_LEVEL_SGIS 0x809A +#define GL_DETAIL_TEXTURE_MODE_SGIS 0x809B +#define GL_DETAIL_TEXTURE_FUNC_POINTS_SGIS 0x809C +typedef void (APIENTRYP PFNGLDETAILTEXFUNCSGISPROC) (GLenum target, GLsizei n, const GLfloat *points); +typedef void (APIENTRYP PFNGLGETDETAILTEXFUNCSGISPROC) (GLenum target, GLfloat *points); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDetailTexFuncSGIS (GLenum target, GLsizei n, const GLfloat *points); +GLAPI void APIENTRY glGetDetailTexFuncSGIS (GLenum target, GLfloat *points); +#endif +#endif /* GL_SGIS_detail_texture */ + +#ifndef GL_SGIS_fog_function +#define GL_SGIS_fog_function 1 +#define GL_FOG_FUNC_SGIS 0x812A +#define GL_FOG_FUNC_POINTS_SGIS 0x812B +#define GL_MAX_FOG_FUNC_POINTS_SGIS 0x812C +typedef void (APIENTRYP PFNGLFOGFUNCSGISPROC) (GLsizei n, const GLfloat *points); +typedef void (APIENTRYP PFNGLGETFOGFUNCSGISPROC) (GLfloat *points); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFogFuncSGIS (GLsizei n, const GLfloat *points); +GLAPI void APIENTRY glGetFogFuncSGIS (GLfloat *points); +#endif +#endif /* GL_SGIS_fog_function */ + +#ifndef GL_SGIS_generate_mipmap +#define GL_SGIS_generate_mipmap 1 +#define GL_GENERATE_MIPMAP_SGIS 0x8191 +#define GL_GENERATE_MIPMAP_HINT_SGIS 0x8192 +#endif /* GL_SGIS_generate_mipmap */ + +#ifndef GL_SGIS_multisample +#define GL_SGIS_multisample 1 +#define GL_MULTISAMPLE_SGIS 0x809D +#define GL_SAMPLE_ALPHA_TO_MASK_SGIS 0x809E +#define GL_SAMPLE_ALPHA_TO_ONE_SGIS 0x809F +#define GL_SAMPLE_MASK_SGIS 0x80A0 +#define GL_1PASS_SGIS 0x80A1 +#define GL_2PASS_0_SGIS 0x80A2 +#define GL_2PASS_1_SGIS 0x80A3 +#define GL_4PASS_0_SGIS 0x80A4 +#define GL_4PASS_1_SGIS 0x80A5 +#define GL_4PASS_2_SGIS 0x80A6 +#define GL_4PASS_3_SGIS 0x80A7 +#define GL_SAMPLE_BUFFERS_SGIS 0x80A8 +#define GL_SAMPLES_SGIS 0x80A9 +#define GL_SAMPLE_MASK_VALUE_SGIS 0x80AA +#define GL_SAMPLE_MASK_INVERT_SGIS 0x80AB +#define GL_SAMPLE_PATTERN_SGIS 0x80AC +typedef void (APIENTRYP PFNGLSAMPLEMASKSGISPROC) (GLclampf value, GLboolean invert); +typedef void (APIENTRYP PFNGLSAMPLEPATTERNSGISPROC) (GLenum pattern); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSampleMaskSGIS (GLclampf value, GLboolean invert); +GLAPI void APIENTRY glSamplePatternSGIS (GLenum pattern); +#endif +#endif /* GL_SGIS_multisample */ + +#ifndef GL_SGIS_pixel_texture +#define GL_SGIS_pixel_texture 1 +#define GL_PIXEL_TEXTURE_SGIS 0x8353 +#define GL_PIXEL_FRAGMENT_RGB_SOURCE_SGIS 0x8354 +#define GL_PIXEL_FRAGMENT_ALPHA_SOURCE_SGIS 0x8355 +#define GL_PIXEL_GROUP_COLOR_SGIS 0x8356 +typedef void (APIENTRYP PFNGLPIXELTEXGENPARAMETERISGISPROC) (GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLPIXELTEXGENPARAMETERIVSGISPROC) (GLenum pname, const GLint *params); +typedef 
void (APIENTRYP PFNGLPIXELTEXGENPARAMETERFSGISPROC) (GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLPIXELTEXGENPARAMETERFVSGISPROC) (GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLGETPIXELTEXGENPARAMETERIVSGISPROC) (GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETPIXELTEXGENPARAMETERFVSGISPROC) (GLenum pname, GLfloat *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPixelTexGenParameteriSGIS (GLenum pname, GLint param); +GLAPI void APIENTRY glPixelTexGenParameterivSGIS (GLenum pname, const GLint *params); +GLAPI void APIENTRY glPixelTexGenParameterfSGIS (GLenum pname, GLfloat param); +GLAPI void APIENTRY glPixelTexGenParameterfvSGIS (GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glGetPixelTexGenParameterivSGIS (GLenum pname, GLint *params); +GLAPI void APIENTRY glGetPixelTexGenParameterfvSGIS (GLenum pname, GLfloat *params); +#endif +#endif /* GL_SGIS_pixel_texture */ + +#ifndef GL_SGIS_point_line_texgen +#define GL_SGIS_point_line_texgen 1 +#define GL_EYE_DISTANCE_TO_POINT_SGIS 0x81F0 +#define GL_OBJECT_DISTANCE_TO_POINT_SGIS 0x81F1 +#define GL_EYE_DISTANCE_TO_LINE_SGIS 0x81F2 +#define GL_OBJECT_DISTANCE_TO_LINE_SGIS 0x81F3 +#define GL_EYE_POINT_SGIS 0x81F4 +#define GL_OBJECT_POINT_SGIS 0x81F5 +#define GL_EYE_LINE_SGIS 0x81F6 +#define GL_OBJECT_LINE_SGIS 0x81F7 +#endif /* GL_SGIS_point_line_texgen */ + +#ifndef GL_SGIS_point_parameters +#define GL_SGIS_point_parameters 1 +#define GL_POINT_SIZE_MIN_SGIS 0x8126 +#define GL_POINT_SIZE_MAX_SGIS 0x8127 +#define GL_POINT_FADE_THRESHOLD_SIZE_SGIS 0x8128 +#define GL_DISTANCE_ATTENUATION_SGIS 0x8129 +typedef void (APIENTRYP PFNGLPOINTPARAMETERFSGISPROC) (GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLPOINTPARAMETERFVSGISPROC) (GLenum pname, const GLfloat *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPointParameterfSGIS (GLenum pname, GLfloat param); +GLAPI void APIENTRY glPointParameterfvSGIS (GLenum pname, const GLfloat *params); +#endif +#endif /* GL_SGIS_point_parameters */ + +#ifndef GL_SGIS_sharpen_texture +#define GL_SGIS_sharpen_texture 1 +#define GL_LINEAR_SHARPEN_SGIS 0x80AD +#define GL_LINEAR_SHARPEN_ALPHA_SGIS 0x80AE +#define GL_LINEAR_SHARPEN_COLOR_SGIS 0x80AF +#define GL_SHARPEN_TEXTURE_FUNC_POINTS_SGIS 0x80B0 +typedef void (APIENTRYP PFNGLSHARPENTEXFUNCSGISPROC) (GLenum target, GLsizei n, const GLfloat *points); +typedef void (APIENTRYP PFNGLGETSHARPENTEXFUNCSGISPROC) (GLenum target, GLfloat *points); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSharpenTexFuncSGIS (GLenum target, GLsizei n, const GLfloat *points); +GLAPI void APIENTRY glGetSharpenTexFuncSGIS (GLenum target, GLfloat *points); +#endif +#endif /* GL_SGIS_sharpen_texture */ + +#ifndef GL_SGIS_texture4D +#define GL_SGIS_texture4D 1 +#define GL_PACK_SKIP_VOLUMES_SGIS 0x8130 +#define GL_PACK_IMAGE_DEPTH_SGIS 0x8131 +#define GL_UNPACK_SKIP_VOLUMES_SGIS 0x8132 +#define GL_UNPACK_IMAGE_DEPTH_SGIS 0x8133 +#define GL_TEXTURE_4D_SGIS 0x8134 +#define GL_PROXY_TEXTURE_4D_SGIS 0x8135 +#define GL_TEXTURE_4DSIZE_SGIS 0x8136 +#define GL_TEXTURE_WRAP_Q_SGIS 0x8137 +#define GL_MAX_4D_TEXTURE_SIZE_SGIS 0x8138 +#define GL_TEXTURE_4D_BINDING_SGIS 0x814F +typedef void (APIENTRYP PFNGLTEXIMAGE4DSGISPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLsizei size4d, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (APIENTRYP PFNGLTEXSUBIMAGE4DSGISPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, 
GLint zoffset, GLint woffset, GLsizei width, GLsizei height, GLsizei depth, GLsizei size4d, GLenum format, GLenum type, const void *pixels); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTexImage4DSGIS (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLsizei size4d, GLint border, GLenum format, GLenum type, const void *pixels); +GLAPI void APIENTRY glTexSubImage4DSGIS (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint woffset, GLsizei width, GLsizei height, GLsizei depth, GLsizei size4d, GLenum format, GLenum type, const void *pixels); +#endif +#endif /* GL_SGIS_texture4D */ + +#ifndef GL_SGIS_texture_border_clamp +#define GL_SGIS_texture_border_clamp 1 +#define GL_CLAMP_TO_BORDER_SGIS 0x812D +#endif /* GL_SGIS_texture_border_clamp */ + +#ifndef GL_SGIS_texture_color_mask +#define GL_SGIS_texture_color_mask 1 +#define GL_TEXTURE_COLOR_WRITEMASK_SGIS 0x81EF +typedef void (APIENTRYP PFNGLTEXTURECOLORMASKSGISPROC) (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTextureColorMaskSGIS (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +#endif +#endif /* GL_SGIS_texture_color_mask */ + +#ifndef GL_SGIS_texture_edge_clamp +#define GL_SGIS_texture_edge_clamp 1 +#define GL_CLAMP_TO_EDGE_SGIS 0x812F +#endif /* GL_SGIS_texture_edge_clamp */ + +#ifndef GL_SGIS_texture_filter4 +#define GL_SGIS_texture_filter4 1 +#define GL_FILTER4_SGIS 0x8146 +#define GL_TEXTURE_FILTER4_SIZE_SGIS 0x8147 +typedef void (APIENTRYP PFNGLGETTEXFILTERFUNCSGISPROC) (GLenum target, GLenum filter, GLfloat *weights); +typedef void (APIENTRYP PFNGLTEXFILTERFUNCSGISPROC) (GLenum target, GLenum filter, GLsizei n, const GLfloat *weights); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGetTexFilterFuncSGIS (GLenum target, GLenum filter, GLfloat *weights); +GLAPI void APIENTRY glTexFilterFuncSGIS (GLenum target, GLenum filter, GLsizei n, const GLfloat *weights); +#endif +#endif /* GL_SGIS_texture_filter4 */ + +#ifndef GL_SGIS_texture_lod +#define GL_SGIS_texture_lod 1 +#define GL_TEXTURE_MIN_LOD_SGIS 0x813A +#define GL_TEXTURE_MAX_LOD_SGIS 0x813B +#define GL_TEXTURE_BASE_LEVEL_SGIS 0x813C +#define GL_TEXTURE_MAX_LEVEL_SGIS 0x813D +#endif /* GL_SGIS_texture_lod */ + +#ifndef GL_SGIS_texture_select +#define GL_SGIS_texture_select 1 +#define GL_DUAL_ALPHA4_SGIS 0x8110 +#define GL_DUAL_ALPHA8_SGIS 0x8111 +#define GL_DUAL_ALPHA12_SGIS 0x8112 +#define GL_DUAL_ALPHA16_SGIS 0x8113 +#define GL_DUAL_LUMINANCE4_SGIS 0x8114 +#define GL_DUAL_LUMINANCE8_SGIS 0x8115 +#define GL_DUAL_LUMINANCE12_SGIS 0x8116 +#define GL_DUAL_LUMINANCE16_SGIS 0x8117 +#define GL_DUAL_INTENSITY4_SGIS 0x8118 +#define GL_DUAL_INTENSITY8_SGIS 0x8119 +#define GL_DUAL_INTENSITY12_SGIS 0x811A +#define GL_DUAL_INTENSITY16_SGIS 0x811B +#define GL_DUAL_LUMINANCE_ALPHA4_SGIS 0x811C +#define GL_DUAL_LUMINANCE_ALPHA8_SGIS 0x811D +#define GL_QUAD_ALPHA4_SGIS 0x811E +#define GL_QUAD_ALPHA8_SGIS 0x811F +#define GL_QUAD_LUMINANCE4_SGIS 0x8120 +#define GL_QUAD_LUMINANCE8_SGIS 0x8121 +#define GL_QUAD_INTENSITY4_SGIS 0x8122 +#define GL_QUAD_INTENSITY8_SGIS 0x8123 +#define GL_DUAL_TEXTURE_SELECT_SGIS 0x8124 +#define GL_QUAD_TEXTURE_SELECT_SGIS 0x8125 +#endif /* GL_SGIS_texture_select */ + +#ifndef GL_SGIX_async +#define GL_SGIX_async 1 +#define GL_ASYNC_MARKER_SGIX 0x8329 +typedef void (APIENTRYP PFNGLASYNCMARKERSGIXPROC) (GLuint marker); +typedef GLint (APIENTRYP PFNGLFINISHASYNCSGIXPROC) (GLuint *markerp); +typedef 
GLint (APIENTRYP PFNGLPOLLASYNCSGIXPROC) (GLuint *markerp); +typedef GLuint (APIENTRYP PFNGLGENASYNCMARKERSSGIXPROC) (GLsizei range); +typedef void (APIENTRYP PFNGLDELETEASYNCMARKERSSGIXPROC) (GLuint marker, GLsizei range); +typedef GLboolean (APIENTRYP PFNGLISASYNCMARKERSGIXPROC) (GLuint marker); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glAsyncMarkerSGIX (GLuint marker); +GLAPI GLint APIENTRY glFinishAsyncSGIX (GLuint *markerp); +GLAPI GLint APIENTRY glPollAsyncSGIX (GLuint *markerp); +GLAPI GLuint APIENTRY glGenAsyncMarkersSGIX (GLsizei range); +GLAPI void APIENTRY glDeleteAsyncMarkersSGIX (GLuint marker, GLsizei range); +GLAPI GLboolean APIENTRY glIsAsyncMarkerSGIX (GLuint marker); +#endif +#endif /* GL_SGIX_async */ + +#ifndef GL_SGIX_async_histogram +#define GL_SGIX_async_histogram 1 +#define GL_ASYNC_HISTOGRAM_SGIX 0x832C +#define GL_MAX_ASYNC_HISTOGRAM_SGIX 0x832D +#endif /* GL_SGIX_async_histogram */ + +#ifndef GL_SGIX_async_pixel +#define GL_SGIX_async_pixel 1 +#define GL_ASYNC_TEX_IMAGE_SGIX 0x835C +#define GL_ASYNC_DRAW_PIXELS_SGIX 0x835D +#define GL_ASYNC_READ_PIXELS_SGIX 0x835E +#define GL_MAX_ASYNC_TEX_IMAGE_SGIX 0x835F +#define GL_MAX_ASYNC_DRAW_PIXELS_SGIX 0x8360 +#define GL_MAX_ASYNC_READ_PIXELS_SGIX 0x8361 +#endif /* GL_SGIX_async_pixel */ + +#ifndef GL_SGIX_blend_alpha_minmax +#define GL_SGIX_blend_alpha_minmax 1 +#define GL_ALPHA_MIN_SGIX 0x8320 +#define GL_ALPHA_MAX_SGIX 0x8321 +#endif /* GL_SGIX_blend_alpha_minmax */ + +#ifndef GL_SGIX_calligraphic_fragment +#define GL_SGIX_calligraphic_fragment 1 +#define GL_CALLIGRAPHIC_FRAGMENT_SGIX 0x8183 +#endif /* GL_SGIX_calligraphic_fragment */ + +#ifndef GL_SGIX_clipmap +#define GL_SGIX_clipmap 1 +#define GL_LINEAR_CLIPMAP_LINEAR_SGIX 0x8170 +#define GL_TEXTURE_CLIPMAP_CENTER_SGIX 0x8171 +#define GL_TEXTURE_CLIPMAP_FRAME_SGIX 0x8172 +#define GL_TEXTURE_CLIPMAP_OFFSET_SGIX 0x8173 +#define GL_TEXTURE_CLIPMAP_VIRTUAL_DEPTH_SGIX 0x8174 +#define GL_TEXTURE_CLIPMAP_LOD_OFFSET_SGIX 0x8175 +#define GL_TEXTURE_CLIPMAP_DEPTH_SGIX 0x8176 +#define GL_MAX_CLIPMAP_DEPTH_SGIX 0x8177 +#define GL_MAX_CLIPMAP_VIRTUAL_DEPTH_SGIX 0x8178 +#define GL_NEAREST_CLIPMAP_NEAREST_SGIX 0x844D +#define GL_NEAREST_CLIPMAP_LINEAR_SGIX 0x844E +#define GL_LINEAR_CLIPMAP_NEAREST_SGIX 0x844F +#endif /* GL_SGIX_clipmap */ + +#ifndef GL_SGIX_convolution_accuracy +#define GL_SGIX_convolution_accuracy 1 +#define GL_CONVOLUTION_HINT_SGIX 0x8316 +#endif /* GL_SGIX_convolution_accuracy */ + +#ifndef GL_SGIX_depth_pass_instrument +#define GL_SGIX_depth_pass_instrument 1 +#endif /* GL_SGIX_depth_pass_instrument */ + +#ifndef GL_SGIX_depth_texture +#define GL_SGIX_depth_texture 1 +#define GL_DEPTH_COMPONENT16_SGIX 0x81A5 +#define GL_DEPTH_COMPONENT24_SGIX 0x81A6 +#define GL_DEPTH_COMPONENT32_SGIX 0x81A7 +#endif /* GL_SGIX_depth_texture */ + +#ifndef GL_SGIX_flush_raster +#define GL_SGIX_flush_raster 1 +typedef void (APIENTRYP PFNGLFLUSHRASTERSGIXPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFlushRasterSGIX (void); +#endif +#endif /* GL_SGIX_flush_raster */ + +#ifndef GL_SGIX_fog_offset +#define GL_SGIX_fog_offset 1 +#define GL_FOG_OFFSET_SGIX 0x8198 +#define GL_FOG_OFFSET_VALUE_SGIX 0x8199 +#endif /* GL_SGIX_fog_offset */ + +#ifndef GL_SGIX_fragment_lighting +#define GL_SGIX_fragment_lighting 1 +#define GL_FRAGMENT_LIGHTING_SGIX 0x8400 +#define GL_FRAGMENT_COLOR_MATERIAL_SGIX 0x8401 +#define GL_FRAGMENT_COLOR_MATERIAL_FACE_SGIX 0x8402 +#define GL_FRAGMENT_COLOR_MATERIAL_PARAMETER_SGIX 0x8403 +#define GL_MAX_FRAGMENT_LIGHTS_SGIX 0x8404 +#define 
GL_MAX_ACTIVE_LIGHTS_SGIX 0x8405 +#define GL_CURRENT_RASTER_NORMAL_SGIX 0x8406 +#define GL_LIGHT_ENV_MODE_SGIX 0x8407 +#define GL_FRAGMENT_LIGHT_MODEL_LOCAL_VIEWER_SGIX 0x8408 +#define GL_FRAGMENT_LIGHT_MODEL_TWO_SIDE_SGIX 0x8409 +#define GL_FRAGMENT_LIGHT_MODEL_AMBIENT_SGIX 0x840A +#define GL_FRAGMENT_LIGHT_MODEL_NORMAL_INTERPOLATION_SGIX 0x840B +#define GL_FRAGMENT_LIGHT0_SGIX 0x840C +#define GL_FRAGMENT_LIGHT1_SGIX 0x840D +#define GL_FRAGMENT_LIGHT2_SGIX 0x840E +#define GL_FRAGMENT_LIGHT3_SGIX 0x840F +#define GL_FRAGMENT_LIGHT4_SGIX 0x8410 +#define GL_FRAGMENT_LIGHT5_SGIX 0x8411 +#define GL_FRAGMENT_LIGHT6_SGIX 0x8412 +#define GL_FRAGMENT_LIGHT7_SGIX 0x8413 +typedef void (APIENTRYP PFNGLFRAGMENTCOLORMATERIALSGIXPROC) (GLenum face, GLenum mode); +typedef void (APIENTRYP PFNGLFRAGMENTLIGHTFSGIXPROC) (GLenum light, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLFRAGMENTLIGHTFVSGIXPROC) (GLenum light, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLFRAGMENTLIGHTISGIXPROC) (GLenum light, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLFRAGMENTLIGHTIVSGIXPROC) (GLenum light, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLFRAGMENTLIGHTMODELFSGIXPROC) (GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLFRAGMENTLIGHTMODELFVSGIXPROC) (GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLFRAGMENTLIGHTMODELISGIXPROC) (GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLFRAGMENTLIGHTMODELIVSGIXPROC) (GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLFRAGMENTMATERIALFSGIXPROC) (GLenum face, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLFRAGMENTMATERIALFVSGIXPROC) (GLenum face, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLFRAGMENTMATERIALISGIXPROC) (GLenum face, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLFRAGMENTMATERIALIVSGIXPROC) (GLenum face, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLGETFRAGMENTLIGHTFVSGIXPROC) (GLenum light, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETFRAGMENTLIGHTIVSGIXPROC) (GLenum light, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLGETFRAGMENTMATERIALFVSGIXPROC) (GLenum face, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETFRAGMENTMATERIALIVSGIXPROC) (GLenum face, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLLIGHTENVISGIXPROC) (GLenum pname, GLint param); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFragmentColorMaterialSGIX (GLenum face, GLenum mode); +GLAPI void APIENTRY glFragmentLightfSGIX (GLenum light, GLenum pname, GLfloat param); +GLAPI void APIENTRY glFragmentLightfvSGIX (GLenum light, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glFragmentLightiSGIX (GLenum light, GLenum pname, GLint param); +GLAPI void APIENTRY glFragmentLightivSGIX (GLenum light, GLenum pname, const GLint *params); +GLAPI void APIENTRY glFragmentLightModelfSGIX (GLenum pname, GLfloat param); +GLAPI void APIENTRY glFragmentLightModelfvSGIX (GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glFragmentLightModeliSGIX (GLenum pname, GLint param); +GLAPI void APIENTRY glFragmentLightModelivSGIX (GLenum pname, const GLint *params); +GLAPI void APIENTRY glFragmentMaterialfSGIX (GLenum face, GLenum pname, GLfloat param); +GLAPI void APIENTRY glFragmentMaterialfvSGIX (GLenum face, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glFragmentMaterialiSGIX (GLenum face, GLenum pname, GLint param); +GLAPI void APIENTRY 
glFragmentMaterialivSGIX (GLenum face, GLenum pname, const GLint *params); +GLAPI void APIENTRY glGetFragmentLightfvSGIX (GLenum light, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetFragmentLightivSGIX (GLenum light, GLenum pname, GLint *params); +GLAPI void APIENTRY glGetFragmentMaterialfvSGIX (GLenum face, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetFragmentMaterialivSGIX (GLenum face, GLenum pname, GLint *params); +GLAPI void APIENTRY glLightEnviSGIX (GLenum pname, GLint param); +#endif +#endif /* GL_SGIX_fragment_lighting */ + +#ifndef GL_SGIX_framezoom +#define GL_SGIX_framezoom 1 +#define GL_FRAMEZOOM_SGIX 0x818B +#define GL_FRAMEZOOM_FACTOR_SGIX 0x818C +#define GL_MAX_FRAMEZOOM_FACTOR_SGIX 0x818D +typedef void (APIENTRYP PFNGLFRAMEZOOMSGIXPROC) (GLint factor); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFrameZoomSGIX (GLint factor); +#endif +#endif /* GL_SGIX_framezoom */ + +#ifndef GL_SGIX_igloo_interface +#define GL_SGIX_igloo_interface 1 +typedef void (APIENTRYP PFNGLIGLOOINTERFACESGIXPROC) (GLenum pname, const void *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glIglooInterfaceSGIX (GLenum pname, const void *params); +#endif +#endif /* GL_SGIX_igloo_interface */ + +#ifndef GL_SGIX_instruments +#define GL_SGIX_instruments 1 +#define GL_INSTRUMENT_BUFFER_POINTER_SGIX 0x8180 +#define GL_INSTRUMENT_MEASUREMENTS_SGIX 0x8181 +typedef GLint (APIENTRYP PFNGLGETINSTRUMENTSSGIXPROC) (void); +typedef void (APIENTRYP PFNGLINSTRUMENTSBUFFERSGIXPROC) (GLsizei size, GLint *buffer); +typedef GLint (APIENTRYP PFNGLPOLLINSTRUMENTSSGIXPROC) (GLint *marker_p); +typedef void (APIENTRYP PFNGLREADINSTRUMENTSSGIXPROC) (GLint marker); +typedef void (APIENTRYP PFNGLSTARTINSTRUMENTSSGIXPROC) (void); +typedef void (APIENTRYP PFNGLSTOPINSTRUMENTSSGIXPROC) (GLint marker); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI GLint APIENTRY glGetInstrumentsSGIX (void); +GLAPI void APIENTRY glInstrumentsBufferSGIX (GLsizei size, GLint *buffer); +GLAPI GLint APIENTRY glPollInstrumentsSGIX (GLint *marker_p); +GLAPI void APIENTRY glReadInstrumentsSGIX (GLint marker); +GLAPI void APIENTRY glStartInstrumentsSGIX (void); +GLAPI void APIENTRY glStopInstrumentsSGIX (GLint marker); +#endif +#endif /* GL_SGIX_instruments */ + +#ifndef GL_SGIX_interlace +#define GL_SGIX_interlace 1 +#define GL_INTERLACE_SGIX 0x8094 +#endif /* GL_SGIX_interlace */ + +#ifndef GL_SGIX_ir_instrument1 +#define GL_SGIX_ir_instrument1 1 +#define GL_IR_INSTRUMENT1_SGIX 0x817F +#endif /* GL_SGIX_ir_instrument1 */ + +#ifndef GL_SGIX_list_priority +#define GL_SGIX_list_priority 1 +#define GL_LIST_PRIORITY_SGIX 0x8182 +typedef void (APIENTRYP PFNGLGETLISTPARAMETERFVSGIXPROC) (GLuint list, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETLISTPARAMETERIVSGIXPROC) (GLuint list, GLenum pname, GLint *params); +typedef void (APIENTRYP PFNGLLISTPARAMETERFSGIXPROC) (GLuint list, GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLLISTPARAMETERFVSGIXPROC) (GLuint list, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLLISTPARAMETERISGIXPROC) (GLuint list, GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLLISTPARAMETERIVSGIXPROC) (GLuint list, GLenum pname, const GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGetListParameterfvSGIX (GLuint list, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetListParameterivSGIX (GLuint list, GLenum pname, GLint *params); +GLAPI void APIENTRY glListParameterfSGIX (GLuint list, GLenum pname, GLfloat param); +GLAPI void 
APIENTRY glListParameterfvSGIX (GLuint list, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glListParameteriSGIX (GLuint list, GLenum pname, GLint param); +GLAPI void APIENTRY glListParameterivSGIX (GLuint list, GLenum pname, const GLint *params); +#endif +#endif /* GL_SGIX_list_priority */ + +#ifndef GL_SGIX_pixel_texture +#define GL_SGIX_pixel_texture 1 +#define GL_PIXEL_TEX_GEN_SGIX 0x8139 +#define GL_PIXEL_TEX_GEN_MODE_SGIX 0x832B +typedef void (APIENTRYP PFNGLPIXELTEXGENSGIXPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glPixelTexGenSGIX (GLenum mode); +#endif +#endif /* GL_SGIX_pixel_texture */ + +#ifndef GL_SGIX_pixel_tiles +#define GL_SGIX_pixel_tiles 1 +#define GL_PIXEL_TILE_BEST_ALIGNMENT_SGIX 0x813E +#define GL_PIXEL_TILE_CACHE_INCREMENT_SGIX 0x813F +#define GL_PIXEL_TILE_WIDTH_SGIX 0x8140 +#define GL_PIXEL_TILE_HEIGHT_SGIX 0x8141 +#define GL_PIXEL_TILE_GRID_WIDTH_SGIX 0x8142 +#define GL_PIXEL_TILE_GRID_HEIGHT_SGIX 0x8143 +#define GL_PIXEL_TILE_GRID_DEPTH_SGIX 0x8144 +#define GL_PIXEL_TILE_CACHE_SIZE_SGIX 0x8145 +#endif /* GL_SGIX_pixel_tiles */ + +#ifndef GL_SGIX_polynomial_ffd +#define GL_SGIX_polynomial_ffd 1 +#define GL_TEXTURE_DEFORMATION_BIT_SGIX 0x00000001 +#define GL_GEOMETRY_DEFORMATION_BIT_SGIX 0x00000002 +#define GL_GEOMETRY_DEFORMATION_SGIX 0x8194 +#define GL_TEXTURE_DEFORMATION_SGIX 0x8195 +#define GL_DEFORMATIONS_MASK_SGIX 0x8196 +#define GL_MAX_DEFORMATION_ORDER_SGIX 0x8197 +typedef void (APIENTRYP PFNGLDEFORMATIONMAP3DSGIXPROC) (GLenum target, GLdouble u1, GLdouble u2, GLint ustride, GLint uorder, GLdouble v1, GLdouble v2, GLint vstride, GLint vorder, GLdouble w1, GLdouble w2, GLint wstride, GLint worder, const GLdouble *points); +typedef void (APIENTRYP PFNGLDEFORMATIONMAP3FSGIXPROC) (GLenum target, GLfloat u1, GLfloat u2, GLint ustride, GLint uorder, GLfloat v1, GLfloat v2, GLint vstride, GLint vorder, GLfloat w1, GLfloat w2, GLint wstride, GLint worder, const GLfloat *points); +typedef void (APIENTRYP PFNGLDEFORMSGIXPROC) (GLbitfield mask); +typedef void (APIENTRYP PFNGLLOADIDENTITYDEFORMATIONMAPSGIXPROC) (GLbitfield mask); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDeformationMap3dSGIX (GLenum target, GLdouble u1, GLdouble u2, GLint ustride, GLint uorder, GLdouble v1, GLdouble v2, GLint vstride, GLint vorder, GLdouble w1, GLdouble w2, GLint wstride, GLint worder, const GLdouble *points); +GLAPI void APIENTRY glDeformationMap3fSGIX (GLenum target, GLfloat u1, GLfloat u2, GLint ustride, GLint uorder, GLfloat v1, GLfloat v2, GLint vstride, GLint vorder, GLfloat w1, GLfloat w2, GLint wstride, GLint worder, const GLfloat *points); +GLAPI void APIENTRY glDeformSGIX (GLbitfield mask); +GLAPI void APIENTRY glLoadIdentityDeformationMapSGIX (GLbitfield mask); +#endif +#endif /* GL_SGIX_polynomial_ffd */ + +#ifndef GL_SGIX_reference_plane +#define GL_SGIX_reference_plane 1 +#define GL_REFERENCE_PLANE_SGIX 0x817D +#define GL_REFERENCE_PLANE_EQUATION_SGIX 0x817E +typedef void (APIENTRYP PFNGLREFERENCEPLANESGIXPROC) (const GLdouble *equation); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glReferencePlaneSGIX (const GLdouble *equation); +#endif +#endif /* GL_SGIX_reference_plane */ + +#ifndef GL_SGIX_resample +#define GL_SGIX_resample 1 +#define GL_PACK_RESAMPLE_SGIX 0x842E +#define GL_UNPACK_RESAMPLE_SGIX 0x842F +#define GL_RESAMPLE_REPLICATE_SGIX 0x8433 +#define GL_RESAMPLE_ZERO_FILL_SGIX 0x8434 +#define GL_RESAMPLE_DECIMATE_SGIX 0x8430 +#endif /* GL_SGIX_resample */ + +#ifndef GL_SGIX_scalebias_hint +#define 
GL_SGIX_scalebias_hint 1 +#define GL_SCALEBIAS_HINT_SGIX 0x8322 +#endif /* GL_SGIX_scalebias_hint */ + +#ifndef GL_SGIX_shadow +#define GL_SGIX_shadow 1 +#define GL_TEXTURE_COMPARE_SGIX 0x819A +#define GL_TEXTURE_COMPARE_OPERATOR_SGIX 0x819B +#define GL_TEXTURE_LEQUAL_R_SGIX 0x819C +#define GL_TEXTURE_GEQUAL_R_SGIX 0x819D +#endif /* GL_SGIX_shadow */ + +#ifndef GL_SGIX_shadow_ambient +#define GL_SGIX_shadow_ambient 1 +#define GL_SHADOW_AMBIENT_SGIX 0x80BF +#endif /* GL_SGIX_shadow_ambient */ + +#ifndef GL_SGIX_sprite +#define GL_SGIX_sprite 1 +#define GL_SPRITE_SGIX 0x8148 +#define GL_SPRITE_MODE_SGIX 0x8149 +#define GL_SPRITE_AXIS_SGIX 0x814A +#define GL_SPRITE_TRANSLATION_SGIX 0x814B +#define GL_SPRITE_AXIAL_SGIX 0x814C +#define GL_SPRITE_OBJECT_ALIGNED_SGIX 0x814D +#define GL_SPRITE_EYE_ALIGNED_SGIX 0x814E +typedef void (APIENTRYP PFNGLSPRITEPARAMETERFSGIXPROC) (GLenum pname, GLfloat param); +typedef void (APIENTRYP PFNGLSPRITEPARAMETERFVSGIXPROC) (GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLSPRITEPARAMETERISGIXPROC) (GLenum pname, GLint param); +typedef void (APIENTRYP PFNGLSPRITEPARAMETERIVSGIXPROC) (GLenum pname, const GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glSpriteParameterfSGIX (GLenum pname, GLfloat param); +GLAPI void APIENTRY glSpriteParameterfvSGIX (GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glSpriteParameteriSGIX (GLenum pname, GLint param); +GLAPI void APIENTRY glSpriteParameterivSGIX (GLenum pname, const GLint *params); +#endif +#endif /* GL_SGIX_sprite */ + +#ifndef GL_SGIX_subsample +#define GL_SGIX_subsample 1 +#define GL_PACK_SUBSAMPLE_RATE_SGIX 0x85A0 +#define GL_UNPACK_SUBSAMPLE_RATE_SGIX 0x85A1 +#define GL_PIXEL_SUBSAMPLE_4444_SGIX 0x85A2 +#define GL_PIXEL_SUBSAMPLE_2424_SGIX 0x85A3 +#define GL_PIXEL_SUBSAMPLE_4242_SGIX 0x85A4 +#endif /* GL_SGIX_subsample */ + +#ifndef GL_SGIX_tag_sample_buffer +#define GL_SGIX_tag_sample_buffer 1 +typedef void (APIENTRYP PFNGLTAGSAMPLEBUFFERSGIXPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glTagSampleBufferSGIX (void); +#endif +#endif /* GL_SGIX_tag_sample_buffer */ + +#ifndef GL_SGIX_texture_add_env +#define GL_SGIX_texture_add_env 1 +#define GL_TEXTURE_ENV_BIAS_SGIX 0x80BE +#endif /* GL_SGIX_texture_add_env */ + +#ifndef GL_SGIX_texture_coordinate_clamp +#define GL_SGIX_texture_coordinate_clamp 1 +#define GL_TEXTURE_MAX_CLAMP_S_SGIX 0x8369 +#define GL_TEXTURE_MAX_CLAMP_T_SGIX 0x836A +#define GL_TEXTURE_MAX_CLAMP_R_SGIX 0x836B +#endif /* GL_SGIX_texture_coordinate_clamp */ + +#ifndef GL_SGIX_texture_lod_bias +#define GL_SGIX_texture_lod_bias 1 +#define GL_TEXTURE_LOD_BIAS_S_SGIX 0x818E +#define GL_TEXTURE_LOD_BIAS_T_SGIX 0x818F +#define GL_TEXTURE_LOD_BIAS_R_SGIX 0x8190 +#endif /* GL_SGIX_texture_lod_bias */ + +#ifndef GL_SGIX_texture_multi_buffer +#define GL_SGIX_texture_multi_buffer 1 +#define GL_TEXTURE_MULTI_BUFFER_HINT_SGIX 0x812E +#endif /* GL_SGIX_texture_multi_buffer */ + +#ifndef GL_SGIX_texture_scale_bias +#define GL_SGIX_texture_scale_bias 1 +#define GL_POST_TEXTURE_FILTER_BIAS_SGIX 0x8179 +#define GL_POST_TEXTURE_FILTER_SCALE_SGIX 0x817A +#define GL_POST_TEXTURE_FILTER_BIAS_RANGE_SGIX 0x817B +#define GL_POST_TEXTURE_FILTER_SCALE_RANGE_SGIX 0x817C +#endif /* GL_SGIX_texture_scale_bias */ + +#ifndef GL_SGIX_vertex_preclip +#define GL_SGIX_vertex_preclip 1 +#define GL_VERTEX_PRECLIP_SGIX 0x83EE +#define GL_VERTEX_PRECLIP_HINT_SGIX 0x83EF +#endif /* GL_SGIX_vertex_preclip */ + +#ifndef GL_SGIX_ycrcb +#define GL_SGIX_ycrcb 1 +#define 
GL_YCRCB_422_SGIX 0x81BB +#define GL_YCRCB_444_SGIX 0x81BC +#endif /* GL_SGIX_ycrcb */ + +#ifndef GL_SGIX_ycrcb_subsample +#define GL_SGIX_ycrcb_subsample 1 +#endif /* GL_SGIX_ycrcb_subsample */ + +#ifndef GL_SGIX_ycrcba +#define GL_SGIX_ycrcba 1 +#define GL_YCRCB_SGIX 0x8318 +#define GL_YCRCBA_SGIX 0x8319 +#endif /* GL_SGIX_ycrcba */ + +#ifndef GL_SGI_color_matrix +#define GL_SGI_color_matrix 1 +#define GL_COLOR_MATRIX_SGI 0x80B1 +#define GL_COLOR_MATRIX_STACK_DEPTH_SGI 0x80B2 +#define GL_MAX_COLOR_MATRIX_STACK_DEPTH_SGI 0x80B3 +#define GL_POST_COLOR_MATRIX_RED_SCALE_SGI 0x80B4 +#define GL_POST_COLOR_MATRIX_GREEN_SCALE_SGI 0x80B5 +#define GL_POST_COLOR_MATRIX_BLUE_SCALE_SGI 0x80B6 +#define GL_POST_COLOR_MATRIX_ALPHA_SCALE_SGI 0x80B7 +#define GL_POST_COLOR_MATRIX_RED_BIAS_SGI 0x80B8 +#define GL_POST_COLOR_MATRIX_GREEN_BIAS_SGI 0x80B9 +#define GL_POST_COLOR_MATRIX_BLUE_BIAS_SGI 0x80BA +#define GL_POST_COLOR_MATRIX_ALPHA_BIAS_SGI 0x80BB +#endif /* GL_SGI_color_matrix */ + +#ifndef GL_SGI_color_table +#define GL_SGI_color_table 1 +#define GL_COLOR_TABLE_SGI 0x80D0 +#define GL_POST_CONVOLUTION_COLOR_TABLE_SGI 0x80D1 +#define GL_POST_COLOR_MATRIX_COLOR_TABLE_SGI 0x80D2 +#define GL_PROXY_COLOR_TABLE_SGI 0x80D3 +#define GL_PROXY_POST_CONVOLUTION_COLOR_TABLE_SGI 0x80D4 +#define GL_PROXY_POST_COLOR_MATRIX_COLOR_TABLE_SGI 0x80D5 +#define GL_COLOR_TABLE_SCALE_SGI 0x80D6 +#define GL_COLOR_TABLE_BIAS_SGI 0x80D7 +#define GL_COLOR_TABLE_FORMAT_SGI 0x80D8 +#define GL_COLOR_TABLE_WIDTH_SGI 0x80D9 +#define GL_COLOR_TABLE_RED_SIZE_SGI 0x80DA +#define GL_COLOR_TABLE_GREEN_SIZE_SGI 0x80DB +#define GL_COLOR_TABLE_BLUE_SIZE_SGI 0x80DC +#define GL_COLOR_TABLE_ALPHA_SIZE_SGI 0x80DD +#define GL_COLOR_TABLE_LUMINANCE_SIZE_SGI 0x80DE +#define GL_COLOR_TABLE_INTENSITY_SIZE_SGI 0x80DF +typedef void (APIENTRYP PFNGLCOLORTABLESGIPROC) (GLenum target, GLenum internalformat, GLsizei width, GLenum format, GLenum type, const void *table); +typedef void (APIENTRYP PFNGLCOLORTABLEPARAMETERFVSGIPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (APIENTRYP PFNGLCOLORTABLEPARAMETERIVSGIPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (APIENTRYP PFNGLCOPYCOLORTABLESGIPROC) (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width); +typedef void (APIENTRYP PFNGLGETCOLORTABLESGIPROC) (GLenum target, GLenum format, GLenum type, void *table); +typedef void (APIENTRYP PFNGLGETCOLORTABLEPARAMETERFVSGIPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (APIENTRYP PFNGLGETCOLORTABLEPARAMETERIVSGIPROC) (GLenum target, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glColorTableSGI (GLenum target, GLenum internalformat, GLsizei width, GLenum format, GLenum type, const void *table); +GLAPI void APIENTRY glColorTableParameterfvSGI (GLenum target, GLenum pname, const GLfloat *params); +GLAPI void APIENTRY glColorTableParameterivSGI (GLenum target, GLenum pname, const GLint *params); +GLAPI void APIENTRY glCopyColorTableSGI (GLenum target, GLenum internalformat, GLint x, GLint y, GLsizei width); +GLAPI void APIENTRY glGetColorTableSGI (GLenum target, GLenum format, GLenum type, void *table); +GLAPI void APIENTRY glGetColorTableParameterfvSGI (GLenum target, GLenum pname, GLfloat *params); +GLAPI void APIENTRY glGetColorTableParameterivSGI (GLenum target, GLenum pname, GLint *params); +#endif +#endif /* GL_SGI_color_table */ + +#ifndef GL_SGI_texture_color_table +#define GL_SGI_texture_color_table 1 +#define GL_TEXTURE_COLOR_TABLE_SGI 
0x80BC +#define GL_PROXY_TEXTURE_COLOR_TABLE_SGI 0x80BD +#endif /* GL_SGI_texture_color_table */ + +#ifndef GL_SUNX_constant_data +#define GL_SUNX_constant_data 1 +#define GL_UNPACK_CONSTANT_DATA_SUNX 0x81D5 +#define GL_TEXTURE_CONSTANT_DATA_SUNX 0x81D6 +typedef void (APIENTRYP PFNGLFINISHTEXTURESUNXPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glFinishTextureSUNX (void); +#endif +#endif /* GL_SUNX_constant_data */ + +#ifndef GL_SUN_convolution_border_modes +#define GL_SUN_convolution_border_modes 1 +#define GL_WRAP_BORDER_SUN 0x81D4 +#endif /* GL_SUN_convolution_border_modes */ + +#ifndef GL_SUN_global_alpha +#define GL_SUN_global_alpha 1 +#define GL_GLOBAL_ALPHA_SUN 0x81D9 +#define GL_GLOBAL_ALPHA_FACTOR_SUN 0x81DA +typedef void (APIENTRYP PFNGLGLOBALALPHAFACTORBSUNPROC) (GLbyte factor); +typedef void (APIENTRYP PFNGLGLOBALALPHAFACTORSSUNPROC) (GLshort factor); +typedef void (APIENTRYP PFNGLGLOBALALPHAFACTORISUNPROC) (GLint factor); +typedef void (APIENTRYP PFNGLGLOBALALPHAFACTORFSUNPROC) (GLfloat factor); +typedef void (APIENTRYP PFNGLGLOBALALPHAFACTORDSUNPROC) (GLdouble factor); +typedef void (APIENTRYP PFNGLGLOBALALPHAFACTORUBSUNPROC) (GLubyte factor); +typedef void (APIENTRYP PFNGLGLOBALALPHAFACTORUSSUNPROC) (GLushort factor); +typedef void (APIENTRYP PFNGLGLOBALALPHAFACTORUISUNPROC) (GLuint factor); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glGlobalAlphaFactorbSUN (GLbyte factor); +GLAPI void APIENTRY glGlobalAlphaFactorsSUN (GLshort factor); +GLAPI void APIENTRY glGlobalAlphaFactoriSUN (GLint factor); +GLAPI void APIENTRY glGlobalAlphaFactorfSUN (GLfloat factor); +GLAPI void APIENTRY glGlobalAlphaFactordSUN (GLdouble factor); +GLAPI void APIENTRY glGlobalAlphaFactorubSUN (GLubyte factor); +GLAPI void APIENTRY glGlobalAlphaFactorusSUN (GLushort factor); +GLAPI void APIENTRY glGlobalAlphaFactoruiSUN (GLuint factor); +#endif +#endif /* GL_SUN_global_alpha */ + +#ifndef GL_SUN_mesh_array +#define GL_SUN_mesh_array 1 +#define GL_QUAD_MESH_SUN 0x8614 +#define GL_TRIANGLE_MESH_SUN 0x8615 +typedef void (APIENTRYP PFNGLDRAWMESHARRAYSSUNPROC) (GLenum mode, GLint first, GLsizei count, GLsizei width); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glDrawMeshArraysSUN (GLenum mode, GLint first, GLsizei count, GLsizei width); +#endif +#endif /* GL_SUN_mesh_array */ + +#ifndef GL_SUN_slice_accum +#define GL_SUN_slice_accum 1 +#define GL_SLICE_ACCUM_SUN 0x85CC +#endif /* GL_SUN_slice_accum */ + +#ifndef GL_SUN_triangle_list +#define GL_SUN_triangle_list 1 +#define GL_RESTART_SUN 0x0001 +#define GL_REPLACE_MIDDLE_SUN 0x0002 +#define GL_REPLACE_OLDEST_SUN 0x0003 +#define GL_TRIANGLE_LIST_SUN 0x81D7 +#define GL_REPLACEMENT_CODE_SUN 0x81D8 +#define GL_REPLACEMENT_CODE_ARRAY_SUN 0x85C0 +#define GL_REPLACEMENT_CODE_ARRAY_TYPE_SUN 0x85C1 +#define GL_REPLACEMENT_CODE_ARRAY_STRIDE_SUN 0x85C2 +#define GL_REPLACEMENT_CODE_ARRAY_POINTER_SUN 0x85C3 +#define GL_R1UI_V3F_SUN 0x85C4 +#define GL_R1UI_C4UB_V3F_SUN 0x85C5 +#define GL_R1UI_C3F_V3F_SUN 0x85C6 +#define GL_R1UI_N3F_V3F_SUN 0x85C7 +#define GL_R1UI_C4F_N3F_V3F_SUN 0x85C8 +#define GL_R1UI_T2F_V3F_SUN 0x85C9 +#define GL_R1UI_T2F_N3F_V3F_SUN 0x85CA +#define GL_R1UI_T2F_C4F_N3F_V3F_SUN 0x85CB +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUISUNPROC) (GLuint code); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUSSUNPROC) (GLushort code); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUBSUNPROC) (GLubyte code); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUIVSUNPROC) (const GLuint *code); +typedef void (APIENTRYP 
PFNGLREPLACEMENTCODEUSVSUNPROC) (const GLushort *code); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUBVSUNPROC) (const GLubyte *code); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEPOINTERSUNPROC) (GLenum type, GLsizei stride, const void **pointer); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glReplacementCodeuiSUN (GLuint code); +GLAPI void APIENTRY glReplacementCodeusSUN (GLushort code); +GLAPI void APIENTRY glReplacementCodeubSUN (GLubyte code); +GLAPI void APIENTRY glReplacementCodeuivSUN (const GLuint *code); +GLAPI void APIENTRY glReplacementCodeusvSUN (const GLushort *code); +GLAPI void APIENTRY glReplacementCodeubvSUN (const GLubyte *code); +GLAPI void APIENTRY glReplacementCodePointerSUN (GLenum type, GLsizei stride, const void **pointer); +#endif +#endif /* GL_SUN_triangle_list */ + +#ifndef GL_SUN_vertex +#define GL_SUN_vertex 1 +typedef void (APIENTRYP PFNGLCOLOR4UBVERTEX2FSUNPROC) (GLubyte r, GLubyte g, GLubyte b, GLubyte a, GLfloat x, GLfloat y); +typedef void (APIENTRYP PFNGLCOLOR4UBVERTEX2FVSUNPROC) (const GLubyte *c, const GLfloat *v); +typedef void (APIENTRYP PFNGLCOLOR4UBVERTEX3FSUNPROC) (GLubyte r, GLubyte g, GLubyte b, GLubyte a, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLCOLOR4UBVERTEX3FVSUNPROC) (const GLubyte *c, const GLfloat *v); +typedef void (APIENTRYP PFNGLCOLOR3FVERTEX3FSUNPROC) (GLfloat r, GLfloat g, GLfloat b, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLCOLOR3FVERTEX3FVSUNPROC) (const GLfloat *c, const GLfloat *v); +typedef void (APIENTRYP PFNGLNORMAL3FVERTEX3FSUNPROC) (GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLNORMAL3FVERTEX3FVSUNPROC) (const GLfloat *n, const GLfloat *v); +typedef void (APIENTRYP PFNGLCOLOR4FNORMAL3FVERTEX3FSUNPROC) (GLfloat r, GLfloat g, GLfloat b, GLfloat a, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLCOLOR4FNORMAL3FVERTEX3FVSUNPROC) (const GLfloat *c, const GLfloat *n, const GLfloat *v); +typedef void (APIENTRYP PFNGLTEXCOORD2FVERTEX3FSUNPROC) (GLfloat s, GLfloat t, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLTEXCOORD2FVERTEX3FVSUNPROC) (const GLfloat *tc, const GLfloat *v); +typedef void (APIENTRYP PFNGLTEXCOORD4FVERTEX4FSUNPROC) (GLfloat s, GLfloat t, GLfloat p, GLfloat q, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLTEXCOORD4FVERTEX4FVSUNPROC) (const GLfloat *tc, const GLfloat *v); +typedef void (APIENTRYP PFNGLTEXCOORD2FCOLOR4UBVERTEX3FSUNPROC) (GLfloat s, GLfloat t, GLubyte r, GLubyte g, GLubyte b, GLubyte a, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLTEXCOORD2FCOLOR4UBVERTEX3FVSUNPROC) (const GLfloat *tc, const GLubyte *c, const GLfloat *v); +typedef void (APIENTRYP PFNGLTEXCOORD2FCOLOR3FVERTEX3FSUNPROC) (GLfloat s, GLfloat t, GLfloat r, GLfloat g, GLfloat b, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLTEXCOORD2FCOLOR3FVERTEX3FVSUNPROC) (const GLfloat *tc, const GLfloat *c, const GLfloat *v); +typedef void (APIENTRYP PFNGLTEXCOORD2FNORMAL3FVERTEX3FSUNPROC) (GLfloat s, GLfloat t, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLTEXCOORD2FNORMAL3FVERTEX3FVSUNPROC) (const GLfloat *tc, const GLfloat *n, const GLfloat *v); +typedef void (APIENTRYP PFNGLTEXCOORD2FCOLOR4FNORMAL3FVERTEX3FSUNPROC) (GLfloat s, GLfloat t, GLfloat r, GLfloat g, GLfloat b, GLfloat a, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +typedef 
void (APIENTRYP PFNGLTEXCOORD2FCOLOR4FNORMAL3FVERTEX3FVSUNPROC) (const GLfloat *tc, const GLfloat *c, const GLfloat *n, const GLfloat *v); +typedef void (APIENTRYP PFNGLTEXCOORD4FCOLOR4FNORMAL3FVERTEX4FSUNPROC) (GLfloat s, GLfloat t, GLfloat p, GLfloat q, GLfloat r, GLfloat g, GLfloat b, GLfloat a, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (APIENTRYP PFNGLTEXCOORD4FCOLOR4FNORMAL3FVERTEX4FVSUNPROC) (const GLfloat *tc, const GLfloat *c, const GLfloat *n, const GLfloat *v); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUIVERTEX3FSUNPROC) (GLuint rc, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUIVERTEX3FVSUNPROC) (const GLuint *rc, const GLfloat *v); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUICOLOR4UBVERTEX3FSUNPROC) (GLuint rc, GLubyte r, GLubyte g, GLubyte b, GLubyte a, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUICOLOR4UBVERTEX3FVSUNPROC) (const GLuint *rc, const GLubyte *c, const GLfloat *v); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUICOLOR3FVERTEX3FSUNPROC) (GLuint rc, GLfloat r, GLfloat g, GLfloat b, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUICOLOR3FVERTEX3FVSUNPROC) (const GLuint *rc, const GLfloat *c, const GLfloat *v); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUINORMAL3FVERTEX3FSUNPROC) (GLuint rc, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUINORMAL3FVERTEX3FVSUNPROC) (const GLuint *rc, const GLfloat *n, const GLfloat *v); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUICOLOR4FNORMAL3FVERTEX3FSUNPROC) (GLuint rc, GLfloat r, GLfloat g, GLfloat b, GLfloat a, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUICOLOR4FNORMAL3FVERTEX3FVSUNPROC) (const GLuint *rc, const GLfloat *c, const GLfloat *n, const GLfloat *v); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUITEXCOORD2FVERTEX3FSUNPROC) (GLuint rc, GLfloat s, GLfloat t, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUITEXCOORD2FVERTEX3FVSUNPROC) (const GLuint *rc, const GLfloat *tc, const GLfloat *v); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUITEXCOORD2FNORMAL3FVERTEX3FSUNPROC) (GLuint rc, GLfloat s, GLfloat t, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUITEXCOORD2FNORMAL3FVERTEX3FVSUNPROC) (const GLuint *rc, const GLfloat *tc, const GLfloat *n, const GLfloat *v); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUITEXCOORD2FCOLOR4FNORMAL3FVERTEX3FSUNPROC) (GLuint rc, GLfloat s, GLfloat t, GLfloat r, GLfloat g, GLfloat b, GLfloat a, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +typedef void (APIENTRYP PFNGLREPLACEMENTCODEUITEXCOORD2FCOLOR4FNORMAL3FVERTEX3FVSUNPROC) (const GLuint *rc, const GLfloat *tc, const GLfloat *c, const GLfloat *n, const GLfloat *v); +#ifdef GL_GLEXT_PROTOTYPES +GLAPI void APIENTRY glColor4ubVertex2fSUN (GLubyte r, GLubyte g, GLubyte b, GLubyte a, GLfloat x, GLfloat y); +GLAPI void APIENTRY glColor4ubVertex2fvSUN (const GLubyte *c, const GLfloat *v); +GLAPI void APIENTRY glColor4ubVertex3fSUN (GLubyte r, GLubyte g, GLubyte b, GLubyte a, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glColor4ubVertex3fvSUN (const GLubyte *c, const GLfloat *v); +GLAPI void APIENTRY glColor3fVertex3fSUN (GLfloat r, GLfloat g, GLfloat b, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY 
glColor3fVertex3fvSUN (const GLfloat *c, const GLfloat *v); +GLAPI void APIENTRY glNormal3fVertex3fSUN (GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glNormal3fVertex3fvSUN (const GLfloat *n, const GLfloat *v); +GLAPI void APIENTRY glColor4fNormal3fVertex3fSUN (GLfloat r, GLfloat g, GLfloat b, GLfloat a, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glColor4fNormal3fVertex3fvSUN (const GLfloat *c, const GLfloat *n, const GLfloat *v); +GLAPI void APIENTRY glTexCoord2fVertex3fSUN (GLfloat s, GLfloat t, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glTexCoord2fVertex3fvSUN (const GLfloat *tc, const GLfloat *v); +GLAPI void APIENTRY glTexCoord4fVertex4fSUN (GLfloat s, GLfloat t, GLfloat p, GLfloat q, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glTexCoord4fVertex4fvSUN (const GLfloat *tc, const GLfloat *v); +GLAPI void APIENTRY glTexCoord2fColor4ubVertex3fSUN (GLfloat s, GLfloat t, GLubyte r, GLubyte g, GLubyte b, GLubyte a, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glTexCoord2fColor4ubVertex3fvSUN (const GLfloat *tc, const GLubyte *c, const GLfloat *v); +GLAPI void APIENTRY glTexCoord2fColor3fVertex3fSUN (GLfloat s, GLfloat t, GLfloat r, GLfloat g, GLfloat b, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glTexCoord2fColor3fVertex3fvSUN (const GLfloat *tc, const GLfloat *c, const GLfloat *v); +GLAPI void APIENTRY glTexCoord2fNormal3fVertex3fSUN (GLfloat s, GLfloat t, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glTexCoord2fNormal3fVertex3fvSUN (const GLfloat *tc, const GLfloat *n, const GLfloat *v); +GLAPI void APIENTRY glTexCoord2fColor4fNormal3fVertex3fSUN (GLfloat s, GLfloat t, GLfloat r, GLfloat g, GLfloat b, GLfloat a, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glTexCoord2fColor4fNormal3fVertex3fvSUN (const GLfloat *tc, const GLfloat *c, const GLfloat *n, const GLfloat *v); +GLAPI void APIENTRY glTexCoord4fColor4fNormal3fVertex4fSUN (GLfloat s, GLfloat t, GLfloat p, GLfloat q, GLfloat r, GLfloat g, GLfloat b, GLfloat a, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GLAPI void APIENTRY glTexCoord4fColor4fNormal3fVertex4fvSUN (const GLfloat *tc, const GLfloat *c, const GLfloat *n, const GLfloat *v); +GLAPI void APIENTRY glReplacementCodeuiVertex3fSUN (GLuint rc, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glReplacementCodeuiVertex3fvSUN (const GLuint *rc, const GLfloat *v); +GLAPI void APIENTRY glReplacementCodeuiColor4ubVertex3fSUN (GLuint rc, GLubyte r, GLubyte g, GLubyte b, GLubyte a, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glReplacementCodeuiColor4ubVertex3fvSUN (const GLuint *rc, const GLubyte *c, const GLfloat *v); +GLAPI void APIENTRY glReplacementCodeuiColor3fVertex3fSUN (GLuint rc, GLfloat r, GLfloat g, GLfloat b, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glReplacementCodeuiColor3fVertex3fvSUN (const GLuint *rc, const GLfloat *c, const GLfloat *v); +GLAPI void APIENTRY glReplacementCodeuiNormal3fVertex3fSUN (GLuint rc, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glReplacementCodeuiNormal3fVertex3fvSUN (const GLuint *rc, const GLfloat *n, const GLfloat *v); +GLAPI void APIENTRY glReplacementCodeuiColor4fNormal3fVertex3fSUN (GLuint rc, GLfloat r, GLfloat g, GLfloat b, GLfloat a, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat 
y, GLfloat z); +GLAPI void APIENTRY glReplacementCodeuiColor4fNormal3fVertex3fvSUN (const GLuint *rc, const GLfloat *c, const GLfloat *n, const GLfloat *v); +GLAPI void APIENTRY glReplacementCodeuiTexCoord2fVertex3fSUN (GLuint rc, GLfloat s, GLfloat t, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glReplacementCodeuiTexCoord2fVertex3fvSUN (const GLuint *rc, const GLfloat *tc, const GLfloat *v); +GLAPI void APIENTRY glReplacementCodeuiTexCoord2fNormal3fVertex3fSUN (GLuint rc, GLfloat s, GLfloat t, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glReplacementCodeuiTexCoord2fNormal3fVertex3fvSUN (const GLuint *rc, const GLfloat *tc, const GLfloat *n, const GLfloat *v); +GLAPI void APIENTRY glReplacementCodeuiTexCoord2fColor4fNormal3fVertex3fSUN (GLuint rc, GLfloat s, GLfloat t, GLfloat r, GLfloat g, GLfloat b, GLfloat a, GLfloat nx, GLfloat ny, GLfloat nz, GLfloat x, GLfloat y, GLfloat z); +GLAPI void APIENTRY glReplacementCodeuiTexCoord2fColor4fNormal3fVertex3fvSUN (const GLuint *rc, const GLfloat *tc, const GLfloat *c, const GLfloat *n, const GLfloat *v); +#endif +#endif /* GL_SUN_vertex */ + +#ifndef GL_WIN_phong_shading +#define GL_WIN_phong_shading 1 +#define GL_PHONG_WIN 0x80EA +#define GL_PHONG_HINT_WIN 0x80EB +#endif /* GL_WIN_phong_shading */ + +#ifndef GL_WIN_specular_fog +#define GL_WIN_specular_fog 1 +#define GL_FOG_SPECULAR_TEXTURE_WIN 0x80EC +#endif /* GL_WIN_specular_fog */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/GL/glxext.h b/vendor/swiftshader/include/GL/glxext.h new file mode 100644 index 00000000..4c984ef4 --- /dev/null +++ b/vendor/swiftshader/include/GL/glxext.h @@ -0,0 +1,953 @@ +#ifndef __glx_glxext_h_ +#define __glx_glxext_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2018 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. 
The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#define GLX_GLXEXT_VERSION 20180525 + +/* Generated C header for: + * API: glx + * Versions considered: .* + * Versions emitted: 1\.[3-9] + * Default extensions included: glx + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef GLX_VERSION_1_3 +#define GLX_VERSION_1_3 1 +typedef XID GLXContextID; +typedef struct __GLXFBConfigRec *GLXFBConfig; +typedef XID GLXWindow; +typedef XID GLXPbuffer; +#define GLX_WINDOW_BIT 0x00000001 +#define GLX_PIXMAP_BIT 0x00000002 +#define GLX_PBUFFER_BIT 0x00000004 +#define GLX_RGBA_BIT 0x00000001 +#define GLX_COLOR_INDEX_BIT 0x00000002 +#define GLX_PBUFFER_CLOBBER_MASK 0x08000000 +#define GLX_FRONT_LEFT_BUFFER_BIT 0x00000001 +#define GLX_FRONT_RIGHT_BUFFER_BIT 0x00000002 +#define GLX_BACK_LEFT_BUFFER_BIT 0x00000004 +#define GLX_BACK_RIGHT_BUFFER_BIT 0x00000008 +#define GLX_AUX_BUFFERS_BIT 0x00000010 +#define GLX_DEPTH_BUFFER_BIT 0x00000020 +#define GLX_STENCIL_BUFFER_BIT 0x00000040 +#define GLX_ACCUM_BUFFER_BIT 0x00000080 +#define GLX_CONFIG_CAVEAT 0x20 +#define GLX_X_VISUAL_TYPE 0x22 +#define GLX_TRANSPARENT_TYPE 0x23 +#define GLX_TRANSPARENT_INDEX_VALUE 0x24 +#define GLX_TRANSPARENT_RED_VALUE 0x25 +#define GLX_TRANSPARENT_GREEN_VALUE 0x26 +#define GLX_TRANSPARENT_BLUE_VALUE 0x27 +#define GLX_TRANSPARENT_ALPHA_VALUE 0x28 +#define GLX_DONT_CARE 0xFFFFFFFF +#define GLX_NONE 0x8000 +#define GLX_SLOW_CONFIG 0x8001 +#define GLX_TRUE_COLOR 0x8002 +#define GLX_DIRECT_COLOR 0x8003 +#define GLX_PSEUDO_COLOR 0x8004 +#define GLX_STATIC_COLOR 0x8005 +#define GLX_GRAY_SCALE 0x8006 +#define GLX_STATIC_GRAY 0x8007 +#define GLX_TRANSPARENT_RGB 0x8008 +#define GLX_TRANSPARENT_INDEX 0x8009 +#define GLX_VISUAL_ID 0x800B +#define GLX_SCREEN 0x800C +#define GLX_NON_CONFORMANT_CONFIG 0x800D +#define GLX_DRAWABLE_TYPE 0x8010 +#define GLX_RENDER_TYPE 0x8011 +#define GLX_X_RENDERABLE 0x8012 +#define GLX_FBCONFIG_ID 0x8013 +#define GLX_RGBA_TYPE 0x8014 +#define GLX_COLOR_INDEX_TYPE 0x8015 +#define GLX_MAX_PBUFFER_WIDTH 0x8016 +#define GLX_MAX_PBUFFER_HEIGHT 0x8017 +#define GLX_MAX_PBUFFER_PIXELS 0x8018 +#define GLX_PRESERVED_CONTENTS 0x801B +#define GLX_LARGEST_PBUFFER 0x801C +#define GLX_WIDTH 0x801D +#define GLX_HEIGHT 0x801E +#define GLX_EVENT_MASK 0x801F +#define GLX_DAMAGED 0x8020 +#define GLX_SAVED 0x8021 +#define GLX_WINDOW 0x8022 +#define GLX_PBUFFER 0x8023 +#define GLX_PBUFFER_HEIGHT 0x8040 +#define GLX_PBUFFER_WIDTH 0x8041 +typedef GLXFBConfig *( *PFNGLXGETFBCONFIGSPROC) (Display *dpy, int screen, int *nelements); +typedef GLXFBConfig *( *PFNGLXCHOOSEFBCONFIGPROC) (Display *dpy, int screen, const int *attrib_list, int *nelements); +typedef int ( *PFNGLXGETFBCONFIGATTRIBPROC) (Display *dpy, GLXFBConfig config, int attribute, int *value); +typedef XVisualInfo *( *PFNGLXGETVISUALFROMFBCONFIGPROC) (Display *dpy, GLXFBConfig config); +typedef GLXWindow ( *PFNGLXCREATEWINDOWPROC) (Display *dpy, GLXFBConfig config, Window win, const int *attrib_list); +typedef void ( *PFNGLXDESTROYWINDOWPROC) (Display *dpy, GLXWindow win); +typedef GLXPixmap ( *PFNGLXCREATEPIXMAPPROC) (Display *dpy, GLXFBConfig config, Pixmap pixmap, const int *attrib_list); +typedef void ( *PFNGLXDESTROYPIXMAPPROC) (Display *dpy, GLXPixmap pixmap); +typedef GLXPbuffer ( *PFNGLXCREATEPBUFFERPROC) (Display *dpy, GLXFBConfig config, const int *attrib_list); +typedef void ( *PFNGLXDESTROYPBUFFERPROC) 
(Display *dpy, GLXPbuffer pbuf); +typedef void ( *PFNGLXQUERYDRAWABLEPROC) (Display *dpy, GLXDrawable draw, int attribute, unsigned int *value); +typedef GLXContext ( *PFNGLXCREATENEWCONTEXTPROC) (Display *dpy, GLXFBConfig config, int render_type, GLXContext share_list, Bool direct); +typedef Bool ( *PFNGLXMAKECONTEXTCURRENTPROC) (Display *dpy, GLXDrawable draw, GLXDrawable read, GLXContext ctx); +typedef GLXDrawable ( *PFNGLXGETCURRENTREADDRAWABLEPROC) (void); +typedef int ( *PFNGLXQUERYCONTEXTPROC) (Display *dpy, GLXContext ctx, int attribute, int *value); +typedef void ( *PFNGLXSELECTEVENTPROC) (Display *dpy, GLXDrawable draw, unsigned long event_mask); +typedef void ( *PFNGLXGETSELECTEDEVENTPROC) (Display *dpy, GLXDrawable draw, unsigned long *event_mask); +#ifdef GLX_GLXEXT_PROTOTYPES +GLXFBConfig *glXGetFBConfigs (Display *dpy, int screen, int *nelements); +GLXFBConfig *glXChooseFBConfig (Display *dpy, int screen, const int *attrib_list, int *nelements); +int glXGetFBConfigAttrib (Display *dpy, GLXFBConfig config, int attribute, int *value); +XVisualInfo *glXGetVisualFromFBConfig (Display *dpy, GLXFBConfig config); +GLXWindow glXCreateWindow (Display *dpy, GLXFBConfig config, Window win, const int *attrib_list); +void glXDestroyWindow (Display *dpy, GLXWindow win); +GLXPixmap glXCreatePixmap (Display *dpy, GLXFBConfig config, Pixmap pixmap, const int *attrib_list); +void glXDestroyPixmap (Display *dpy, GLXPixmap pixmap); +GLXPbuffer glXCreatePbuffer (Display *dpy, GLXFBConfig config, const int *attrib_list); +void glXDestroyPbuffer (Display *dpy, GLXPbuffer pbuf); +void glXQueryDrawable (Display *dpy, GLXDrawable draw, int attribute, unsigned int *value); +GLXContext glXCreateNewContext (Display *dpy, GLXFBConfig config, int render_type, GLXContext share_list, Bool direct); +Bool glXMakeContextCurrent (Display *dpy, GLXDrawable draw, GLXDrawable read, GLXContext ctx); +GLXDrawable glXGetCurrentReadDrawable (void); +int glXQueryContext (Display *dpy, GLXContext ctx, int attribute, int *value); +void glXSelectEvent (Display *dpy, GLXDrawable draw, unsigned long event_mask); +void glXGetSelectedEvent (Display *dpy, GLXDrawable draw, unsigned long *event_mask); +#endif +#endif /* GLX_VERSION_1_3 */ + +#ifndef GLX_VERSION_1_4 +#define GLX_VERSION_1_4 1 +typedef void ( *__GLXextFuncPtr)(void); +#define GLX_SAMPLE_BUFFERS 100000 +#define GLX_SAMPLES 100001 +typedef __GLXextFuncPtr ( *PFNGLXGETPROCADDRESSPROC) (const GLubyte *procName); +#ifdef GLX_GLXEXT_PROTOTYPES +__GLXextFuncPtr glXGetProcAddress (const GLubyte *procName); +#endif +#endif /* GLX_VERSION_1_4 */ + +#ifndef GLX_ARB_context_flush_control +#define GLX_ARB_context_flush_control 1 +#define GLX_CONTEXT_RELEASE_BEHAVIOR_ARB 0x2097 +#define GLX_CONTEXT_RELEASE_BEHAVIOR_NONE_ARB 0 +#define GLX_CONTEXT_RELEASE_BEHAVIOR_FLUSH_ARB 0x2098 +#endif /* GLX_ARB_context_flush_control */ + +#ifndef GLX_ARB_create_context +#define GLX_ARB_create_context 1 +#define GLX_CONTEXT_DEBUG_BIT_ARB 0x00000001 +#define GLX_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB 0x00000002 +#define GLX_CONTEXT_MAJOR_VERSION_ARB 0x2091 +#define GLX_CONTEXT_MINOR_VERSION_ARB 0x2092 +#define GLX_CONTEXT_FLAGS_ARB 0x2094 +typedef GLXContext ( *PFNGLXCREATECONTEXTATTRIBSARBPROC) (Display *dpy, GLXFBConfig config, GLXContext share_context, Bool direct, const int *attrib_list); +#ifdef GLX_GLXEXT_PROTOTYPES +GLXContext glXCreateContextAttribsARB (Display *dpy, GLXFBConfig config, GLXContext share_context, Bool direct, const int *attrib_list); +#endif +#endif /* 
GLX_ARB_create_context */ + +#ifndef GLX_ARB_create_context_no_error +#define GLX_ARB_create_context_no_error 1 +#define GLX_CONTEXT_OPENGL_NO_ERROR_ARB 0x31B3 +#endif /* GLX_ARB_create_context_no_error */ + +#ifndef GLX_ARB_create_context_profile +#define GLX_ARB_create_context_profile 1 +#define GLX_CONTEXT_CORE_PROFILE_BIT_ARB 0x00000001 +#define GLX_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB 0x00000002 +#define GLX_CONTEXT_PROFILE_MASK_ARB 0x9126 +#endif /* GLX_ARB_create_context_profile */ + +#ifndef GLX_ARB_create_context_robustness +#define GLX_ARB_create_context_robustness 1 +#define GLX_CONTEXT_ROBUST_ACCESS_BIT_ARB 0x00000004 +#define GLX_LOSE_CONTEXT_ON_RESET_ARB 0x8252 +#define GLX_CONTEXT_RESET_NOTIFICATION_STRATEGY_ARB 0x8256 +#define GLX_NO_RESET_NOTIFICATION_ARB 0x8261 +#endif /* GLX_ARB_create_context_robustness */ + +#ifndef GLX_ARB_fbconfig_float +#define GLX_ARB_fbconfig_float 1 +#define GLX_RGBA_FLOAT_TYPE_ARB 0x20B9 +#define GLX_RGBA_FLOAT_BIT_ARB 0x00000004 +#endif /* GLX_ARB_fbconfig_float */ + +#ifndef GLX_ARB_framebuffer_sRGB +#define GLX_ARB_framebuffer_sRGB 1 +#define GLX_FRAMEBUFFER_SRGB_CAPABLE_ARB 0x20B2 +#endif /* GLX_ARB_framebuffer_sRGB */ + +#ifndef GLX_ARB_get_proc_address +#define GLX_ARB_get_proc_address 1 +typedef __GLXextFuncPtr ( *PFNGLXGETPROCADDRESSARBPROC) (const GLubyte *procName); +#ifdef GLX_GLXEXT_PROTOTYPES +__GLXextFuncPtr glXGetProcAddressARB (const GLubyte *procName); +#endif +#endif /* GLX_ARB_get_proc_address */ + +#ifndef GLX_ARB_multisample +#define GLX_ARB_multisample 1 +#define GLX_SAMPLE_BUFFERS_ARB 100000 +#define GLX_SAMPLES_ARB 100001 +#endif /* GLX_ARB_multisample */ + +#ifndef GLX_ARB_robustness_application_isolation +#define GLX_ARB_robustness_application_isolation 1 +#define GLX_CONTEXT_RESET_ISOLATION_BIT_ARB 0x00000008 +#endif /* GLX_ARB_robustness_application_isolation */ + +#ifndef GLX_ARB_robustness_share_group_isolation +#define GLX_ARB_robustness_share_group_isolation 1 +#endif /* GLX_ARB_robustness_share_group_isolation */ + +#ifndef GLX_ARB_vertex_buffer_object +#define GLX_ARB_vertex_buffer_object 1 +#define GLX_CONTEXT_ALLOW_BUFFER_BYTE_ORDER_MISMATCH_ARB 0x2095 +#endif /* GLX_ARB_vertex_buffer_object */ + +#ifndef GLX_3DFX_multisample +#define GLX_3DFX_multisample 1 +#define GLX_SAMPLE_BUFFERS_3DFX 0x8050 +#define GLX_SAMPLES_3DFX 0x8051 +#endif /* GLX_3DFX_multisample */ + +#ifndef GLX_AMD_gpu_association +#define GLX_AMD_gpu_association 1 +#define GLX_GPU_VENDOR_AMD 0x1F00 +#define GLX_GPU_RENDERER_STRING_AMD 0x1F01 +#define GLX_GPU_OPENGL_VERSION_STRING_AMD 0x1F02 +#define GLX_GPU_FASTEST_TARGET_GPUS_AMD 0x21A2 +#define GLX_GPU_RAM_AMD 0x21A3 +#define GLX_GPU_CLOCK_AMD 0x21A4 +#define GLX_GPU_NUM_PIPES_AMD 0x21A5 +#define GLX_GPU_NUM_SIMD_AMD 0x21A6 +#define GLX_GPU_NUM_RB_AMD 0x21A7 +#define GLX_GPU_NUM_SPI_AMD 0x21A8 +typedef unsigned int ( *PFNGLXGETGPUIDSAMDPROC) (unsigned int maxCount, unsigned int *ids); +typedef int ( *PFNGLXGETGPUINFOAMDPROC) (unsigned int id, int property, GLenum dataType, unsigned int size, void *data); +typedef unsigned int ( *PFNGLXGETCONTEXTGPUIDAMDPROC) (GLXContext ctx); +typedef GLXContext ( *PFNGLXCREATEASSOCIATEDCONTEXTAMDPROC) (unsigned int id, GLXContext share_list); +typedef GLXContext ( *PFNGLXCREATEASSOCIATEDCONTEXTATTRIBSAMDPROC) (unsigned int id, GLXContext share_context, const int *attribList); +typedef Bool ( *PFNGLXDELETEASSOCIATEDCONTEXTAMDPROC) (GLXContext ctx); +typedef Bool ( *PFNGLXMAKEASSOCIATEDCONTEXTCURRENTAMDPROC) (GLXContext ctx); +typedef GLXContext ( 
*PFNGLXGETCURRENTASSOCIATEDCONTEXTAMDPROC) (void); +typedef void ( *PFNGLXBLITCONTEXTFRAMEBUFFERAMDPROC) (GLXContext dstCtx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#ifdef GLX_GLXEXT_PROTOTYPES +unsigned int glXGetGPUIDsAMD (unsigned int maxCount, unsigned int *ids); +int glXGetGPUInfoAMD (unsigned int id, int property, GLenum dataType, unsigned int size, void *data); +unsigned int glXGetContextGPUIDAMD (GLXContext ctx); +GLXContext glXCreateAssociatedContextAMD (unsigned int id, GLXContext share_list); +GLXContext glXCreateAssociatedContextAttribsAMD (unsigned int id, GLXContext share_context, const int *attribList); +Bool glXDeleteAssociatedContextAMD (GLXContext ctx); +Bool glXMakeAssociatedContextCurrentAMD (GLXContext ctx); +GLXContext glXGetCurrentAssociatedContextAMD (void); +void glXBlitContextFramebufferAMD (GLXContext dstCtx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#endif +#endif /* GLX_AMD_gpu_association */ + +#ifndef GLX_EXT_buffer_age +#define GLX_EXT_buffer_age 1 +#define GLX_BACK_BUFFER_AGE_EXT 0x20F4 +#endif /* GLX_EXT_buffer_age */ + +#ifndef GLX_EXT_create_context_es2_profile +#define GLX_EXT_create_context_es2_profile 1 +#define GLX_CONTEXT_ES2_PROFILE_BIT_EXT 0x00000004 +#endif /* GLX_EXT_create_context_es2_profile */ + +#ifndef GLX_EXT_create_context_es_profile +#define GLX_EXT_create_context_es_profile 1 +#define GLX_CONTEXT_ES_PROFILE_BIT_EXT 0x00000004 +#endif /* GLX_EXT_create_context_es_profile */ + +#ifndef GLX_EXT_fbconfig_packed_float +#define GLX_EXT_fbconfig_packed_float 1 +#define GLX_RGBA_UNSIGNED_FLOAT_TYPE_EXT 0x20B1 +#define GLX_RGBA_UNSIGNED_FLOAT_BIT_EXT 0x00000008 +#endif /* GLX_EXT_fbconfig_packed_float */ + +#ifndef GLX_EXT_framebuffer_sRGB +#define GLX_EXT_framebuffer_sRGB 1 +#define GLX_FRAMEBUFFER_SRGB_CAPABLE_EXT 0x20B2 +#endif /* GLX_EXT_framebuffer_sRGB */ + +#ifndef GLX_EXT_import_context +#define GLX_EXT_import_context 1 +#define GLX_SHARE_CONTEXT_EXT 0x800A +#define GLX_VISUAL_ID_EXT 0x800B +#define GLX_SCREEN_EXT 0x800C +typedef Display *( *PFNGLXGETCURRENTDISPLAYEXTPROC) (void); +typedef int ( *PFNGLXQUERYCONTEXTINFOEXTPROC) (Display *dpy, GLXContext context, int attribute, int *value); +typedef GLXContextID ( *PFNGLXGETCONTEXTIDEXTPROC) (const GLXContext context); +typedef GLXContext ( *PFNGLXIMPORTCONTEXTEXTPROC) (Display *dpy, GLXContextID contextID); +typedef void ( *PFNGLXFREECONTEXTEXTPROC) (Display *dpy, GLXContext context); +#ifdef GLX_GLXEXT_PROTOTYPES +Display *glXGetCurrentDisplayEXT (void); +int glXQueryContextInfoEXT (Display *dpy, GLXContext context, int attribute, int *value); +GLXContextID glXGetContextIDEXT (const GLXContext context); +GLXContext glXImportContextEXT (Display *dpy, GLXContextID contextID); +void glXFreeContextEXT (Display *dpy, GLXContext context); +#endif +#endif /* GLX_EXT_import_context */ + +#ifndef GLX_EXT_libglvnd +#define GLX_EXT_libglvnd 1 +#define GLX_VENDOR_NAMES_EXT 0x20F6 +#endif /* GLX_EXT_libglvnd */ + +#ifndef GLX_EXT_no_config_context +#define GLX_EXT_no_config_context 1 +#endif /* GLX_EXT_no_config_context */ + +#ifndef GLX_EXT_stereo_tree +#define GLX_EXT_stereo_tree 1 +typedef struct { + int type; + unsigned long serial; + Bool send_event; + Display *display; + int extension; + int evtype; + GLXDrawable window; + Bool stereo_tree; +} GLXStereoNotifyEventEXT; +#define GLX_STEREO_TREE_EXT 0x20F5 
+#define GLX_STEREO_NOTIFY_MASK_EXT 0x00000001 +#define GLX_STEREO_NOTIFY_EXT 0x00000000 +#endif /* GLX_EXT_stereo_tree */ + +#ifndef GLX_EXT_swap_control +#define GLX_EXT_swap_control 1 +#define GLX_SWAP_INTERVAL_EXT 0x20F1 +#define GLX_MAX_SWAP_INTERVAL_EXT 0x20F2 +typedef void ( *PFNGLXSWAPINTERVALEXTPROC) (Display *dpy, GLXDrawable drawable, int interval); +#ifdef GLX_GLXEXT_PROTOTYPES +void glXSwapIntervalEXT (Display *dpy, GLXDrawable drawable, int interval); +#endif +#endif /* GLX_EXT_swap_control */ + +#ifndef GLX_EXT_swap_control_tear +#define GLX_EXT_swap_control_tear 1 +#define GLX_LATE_SWAPS_TEAR_EXT 0x20F3 +#endif /* GLX_EXT_swap_control_tear */ + +#ifndef GLX_EXT_texture_from_pixmap +#define GLX_EXT_texture_from_pixmap 1 +#define GLX_TEXTURE_1D_BIT_EXT 0x00000001 +#define GLX_TEXTURE_2D_BIT_EXT 0x00000002 +#define GLX_TEXTURE_RECTANGLE_BIT_EXT 0x00000004 +#define GLX_BIND_TO_TEXTURE_RGB_EXT 0x20D0 +#define GLX_BIND_TO_TEXTURE_RGBA_EXT 0x20D1 +#define GLX_BIND_TO_MIPMAP_TEXTURE_EXT 0x20D2 +#define GLX_BIND_TO_TEXTURE_TARGETS_EXT 0x20D3 +#define GLX_Y_INVERTED_EXT 0x20D4 +#define GLX_TEXTURE_FORMAT_EXT 0x20D5 +#define GLX_TEXTURE_TARGET_EXT 0x20D6 +#define GLX_MIPMAP_TEXTURE_EXT 0x20D7 +#define GLX_TEXTURE_FORMAT_NONE_EXT 0x20D8 +#define GLX_TEXTURE_FORMAT_RGB_EXT 0x20D9 +#define GLX_TEXTURE_FORMAT_RGBA_EXT 0x20DA +#define GLX_TEXTURE_1D_EXT 0x20DB +#define GLX_TEXTURE_2D_EXT 0x20DC +#define GLX_TEXTURE_RECTANGLE_EXT 0x20DD +#define GLX_FRONT_LEFT_EXT 0x20DE +#define GLX_FRONT_RIGHT_EXT 0x20DF +#define GLX_BACK_LEFT_EXT 0x20E0 +#define GLX_BACK_RIGHT_EXT 0x20E1 +#define GLX_FRONT_EXT 0x20DE +#define GLX_BACK_EXT 0x20E0 +#define GLX_AUX0_EXT 0x20E2 +#define GLX_AUX1_EXT 0x20E3 +#define GLX_AUX2_EXT 0x20E4 +#define GLX_AUX3_EXT 0x20E5 +#define GLX_AUX4_EXT 0x20E6 +#define GLX_AUX5_EXT 0x20E7 +#define GLX_AUX6_EXT 0x20E8 +#define GLX_AUX7_EXT 0x20E9 +#define GLX_AUX8_EXT 0x20EA +#define GLX_AUX9_EXT 0x20EB +typedef void ( *PFNGLXBINDTEXIMAGEEXTPROC) (Display *dpy, GLXDrawable drawable, int buffer, const int *attrib_list); +typedef void ( *PFNGLXRELEASETEXIMAGEEXTPROC) (Display *dpy, GLXDrawable drawable, int buffer); +#ifdef GLX_GLXEXT_PROTOTYPES +void glXBindTexImageEXT (Display *dpy, GLXDrawable drawable, int buffer, const int *attrib_list); +void glXReleaseTexImageEXT (Display *dpy, GLXDrawable drawable, int buffer); +#endif +#endif /* GLX_EXT_texture_from_pixmap */ + +#ifndef GLX_EXT_visual_info +#define GLX_EXT_visual_info 1 +#define GLX_X_VISUAL_TYPE_EXT 0x22 +#define GLX_TRANSPARENT_TYPE_EXT 0x23 +#define GLX_TRANSPARENT_INDEX_VALUE_EXT 0x24 +#define GLX_TRANSPARENT_RED_VALUE_EXT 0x25 +#define GLX_TRANSPARENT_GREEN_VALUE_EXT 0x26 +#define GLX_TRANSPARENT_BLUE_VALUE_EXT 0x27 +#define GLX_TRANSPARENT_ALPHA_VALUE_EXT 0x28 +#define GLX_NONE_EXT 0x8000 +#define GLX_TRUE_COLOR_EXT 0x8002 +#define GLX_DIRECT_COLOR_EXT 0x8003 +#define GLX_PSEUDO_COLOR_EXT 0x8004 +#define GLX_STATIC_COLOR_EXT 0x8005 +#define GLX_GRAY_SCALE_EXT 0x8006 +#define GLX_STATIC_GRAY_EXT 0x8007 +#define GLX_TRANSPARENT_RGB_EXT 0x8008 +#define GLX_TRANSPARENT_INDEX_EXT 0x8009 +#endif /* GLX_EXT_visual_info */ + +#ifndef GLX_EXT_visual_rating +#define GLX_EXT_visual_rating 1 +#define GLX_VISUAL_CAVEAT_EXT 0x20 +#define GLX_SLOW_VISUAL_EXT 0x8001 +#define GLX_NON_CONFORMANT_VISUAL_EXT 0x800D +#endif /* GLX_EXT_visual_rating */ + +#ifndef GLX_INTEL_swap_event +#define GLX_INTEL_swap_event 1 +#define GLX_BUFFER_SWAP_COMPLETE_INTEL_MASK 0x04000000 +#define GLX_EXCHANGE_COMPLETE_INTEL 0x8180 +#define 
GLX_COPY_COMPLETE_INTEL 0x8181 +#define GLX_FLIP_COMPLETE_INTEL 0x8182 +#endif /* GLX_INTEL_swap_event */ + +#ifndef GLX_MESA_agp_offset +#define GLX_MESA_agp_offset 1 +typedef unsigned int ( *PFNGLXGETAGPOFFSETMESAPROC) (const void *pointer); +#ifdef GLX_GLXEXT_PROTOTYPES +unsigned int glXGetAGPOffsetMESA (const void *pointer); +#endif +#endif /* GLX_MESA_agp_offset */ + +#ifndef GLX_MESA_copy_sub_buffer +#define GLX_MESA_copy_sub_buffer 1 +typedef void ( *PFNGLXCOPYSUBBUFFERMESAPROC) (Display *dpy, GLXDrawable drawable, int x, int y, int width, int height); +#ifdef GLX_GLXEXT_PROTOTYPES +void glXCopySubBufferMESA (Display *dpy, GLXDrawable drawable, int x, int y, int width, int height); +#endif +#endif /* GLX_MESA_copy_sub_buffer */ + +#ifndef GLX_MESA_pixmap_colormap +#define GLX_MESA_pixmap_colormap 1 +typedef GLXPixmap ( *PFNGLXCREATEGLXPIXMAPMESAPROC) (Display *dpy, XVisualInfo *visual, Pixmap pixmap, Colormap cmap); +#ifdef GLX_GLXEXT_PROTOTYPES +GLXPixmap glXCreateGLXPixmapMESA (Display *dpy, XVisualInfo *visual, Pixmap pixmap, Colormap cmap); +#endif +#endif /* GLX_MESA_pixmap_colormap */ + +#ifndef GLX_MESA_query_renderer +#define GLX_MESA_query_renderer 1 +#define GLX_RENDERER_VENDOR_ID_MESA 0x8183 +#define GLX_RENDERER_DEVICE_ID_MESA 0x8184 +#define GLX_RENDERER_VERSION_MESA 0x8185 +#define GLX_RENDERER_ACCELERATED_MESA 0x8186 +#define GLX_RENDERER_VIDEO_MEMORY_MESA 0x8187 +#define GLX_RENDERER_UNIFIED_MEMORY_ARCHITECTURE_MESA 0x8188 +#define GLX_RENDERER_PREFERRED_PROFILE_MESA 0x8189 +#define GLX_RENDERER_OPENGL_CORE_PROFILE_VERSION_MESA 0x818A +#define GLX_RENDERER_OPENGL_COMPATIBILITY_PROFILE_VERSION_MESA 0x818B +#define GLX_RENDERER_OPENGL_ES_PROFILE_VERSION_MESA 0x818C +#define GLX_RENDERER_OPENGL_ES2_PROFILE_VERSION_MESA 0x818D +#define GLX_RENDERER_ID_MESA 0x818E +typedef Bool ( *PFNGLXQUERYCURRENTRENDERERINTEGERMESAPROC) (int attribute, unsigned int *value); +typedef const char *( *PFNGLXQUERYCURRENTRENDERERSTRINGMESAPROC) (int attribute); +typedef Bool ( *PFNGLXQUERYRENDERERINTEGERMESAPROC) (Display *dpy, int screen, int renderer, int attribute, unsigned int *value); +typedef const char *( *PFNGLXQUERYRENDERERSTRINGMESAPROC) (Display *dpy, int screen, int renderer, int attribute); +#ifdef GLX_GLXEXT_PROTOTYPES +Bool glXQueryCurrentRendererIntegerMESA (int attribute, unsigned int *value); +const char *glXQueryCurrentRendererStringMESA (int attribute); +Bool glXQueryRendererIntegerMESA (Display *dpy, int screen, int renderer, int attribute, unsigned int *value); +const char *glXQueryRendererStringMESA (Display *dpy, int screen, int renderer, int attribute); +#endif +#endif /* GLX_MESA_query_renderer */ + +#ifndef GLX_MESA_release_buffers +#define GLX_MESA_release_buffers 1 +typedef Bool ( *PFNGLXRELEASEBUFFERSMESAPROC) (Display *dpy, GLXDrawable drawable); +#ifdef GLX_GLXEXT_PROTOTYPES +Bool glXReleaseBuffersMESA (Display *dpy, GLXDrawable drawable); +#endif +#endif /* GLX_MESA_release_buffers */ + +#ifndef GLX_MESA_set_3dfx_mode +#define GLX_MESA_set_3dfx_mode 1 +#define GLX_3DFX_WINDOW_MODE_MESA 0x1 +#define GLX_3DFX_FULLSCREEN_MODE_MESA 0x2 +typedef Bool ( *PFNGLXSET3DFXMODEMESAPROC) (int mode); +#ifdef GLX_GLXEXT_PROTOTYPES +Bool glXSet3DfxModeMESA (int mode); +#endif +#endif /* GLX_MESA_set_3dfx_mode */ + +#ifndef GLX_MESA_swap_control +#define GLX_MESA_swap_control 1 +typedef int ( *PFNGLXGETSWAPINTERVALMESAPROC) (void); +typedef int ( *PFNGLXSWAPINTERVALMESAPROC) (unsigned int interval); +#ifdef GLX_GLXEXT_PROTOTYPES +int glXGetSwapIntervalMESA (void); +int 
glXSwapIntervalMESA (unsigned int interval); +#endif +#endif /* GLX_MESA_swap_control */ + +#ifndef GLX_NV_copy_buffer +#define GLX_NV_copy_buffer 1 +typedef void ( *PFNGLXCOPYBUFFERSUBDATANVPROC) (Display *dpy, GLXContext readCtx, GLXContext writeCtx, GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void ( *PFNGLXNAMEDCOPYBUFFERSUBDATANVPROC) (Display *dpy, GLXContext readCtx, GLXContext writeCtx, GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +#ifdef GLX_GLXEXT_PROTOTYPES +void glXCopyBufferSubDataNV (Display *dpy, GLXContext readCtx, GLXContext writeCtx, GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +void glXNamedCopyBufferSubDataNV (Display *dpy, GLXContext readCtx, GLXContext writeCtx, GLuint readBuffer, GLuint writeBuffer, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +#endif +#endif /* GLX_NV_copy_buffer */ + +#ifndef GLX_NV_copy_image +#define GLX_NV_copy_image 1 +typedef void ( *PFNGLXCOPYIMAGESUBDATANVPROC) (Display *dpy, GLXContext srcCtx, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLXContext dstCtx, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei width, GLsizei height, GLsizei depth); +#ifdef GLX_GLXEXT_PROTOTYPES +void glXCopyImageSubDataNV (Display *dpy, GLXContext srcCtx, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLXContext dstCtx, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei width, GLsizei height, GLsizei depth); +#endif +#endif /* GLX_NV_copy_image */ + +#ifndef GLX_NV_delay_before_swap +#define GLX_NV_delay_before_swap 1 +typedef Bool ( *PFNGLXDELAYBEFORESWAPNVPROC) (Display *dpy, GLXDrawable drawable, GLfloat seconds); +#ifdef GLX_GLXEXT_PROTOTYPES +Bool glXDelayBeforeSwapNV (Display *dpy, GLXDrawable drawable, GLfloat seconds); +#endif +#endif /* GLX_NV_delay_before_swap */ + +#ifndef GLX_NV_float_buffer +#define GLX_NV_float_buffer 1 +#define GLX_FLOAT_COMPONENTS_NV 0x20B0 +#endif /* GLX_NV_float_buffer */ + +#ifndef GLX_NV_multisample_coverage +#define GLX_NV_multisample_coverage 1 +#define GLX_COVERAGE_SAMPLES_NV 100001 +#define GLX_COLOR_SAMPLES_NV 0x20B3 +#endif /* GLX_NV_multisample_coverage */ + +#ifndef GLX_NV_present_video +#define GLX_NV_present_video 1 +#define GLX_NUM_VIDEO_SLOTS_NV 0x20F0 +typedef unsigned int *( *PFNGLXENUMERATEVIDEODEVICESNVPROC) (Display *dpy, int screen, int *nelements); +typedef int ( *PFNGLXBINDVIDEODEVICENVPROC) (Display *dpy, unsigned int video_slot, unsigned int video_device, const int *attrib_list); +#ifdef GLX_GLXEXT_PROTOTYPES +unsigned int *glXEnumerateVideoDevicesNV (Display *dpy, int screen, int *nelements); +int glXBindVideoDeviceNV (Display *dpy, unsigned int video_slot, unsigned int video_device, const int *attrib_list); +#endif +#endif /* GLX_NV_present_video */ + +#ifndef GLX_NV_robustness_video_memory_purge +#define GLX_NV_robustness_video_memory_purge 1 +#define GLX_GENERATE_RESET_ON_VIDEO_MEMORY_PURGE_NV 0x20F7 +#endif /* GLX_NV_robustness_video_memory_purge */ + +#ifndef GLX_NV_swap_group +#define GLX_NV_swap_group 1 +typedef Bool ( *PFNGLXJOINSWAPGROUPNVPROC) (Display *dpy, GLXDrawable drawable, GLuint group); +typedef Bool ( *PFNGLXBINDSWAPBARRIERNVPROC) (Display *dpy, GLuint group, GLuint barrier); +typedef Bool ( *PFNGLXQUERYSWAPGROUPNVPROC) (Display *dpy, 
GLXDrawable drawable, GLuint *group, GLuint *barrier); +typedef Bool ( *PFNGLXQUERYMAXSWAPGROUPSNVPROC) (Display *dpy, int screen, GLuint *maxGroups, GLuint *maxBarriers); +typedef Bool ( *PFNGLXQUERYFRAMECOUNTNVPROC) (Display *dpy, int screen, GLuint *count); +typedef Bool ( *PFNGLXRESETFRAMECOUNTNVPROC) (Display *dpy, int screen); +#ifdef GLX_GLXEXT_PROTOTYPES +Bool glXJoinSwapGroupNV (Display *dpy, GLXDrawable drawable, GLuint group); +Bool glXBindSwapBarrierNV (Display *dpy, GLuint group, GLuint barrier); +Bool glXQuerySwapGroupNV (Display *dpy, GLXDrawable drawable, GLuint *group, GLuint *barrier); +Bool glXQueryMaxSwapGroupsNV (Display *dpy, int screen, GLuint *maxGroups, GLuint *maxBarriers); +Bool glXQueryFrameCountNV (Display *dpy, int screen, GLuint *count); +Bool glXResetFrameCountNV (Display *dpy, int screen); +#endif +#endif /* GLX_NV_swap_group */ + +#ifndef GLX_NV_video_capture +#define GLX_NV_video_capture 1 +typedef XID GLXVideoCaptureDeviceNV; +#define GLX_DEVICE_ID_NV 0x20CD +#define GLX_UNIQUE_ID_NV 0x20CE +#define GLX_NUM_VIDEO_CAPTURE_SLOTS_NV 0x20CF +typedef int ( *PFNGLXBINDVIDEOCAPTUREDEVICENVPROC) (Display *dpy, unsigned int video_capture_slot, GLXVideoCaptureDeviceNV device); +typedef GLXVideoCaptureDeviceNV *( *PFNGLXENUMERATEVIDEOCAPTUREDEVICESNVPROC) (Display *dpy, int screen, int *nelements); +typedef void ( *PFNGLXLOCKVIDEOCAPTUREDEVICENVPROC) (Display *dpy, GLXVideoCaptureDeviceNV device); +typedef int ( *PFNGLXQUERYVIDEOCAPTUREDEVICENVPROC) (Display *dpy, GLXVideoCaptureDeviceNV device, int attribute, int *value); +typedef void ( *PFNGLXRELEASEVIDEOCAPTUREDEVICENVPROC) (Display *dpy, GLXVideoCaptureDeviceNV device); +#ifdef GLX_GLXEXT_PROTOTYPES +int glXBindVideoCaptureDeviceNV (Display *dpy, unsigned int video_capture_slot, GLXVideoCaptureDeviceNV device); +GLXVideoCaptureDeviceNV *glXEnumerateVideoCaptureDevicesNV (Display *dpy, int screen, int *nelements); +void glXLockVideoCaptureDeviceNV (Display *dpy, GLXVideoCaptureDeviceNV device); +int glXQueryVideoCaptureDeviceNV (Display *dpy, GLXVideoCaptureDeviceNV device, int attribute, int *value); +void glXReleaseVideoCaptureDeviceNV (Display *dpy, GLXVideoCaptureDeviceNV device); +#endif +#endif /* GLX_NV_video_capture */ + +#ifndef GLX_NV_video_out +#define GLX_NV_video_out 1 +typedef unsigned int GLXVideoDeviceNV; +#define GLX_VIDEO_OUT_COLOR_NV 0x20C3 +#define GLX_VIDEO_OUT_ALPHA_NV 0x20C4 +#define GLX_VIDEO_OUT_DEPTH_NV 0x20C5 +#define GLX_VIDEO_OUT_COLOR_AND_ALPHA_NV 0x20C6 +#define GLX_VIDEO_OUT_COLOR_AND_DEPTH_NV 0x20C7 +#define GLX_VIDEO_OUT_FRAME_NV 0x20C8 +#define GLX_VIDEO_OUT_FIELD_1_NV 0x20C9 +#define GLX_VIDEO_OUT_FIELD_2_NV 0x20CA +#define GLX_VIDEO_OUT_STACKED_FIELDS_1_2_NV 0x20CB +#define GLX_VIDEO_OUT_STACKED_FIELDS_2_1_NV 0x20CC +typedef int ( *PFNGLXGETVIDEODEVICENVPROC) (Display *dpy, int screen, int numVideoDevices, GLXVideoDeviceNV *pVideoDevice); +typedef int ( *PFNGLXRELEASEVIDEODEVICENVPROC) (Display *dpy, int screen, GLXVideoDeviceNV VideoDevice); +typedef int ( *PFNGLXBINDVIDEOIMAGENVPROC) (Display *dpy, GLXVideoDeviceNV VideoDevice, GLXPbuffer pbuf, int iVideoBuffer); +typedef int ( *PFNGLXRELEASEVIDEOIMAGENVPROC) (Display *dpy, GLXPbuffer pbuf); +typedef int ( *PFNGLXSENDPBUFFERTOVIDEONVPROC) (Display *dpy, GLXPbuffer pbuf, int iBufferType, unsigned long *pulCounterPbuffer, GLboolean bBlock); +typedef int ( *PFNGLXGETVIDEOINFONVPROC) (Display *dpy, int screen, GLXVideoDeviceNV VideoDevice, unsigned long *pulCounterOutputPbuffer, unsigned long *pulCounterOutputVideo); +#ifdef 
GLX_GLXEXT_PROTOTYPES +int glXGetVideoDeviceNV (Display *dpy, int screen, int numVideoDevices, GLXVideoDeviceNV *pVideoDevice); +int glXReleaseVideoDeviceNV (Display *dpy, int screen, GLXVideoDeviceNV VideoDevice); +int glXBindVideoImageNV (Display *dpy, GLXVideoDeviceNV VideoDevice, GLXPbuffer pbuf, int iVideoBuffer); +int glXReleaseVideoImageNV (Display *dpy, GLXPbuffer pbuf); +int glXSendPbufferToVideoNV (Display *dpy, GLXPbuffer pbuf, int iBufferType, unsigned long *pulCounterPbuffer, GLboolean bBlock); +int glXGetVideoInfoNV (Display *dpy, int screen, GLXVideoDeviceNV VideoDevice, unsigned long *pulCounterOutputPbuffer, unsigned long *pulCounterOutputVideo); +#endif +#endif /* GLX_NV_video_out */ + +#ifndef GLX_OML_swap_method +#define GLX_OML_swap_method 1 +#define GLX_SWAP_METHOD_OML 0x8060 +#define GLX_SWAP_EXCHANGE_OML 0x8061 +#define GLX_SWAP_COPY_OML 0x8062 +#define GLX_SWAP_UNDEFINED_OML 0x8063 +#endif /* GLX_OML_swap_method */ + +#ifndef GLX_OML_sync_control +#define GLX_OML_sync_control 1 +#ifndef GLEXT_64_TYPES_DEFINED +/* This code block is duplicated in glext.h, so must be protected */ +#define GLEXT_64_TYPES_DEFINED +/* Define int32_t, int64_t, and uint64_t types for UST/MSC */ +/* (as used in the GLX_OML_sync_control extension). */ +#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L +#include <inttypes.h> +#elif defined(__sun__) || defined(__digital__) +#include <inttypes.h> +#if defined(__STDC__) +#if defined(__arch64__) || defined(_LP64) +typedef long int int64_t; +typedef unsigned long int uint64_t; +#else +typedef long long int int64_t; +typedef unsigned long long int uint64_t; +#endif /* __arch64__ */ +#endif /* __STDC__ */ +#elif defined( __VMS ) || defined(__sgi) +#include <inttypes.h> +#elif defined(__SCO__) || defined(__USLC__) +#include <stdint.h> +#elif defined(__UNIXOS2__) || defined(__SOL64__) +typedef long int int32_t; +typedef long long int int64_t; +typedef unsigned long long int uint64_t; +#elif defined(_WIN32) && defined(__GNUC__) +#include <stdint.h> +#elif defined(_WIN32) +typedef __int32 int32_t; +typedef __int64 int64_t; +typedef unsigned __int64 uint64_t; +#else +/* Fallback if nothing above works */ +#include <inttypes.h> +#endif +#endif +typedef Bool ( *PFNGLXGETSYNCVALUESOMLPROC) (Display *dpy, GLXDrawable drawable, int64_t *ust, int64_t *msc, int64_t *sbc); +typedef Bool ( *PFNGLXGETMSCRATEOMLPROC) (Display *dpy, GLXDrawable drawable, int32_t *numerator, int32_t *denominator); +typedef int64_t ( *PFNGLXSWAPBUFFERSMSCOMLPROC) (Display *dpy, GLXDrawable drawable, int64_t target_msc, int64_t divisor, int64_t remainder); +typedef Bool ( *PFNGLXWAITFORMSCOMLPROC) (Display *dpy, GLXDrawable drawable, int64_t target_msc, int64_t divisor, int64_t remainder, int64_t *ust, int64_t *msc, int64_t *sbc); +typedef Bool ( *PFNGLXWAITFORSBCOMLPROC) (Display *dpy, GLXDrawable drawable, int64_t target_sbc, int64_t *ust, int64_t *msc, int64_t *sbc); +#ifdef GLX_GLXEXT_PROTOTYPES +Bool glXGetSyncValuesOML (Display *dpy, GLXDrawable drawable, int64_t *ust, int64_t *msc, int64_t *sbc); +Bool glXGetMscRateOML (Display *dpy, GLXDrawable drawable, int32_t *numerator, int32_t *denominator); +int64_t glXSwapBuffersMscOML (Display *dpy, GLXDrawable drawable, int64_t target_msc, int64_t divisor, int64_t remainder); +Bool glXWaitForMscOML (Display *dpy, GLXDrawable drawable, int64_t target_msc, int64_t divisor, int64_t remainder, int64_t *ust, int64_t *msc, int64_t *sbc); +Bool glXWaitForSbcOML (Display *dpy, GLXDrawable drawable, int64_t target_sbc, int64_t *ust, int64_t *msc, int64_t *sbc); +#endif +#endif /* GLX_OML_sync_control */ + 
+#ifndef GLX_SGIS_blended_overlay +#define GLX_SGIS_blended_overlay 1 +#define GLX_BLENDED_RGBA_SGIS 0x8025 +#endif /* GLX_SGIS_blended_overlay */ + +#ifndef GLX_SGIS_multisample +#define GLX_SGIS_multisample 1 +#define GLX_SAMPLE_BUFFERS_SGIS 100000 +#define GLX_SAMPLES_SGIS 100001 +#endif /* GLX_SGIS_multisample */ + +#ifndef GLX_SGIS_shared_multisample +#define GLX_SGIS_shared_multisample 1 +#define GLX_MULTISAMPLE_SUB_RECT_WIDTH_SGIS 0x8026 +#define GLX_MULTISAMPLE_SUB_RECT_HEIGHT_SGIS 0x8027 +#endif /* GLX_SGIS_shared_multisample */ + +#ifndef GLX_SGIX_dmbuffer +#define GLX_SGIX_dmbuffer 1 +typedef XID GLXPbufferSGIX; +#ifdef _DM_BUFFER_H_ +#define GLX_DIGITAL_MEDIA_PBUFFER_SGIX 0x8024 +typedef Bool ( *PFNGLXASSOCIATEDMPBUFFERSGIXPROC) (Display *dpy, GLXPbufferSGIX pbuffer, DMparams *params, DMbuffer dmbuffer); +#ifdef GLX_GLXEXT_PROTOTYPES +Bool glXAssociateDMPbufferSGIX (Display *dpy, GLXPbufferSGIX pbuffer, DMparams *params, DMbuffer dmbuffer); +#endif +#endif /* _DM_BUFFER_H_ */ +#endif /* GLX_SGIX_dmbuffer */ + +#ifndef GLX_SGIX_fbconfig +#define GLX_SGIX_fbconfig 1 +typedef struct __GLXFBConfigRec *GLXFBConfigSGIX; +#define GLX_WINDOW_BIT_SGIX 0x00000001 +#define GLX_PIXMAP_BIT_SGIX 0x00000002 +#define GLX_RGBA_BIT_SGIX 0x00000001 +#define GLX_COLOR_INDEX_BIT_SGIX 0x00000002 +#define GLX_DRAWABLE_TYPE_SGIX 0x8010 +#define GLX_RENDER_TYPE_SGIX 0x8011 +#define GLX_X_RENDERABLE_SGIX 0x8012 +#define GLX_FBCONFIG_ID_SGIX 0x8013 +#define GLX_RGBA_TYPE_SGIX 0x8014 +#define GLX_COLOR_INDEX_TYPE_SGIX 0x8015 +typedef int ( *PFNGLXGETFBCONFIGATTRIBSGIXPROC) (Display *dpy, GLXFBConfigSGIX config, int attribute, int *value); +typedef GLXFBConfigSGIX *( *PFNGLXCHOOSEFBCONFIGSGIXPROC) (Display *dpy, int screen, int *attrib_list, int *nelements); +typedef GLXPixmap ( *PFNGLXCREATEGLXPIXMAPWITHCONFIGSGIXPROC) (Display *dpy, GLXFBConfigSGIX config, Pixmap pixmap); +typedef GLXContext ( *PFNGLXCREATECONTEXTWITHCONFIGSGIXPROC) (Display *dpy, GLXFBConfigSGIX config, int render_type, GLXContext share_list, Bool direct); +typedef XVisualInfo *( *PFNGLXGETVISUALFROMFBCONFIGSGIXPROC) (Display *dpy, GLXFBConfigSGIX config); +typedef GLXFBConfigSGIX ( *PFNGLXGETFBCONFIGFROMVISUALSGIXPROC) (Display *dpy, XVisualInfo *vis); +#ifdef GLX_GLXEXT_PROTOTYPES +int glXGetFBConfigAttribSGIX (Display *dpy, GLXFBConfigSGIX config, int attribute, int *value); +GLXFBConfigSGIX *glXChooseFBConfigSGIX (Display *dpy, int screen, int *attrib_list, int *nelements); +GLXPixmap glXCreateGLXPixmapWithConfigSGIX (Display *dpy, GLXFBConfigSGIX config, Pixmap pixmap); +GLXContext glXCreateContextWithConfigSGIX (Display *dpy, GLXFBConfigSGIX config, int render_type, GLXContext share_list, Bool direct); +XVisualInfo *glXGetVisualFromFBConfigSGIX (Display *dpy, GLXFBConfigSGIX config); +GLXFBConfigSGIX glXGetFBConfigFromVisualSGIX (Display *dpy, XVisualInfo *vis); +#endif +#endif /* GLX_SGIX_fbconfig */ + +#ifndef GLX_SGIX_hyperpipe +#define GLX_SGIX_hyperpipe 1 +typedef struct { + char pipeName[80]; /* Should be [GLX_HYPERPIPE_PIPE_NAME_LENGTH_SGIX] */ + int networkId; +} GLXHyperpipeNetworkSGIX; +typedef struct { + char pipeName[80]; /* Should be [GLX_HYPERPIPE_PIPE_NAME_LENGTH_SGIX] */ + int channel; + unsigned int participationType; + int timeSlice; +} GLXHyperpipeConfigSGIX; +typedef struct { + char pipeName[80]; /* Should be [GLX_HYPERPIPE_PIPE_NAME_LENGTH_SGIX] */ + int srcXOrigin, srcYOrigin, srcWidth, srcHeight; + int destXOrigin, destYOrigin, destWidth, destHeight; +} GLXPipeRect; +typedef struct { + char pipeName[80]; /* 
Should be [GLX_HYPERPIPE_PIPE_NAME_LENGTH_SGIX] */ + int XOrigin, YOrigin, maxHeight, maxWidth; +} GLXPipeRectLimits; +#define GLX_HYPERPIPE_PIPE_NAME_LENGTH_SGIX 80 +#define GLX_BAD_HYPERPIPE_CONFIG_SGIX 91 +#define GLX_BAD_HYPERPIPE_SGIX 92 +#define GLX_HYPERPIPE_DISPLAY_PIPE_SGIX 0x00000001 +#define GLX_HYPERPIPE_RENDER_PIPE_SGIX 0x00000002 +#define GLX_PIPE_RECT_SGIX 0x00000001 +#define GLX_PIPE_RECT_LIMITS_SGIX 0x00000002 +#define GLX_HYPERPIPE_STEREO_SGIX 0x00000003 +#define GLX_HYPERPIPE_PIXEL_AVERAGE_SGIX 0x00000004 +#define GLX_HYPERPIPE_ID_SGIX 0x8030 +typedef GLXHyperpipeNetworkSGIX *( *PFNGLXQUERYHYPERPIPENETWORKSGIXPROC) (Display *dpy, int *npipes); +typedef int ( *PFNGLXHYPERPIPECONFIGSGIXPROC) (Display *dpy, int networkId, int npipes, GLXHyperpipeConfigSGIX *cfg, int *hpId); +typedef GLXHyperpipeConfigSGIX *( *PFNGLXQUERYHYPERPIPECONFIGSGIXPROC) (Display *dpy, int hpId, int *npipes); +typedef int ( *PFNGLXDESTROYHYPERPIPECONFIGSGIXPROC) (Display *dpy, int hpId); +typedef int ( *PFNGLXBINDHYPERPIPESGIXPROC) (Display *dpy, int hpId); +typedef int ( *PFNGLXQUERYHYPERPIPEBESTATTRIBSGIXPROC) (Display *dpy, int timeSlice, int attrib, int size, void *attribList, void *returnAttribList); +typedef int ( *PFNGLXHYPERPIPEATTRIBSGIXPROC) (Display *dpy, int timeSlice, int attrib, int size, void *attribList); +typedef int ( *PFNGLXQUERYHYPERPIPEATTRIBSGIXPROC) (Display *dpy, int timeSlice, int attrib, int size, void *returnAttribList); +#ifdef GLX_GLXEXT_PROTOTYPES +GLXHyperpipeNetworkSGIX *glXQueryHyperpipeNetworkSGIX (Display *dpy, int *npipes); +int glXHyperpipeConfigSGIX (Display *dpy, int networkId, int npipes, GLXHyperpipeConfigSGIX *cfg, int *hpId); +GLXHyperpipeConfigSGIX *glXQueryHyperpipeConfigSGIX (Display *dpy, int hpId, int *npipes); +int glXDestroyHyperpipeConfigSGIX (Display *dpy, int hpId); +int glXBindHyperpipeSGIX (Display *dpy, int hpId); +int glXQueryHyperpipeBestAttribSGIX (Display *dpy, int timeSlice, int attrib, int size, void *attribList, void *returnAttribList); +int glXHyperpipeAttribSGIX (Display *dpy, int timeSlice, int attrib, int size, void *attribList); +int glXQueryHyperpipeAttribSGIX (Display *dpy, int timeSlice, int attrib, int size, void *returnAttribList); +#endif +#endif /* GLX_SGIX_hyperpipe */ + +#ifndef GLX_SGIX_pbuffer +#define GLX_SGIX_pbuffer 1 +#define GLX_PBUFFER_BIT_SGIX 0x00000004 +#define GLX_BUFFER_CLOBBER_MASK_SGIX 0x08000000 +#define GLX_FRONT_LEFT_BUFFER_BIT_SGIX 0x00000001 +#define GLX_FRONT_RIGHT_BUFFER_BIT_SGIX 0x00000002 +#define GLX_BACK_LEFT_BUFFER_BIT_SGIX 0x00000004 +#define GLX_BACK_RIGHT_BUFFER_BIT_SGIX 0x00000008 +#define GLX_AUX_BUFFERS_BIT_SGIX 0x00000010 +#define GLX_DEPTH_BUFFER_BIT_SGIX 0x00000020 +#define GLX_STENCIL_BUFFER_BIT_SGIX 0x00000040 +#define GLX_ACCUM_BUFFER_BIT_SGIX 0x00000080 +#define GLX_SAMPLE_BUFFERS_BIT_SGIX 0x00000100 +#define GLX_MAX_PBUFFER_WIDTH_SGIX 0x8016 +#define GLX_MAX_PBUFFER_HEIGHT_SGIX 0x8017 +#define GLX_MAX_PBUFFER_PIXELS_SGIX 0x8018 +#define GLX_OPTIMAL_PBUFFER_WIDTH_SGIX 0x8019 +#define GLX_OPTIMAL_PBUFFER_HEIGHT_SGIX 0x801A +#define GLX_PRESERVED_CONTENTS_SGIX 0x801B +#define GLX_LARGEST_PBUFFER_SGIX 0x801C +#define GLX_WIDTH_SGIX 0x801D +#define GLX_HEIGHT_SGIX 0x801E +#define GLX_EVENT_MASK_SGIX 0x801F +#define GLX_DAMAGED_SGIX 0x8020 +#define GLX_SAVED_SGIX 0x8021 +#define GLX_WINDOW_SGIX 0x8022 +#define GLX_PBUFFER_SGIX 0x8023 +typedef GLXPbufferSGIX ( *PFNGLXCREATEGLXPBUFFERSGIXPROC) (Display *dpy, GLXFBConfigSGIX config, unsigned int width, unsigned int height, int *attrib_list); 
+typedef void ( *PFNGLXDESTROYGLXPBUFFERSGIXPROC) (Display *dpy, GLXPbufferSGIX pbuf); +typedef int ( *PFNGLXQUERYGLXPBUFFERSGIXPROC) (Display *dpy, GLXPbufferSGIX pbuf, int attribute, unsigned int *value); +typedef void ( *PFNGLXSELECTEVENTSGIXPROC) (Display *dpy, GLXDrawable drawable, unsigned long mask); +typedef void ( *PFNGLXGETSELECTEDEVENTSGIXPROC) (Display *dpy, GLXDrawable drawable, unsigned long *mask); +#ifdef GLX_GLXEXT_PROTOTYPES +GLXPbufferSGIX glXCreateGLXPbufferSGIX (Display *dpy, GLXFBConfigSGIX config, unsigned int width, unsigned int height, int *attrib_list); +void glXDestroyGLXPbufferSGIX (Display *dpy, GLXPbufferSGIX pbuf); +int glXQueryGLXPbufferSGIX (Display *dpy, GLXPbufferSGIX pbuf, int attribute, unsigned int *value); +void glXSelectEventSGIX (Display *dpy, GLXDrawable drawable, unsigned long mask); +void glXGetSelectedEventSGIX (Display *dpy, GLXDrawable drawable, unsigned long *mask); +#endif +#endif /* GLX_SGIX_pbuffer */ + +#ifndef GLX_SGIX_swap_barrier +#define GLX_SGIX_swap_barrier 1 +typedef void ( *PFNGLXBINDSWAPBARRIERSGIXPROC) (Display *dpy, GLXDrawable drawable, int barrier); +typedef Bool ( *PFNGLXQUERYMAXSWAPBARRIERSSGIXPROC) (Display *dpy, int screen, int *max); +#ifdef GLX_GLXEXT_PROTOTYPES +void glXBindSwapBarrierSGIX (Display *dpy, GLXDrawable drawable, int barrier); +Bool glXQueryMaxSwapBarriersSGIX (Display *dpy, int screen, int *max); +#endif +#endif /* GLX_SGIX_swap_barrier */ + +#ifndef GLX_SGIX_swap_group +#define GLX_SGIX_swap_group 1 +typedef void ( *PFNGLXJOINSWAPGROUPSGIXPROC) (Display *dpy, GLXDrawable drawable, GLXDrawable member); +#ifdef GLX_GLXEXT_PROTOTYPES +void glXJoinSwapGroupSGIX (Display *dpy, GLXDrawable drawable, GLXDrawable member); +#endif +#endif /* GLX_SGIX_swap_group */ + +#ifndef GLX_SGIX_video_resize +#define GLX_SGIX_video_resize 1 +#define GLX_SYNC_FRAME_SGIX 0x00000000 +#define GLX_SYNC_SWAP_SGIX 0x00000001 +typedef int ( *PFNGLXBINDCHANNELTOWINDOWSGIXPROC) (Display *display, int screen, int channel, Window window); +typedef int ( *PFNGLXCHANNELRECTSGIXPROC) (Display *display, int screen, int channel, int x, int y, int w, int h); +typedef int ( *PFNGLXQUERYCHANNELRECTSGIXPROC) (Display *display, int screen, int channel, int *dx, int *dy, int *dw, int *dh); +typedef int ( *PFNGLXQUERYCHANNELDELTASSGIXPROC) (Display *display, int screen, int channel, int *x, int *y, int *w, int *h); +typedef int ( *PFNGLXCHANNELRECTSYNCSGIXPROC) (Display *display, int screen, int channel, GLenum synctype); +#ifdef GLX_GLXEXT_PROTOTYPES +int glXBindChannelToWindowSGIX (Display *display, int screen, int channel, Window window); +int glXChannelRectSGIX (Display *display, int screen, int channel, int x, int y, int w, int h); +int glXQueryChannelRectSGIX (Display *display, int screen, int channel, int *dx, int *dy, int *dw, int *dh); +int glXQueryChannelDeltasSGIX (Display *display, int screen, int channel, int *x, int *y, int *w, int *h); +int glXChannelRectSyncSGIX (Display *display, int screen, int channel, GLenum synctype); +#endif +#endif /* GLX_SGIX_video_resize */ + +#ifndef GLX_SGIX_video_source +#define GLX_SGIX_video_source 1 +typedef XID GLXVideoSourceSGIX; +#ifdef _VL_H +typedef GLXVideoSourceSGIX ( *PFNGLXCREATEGLXVIDEOSOURCESGIXPROC) (Display *display, int screen, VLServer server, VLPath path, int nodeClass, VLNode drainNode); +typedef void ( *PFNGLXDESTROYGLXVIDEOSOURCESGIXPROC) (Display *dpy, GLXVideoSourceSGIX glxvideosource); +#ifdef GLX_GLXEXT_PROTOTYPES +GLXVideoSourceSGIX glXCreateGLXVideoSourceSGIX (Display 
*display, int screen, VLServer server, VLPath path, int nodeClass, VLNode drainNode); +void glXDestroyGLXVideoSourceSGIX (Display *dpy, GLXVideoSourceSGIX glxvideosource); +#endif +#endif /* _VL_H */ +#endif /* GLX_SGIX_video_source */ + +#ifndef GLX_SGIX_visual_select_group +#define GLX_SGIX_visual_select_group 1 +#define GLX_VISUAL_SELECT_GROUP_SGIX 0x8028 +#endif /* GLX_SGIX_visual_select_group */ + +#ifndef GLX_SGI_cushion +#define GLX_SGI_cushion 1 +typedef void ( *PFNGLXCUSHIONSGIPROC) (Display *dpy, Window window, float cushion); +#ifdef GLX_GLXEXT_PROTOTYPES +void glXCushionSGI (Display *dpy, Window window, float cushion); +#endif +#endif /* GLX_SGI_cushion */ + +#ifndef GLX_SGI_make_current_read +#define GLX_SGI_make_current_read 1 +typedef Bool ( *PFNGLXMAKECURRENTREADSGIPROC) (Display *dpy, GLXDrawable draw, GLXDrawable read, GLXContext ctx); +typedef GLXDrawable ( *PFNGLXGETCURRENTREADDRAWABLESGIPROC) (void); +#ifdef GLX_GLXEXT_PROTOTYPES +Bool glXMakeCurrentReadSGI (Display *dpy, GLXDrawable draw, GLXDrawable read, GLXContext ctx); +GLXDrawable glXGetCurrentReadDrawableSGI (void); +#endif +#endif /* GLX_SGI_make_current_read */ + +#ifndef GLX_SGI_swap_control +#define GLX_SGI_swap_control 1 +typedef int ( *PFNGLXSWAPINTERVALSGIPROC) (int interval); +#ifdef GLX_GLXEXT_PROTOTYPES +int glXSwapIntervalSGI (int interval); +#endif +#endif /* GLX_SGI_swap_control */ + +#ifndef GLX_SGI_video_sync +#define GLX_SGI_video_sync 1 +typedef int ( *PFNGLXGETVIDEOSYNCSGIPROC) (unsigned int *count); +typedef int ( *PFNGLXWAITVIDEOSYNCSGIPROC) (int divisor, int remainder, unsigned int *count); +#ifdef GLX_GLXEXT_PROTOTYPES +int glXGetVideoSyncSGI (unsigned int *count); +int glXWaitVideoSyncSGI (int divisor, int remainder, unsigned int *count); +#endif +#endif /* GLX_SGI_video_sync */ + +#ifndef GLX_SUN_get_transparent_index +#define GLX_SUN_get_transparent_index 1 +typedef Status ( *PFNGLXGETTRANSPARENTINDEXSUNPROC) (Display *dpy, Window overlay, Window underlay, long *pTransparentIndex); +#ifdef GLX_GLXEXT_PROTOTYPES +Status glXGetTransparentIndexSUN (Display *dpy, Window overlay, Window underlay, long *pTransparentIndex); +#endif +#endif /* GLX_SUN_get_transparent_index */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/GL/wglext.h b/vendor/swiftshader/include/GL/wglext.h new file mode 100644 index 00000000..2d528f77 --- /dev/null +++ b/vendor/swiftshader/include/GL/wglext.h @@ -0,0 +1,850 @@ +#ifndef __wgl_wglext_h_ +#define __wgl_wglext_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2018 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#if defined(_WIN32) && !defined(APIENTRY) && !defined(__CYGWIN__) && !defined(__SCITECH_SNAP__) +#define WIN32_LEAN_AND_MEAN 1 +#include <windows.h> +#endif + +#define WGL_WGLEXT_VERSION 20180525 + +/* Generated C header for: + * API: wgl + * Versions considered: .* + * Versions emitted: _nomatch_^ + * Default extensions included: wgl + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef WGL_ARB_buffer_region +#define WGL_ARB_buffer_region 1 +#define WGL_FRONT_COLOR_BUFFER_BIT_ARB 0x00000001 +#define WGL_BACK_COLOR_BUFFER_BIT_ARB 0x00000002 +#define WGL_DEPTH_BUFFER_BIT_ARB 0x00000004 +#define WGL_STENCIL_BUFFER_BIT_ARB 0x00000008 +typedef HANDLE (WINAPI * PFNWGLCREATEBUFFERREGIONARBPROC) (HDC hDC, int iLayerPlane, UINT uType); +typedef VOID (WINAPI * PFNWGLDELETEBUFFERREGIONARBPROC) (HANDLE hRegion); +typedef BOOL (WINAPI * PFNWGLSAVEBUFFERREGIONARBPROC) (HANDLE hRegion, int x, int y, int width, int height); +typedef BOOL (WINAPI * PFNWGLRESTOREBUFFERREGIONARBPROC) (HANDLE hRegion, int x, int y, int width, int height, int xSrc, int ySrc); +#ifdef WGL_WGLEXT_PROTOTYPES +HANDLE WINAPI wglCreateBufferRegionARB (HDC hDC, int iLayerPlane, UINT uType); +VOID WINAPI wglDeleteBufferRegionARB (HANDLE hRegion); +BOOL WINAPI wglSaveBufferRegionARB (HANDLE hRegion, int x, int y, int width, int height); +BOOL WINAPI wglRestoreBufferRegionARB (HANDLE hRegion, int x, int y, int width, int height, int xSrc, int ySrc); +#endif +#endif /* WGL_ARB_buffer_region */ + +#ifndef WGL_ARB_context_flush_control +#define WGL_ARB_context_flush_control 1 +#define WGL_CONTEXT_RELEASE_BEHAVIOR_ARB 0x2097 +#define WGL_CONTEXT_RELEASE_BEHAVIOR_NONE_ARB 0 +#define WGL_CONTEXT_RELEASE_BEHAVIOR_FLUSH_ARB 0x2098 +#endif /* WGL_ARB_context_flush_control */ + +#ifndef WGL_ARB_create_context +#define WGL_ARB_create_context 1 +#define WGL_CONTEXT_DEBUG_BIT_ARB 0x00000001 +#define WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB 0x00000002 +#define WGL_CONTEXT_MAJOR_VERSION_ARB 0x2091 +#define WGL_CONTEXT_MINOR_VERSION_ARB 0x2092 +#define WGL_CONTEXT_LAYER_PLANE_ARB 0x2093 +#define WGL_CONTEXT_FLAGS_ARB 0x2094 +#define ERROR_INVALID_VERSION_ARB 0x2095 +typedef HGLRC (WINAPI * PFNWGLCREATECONTEXTATTRIBSARBPROC) (HDC hDC, HGLRC hShareContext, const int *attribList); +#ifdef WGL_WGLEXT_PROTOTYPES +HGLRC WINAPI wglCreateContextAttribsARB (HDC hDC, HGLRC hShareContext, const int *attribList); +#endif +#endif /* WGL_ARB_create_context */ + +#ifndef WGL_ARB_create_context_no_error +#define WGL_ARB_create_context_no_error 1 +#define WGL_CONTEXT_OPENGL_NO_ERROR_ARB 0x31B3 +#endif /* WGL_ARB_create_context_no_error */ + +#ifndef WGL_ARB_create_context_profile +#define WGL_ARB_create_context_profile 1 +#define WGL_CONTEXT_PROFILE_MASK_ARB 0x9126 +#define WGL_CONTEXT_CORE_PROFILE_BIT_ARB 0x00000001 +#define WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB 0x00000002 +#define ERROR_INVALID_PROFILE_ARB 0x2096 +#endif /* WGL_ARB_create_context_profile */ + +#ifndef 
WGL_ARB_create_context_robustness +#define WGL_ARB_create_context_robustness 1 +#define WGL_CONTEXT_ROBUST_ACCESS_BIT_ARB 0x00000004 +#define WGL_LOSE_CONTEXT_ON_RESET_ARB 0x8252 +#define WGL_CONTEXT_RESET_NOTIFICATION_STRATEGY_ARB 0x8256 +#define WGL_NO_RESET_NOTIFICATION_ARB 0x8261 +#endif /* WGL_ARB_create_context_robustness */ + +#ifndef WGL_ARB_extensions_string +#define WGL_ARB_extensions_string 1 +typedef const char *(WINAPI * PFNWGLGETEXTENSIONSSTRINGARBPROC) (HDC hdc); +#ifdef WGL_WGLEXT_PROTOTYPES +const char *WINAPI wglGetExtensionsStringARB (HDC hdc); +#endif +#endif /* WGL_ARB_extensions_string */ + +#ifndef WGL_ARB_framebuffer_sRGB +#define WGL_ARB_framebuffer_sRGB 1 +#define WGL_FRAMEBUFFER_SRGB_CAPABLE_ARB 0x20A9 +#endif /* WGL_ARB_framebuffer_sRGB */ + +#ifndef WGL_ARB_make_current_read +#define WGL_ARB_make_current_read 1 +#define ERROR_INVALID_PIXEL_TYPE_ARB 0x2043 +#define ERROR_INCOMPATIBLE_DEVICE_CONTEXTS_ARB 0x2054 +typedef BOOL (WINAPI * PFNWGLMAKECONTEXTCURRENTARBPROC) (HDC hDrawDC, HDC hReadDC, HGLRC hglrc); +typedef HDC (WINAPI * PFNWGLGETCURRENTREADDCARBPROC) (void); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglMakeContextCurrentARB (HDC hDrawDC, HDC hReadDC, HGLRC hglrc); +HDC WINAPI wglGetCurrentReadDCARB (void); +#endif +#endif /* WGL_ARB_make_current_read */ + +#ifndef WGL_ARB_multisample +#define WGL_ARB_multisample 1 +#define WGL_SAMPLE_BUFFERS_ARB 0x2041 +#define WGL_SAMPLES_ARB 0x2042 +#endif /* WGL_ARB_multisample */ + +#ifndef WGL_ARB_pbuffer +#define WGL_ARB_pbuffer 1 +DECLARE_HANDLE(HPBUFFERARB); +#define WGL_DRAW_TO_PBUFFER_ARB 0x202D +#define WGL_MAX_PBUFFER_PIXELS_ARB 0x202E +#define WGL_MAX_PBUFFER_WIDTH_ARB 0x202F +#define WGL_MAX_PBUFFER_HEIGHT_ARB 0x2030 +#define WGL_PBUFFER_LARGEST_ARB 0x2033 +#define WGL_PBUFFER_WIDTH_ARB 0x2034 +#define WGL_PBUFFER_HEIGHT_ARB 0x2035 +#define WGL_PBUFFER_LOST_ARB 0x2036 +typedef HPBUFFERARB (WINAPI * PFNWGLCREATEPBUFFERARBPROC) (HDC hDC, int iPixelFormat, int iWidth, int iHeight, const int *piAttribList); +typedef HDC (WINAPI * PFNWGLGETPBUFFERDCARBPROC) (HPBUFFERARB hPbuffer); +typedef int (WINAPI * PFNWGLRELEASEPBUFFERDCARBPROC) (HPBUFFERARB hPbuffer, HDC hDC); +typedef BOOL (WINAPI * PFNWGLDESTROYPBUFFERARBPROC) (HPBUFFERARB hPbuffer); +typedef BOOL (WINAPI * PFNWGLQUERYPBUFFERARBPROC) (HPBUFFERARB hPbuffer, int iAttribute, int *piValue); +#ifdef WGL_WGLEXT_PROTOTYPES +HPBUFFERARB WINAPI wglCreatePbufferARB (HDC hDC, int iPixelFormat, int iWidth, int iHeight, const int *piAttribList); +HDC WINAPI wglGetPbufferDCARB (HPBUFFERARB hPbuffer); +int WINAPI wglReleasePbufferDCARB (HPBUFFERARB hPbuffer, HDC hDC); +BOOL WINAPI wglDestroyPbufferARB (HPBUFFERARB hPbuffer); +BOOL WINAPI wglQueryPbufferARB (HPBUFFERARB hPbuffer, int iAttribute, int *piValue); +#endif +#endif /* WGL_ARB_pbuffer */ + +#ifndef WGL_ARB_pixel_format +#define WGL_ARB_pixel_format 1 +#define WGL_NUMBER_PIXEL_FORMATS_ARB 0x2000 +#define WGL_DRAW_TO_WINDOW_ARB 0x2001 +#define WGL_DRAW_TO_BITMAP_ARB 0x2002 +#define WGL_ACCELERATION_ARB 0x2003 +#define WGL_NEED_PALETTE_ARB 0x2004 +#define WGL_NEED_SYSTEM_PALETTE_ARB 0x2005 +#define WGL_SWAP_LAYER_BUFFERS_ARB 0x2006 +#define WGL_SWAP_METHOD_ARB 0x2007 +#define WGL_NUMBER_OVERLAYS_ARB 0x2008 +#define WGL_NUMBER_UNDERLAYS_ARB 0x2009 +#define WGL_TRANSPARENT_ARB 0x200A +#define WGL_TRANSPARENT_RED_VALUE_ARB 0x2037 +#define WGL_TRANSPARENT_GREEN_VALUE_ARB 0x2038 +#define WGL_TRANSPARENT_BLUE_VALUE_ARB 0x2039 +#define WGL_TRANSPARENT_ALPHA_VALUE_ARB 0x203A +#define WGL_TRANSPARENT_INDEX_VALUE_ARB 
0x203B +#define WGL_SHARE_DEPTH_ARB 0x200C +#define WGL_SHARE_STENCIL_ARB 0x200D +#define WGL_SHARE_ACCUM_ARB 0x200E +#define WGL_SUPPORT_GDI_ARB 0x200F +#define WGL_SUPPORT_OPENGL_ARB 0x2010 +#define WGL_DOUBLE_BUFFER_ARB 0x2011 +#define WGL_STEREO_ARB 0x2012 +#define WGL_PIXEL_TYPE_ARB 0x2013 +#define WGL_COLOR_BITS_ARB 0x2014 +#define WGL_RED_BITS_ARB 0x2015 +#define WGL_RED_SHIFT_ARB 0x2016 +#define WGL_GREEN_BITS_ARB 0x2017 +#define WGL_GREEN_SHIFT_ARB 0x2018 +#define WGL_BLUE_BITS_ARB 0x2019 +#define WGL_BLUE_SHIFT_ARB 0x201A +#define WGL_ALPHA_BITS_ARB 0x201B +#define WGL_ALPHA_SHIFT_ARB 0x201C +#define WGL_ACCUM_BITS_ARB 0x201D +#define WGL_ACCUM_RED_BITS_ARB 0x201E +#define WGL_ACCUM_GREEN_BITS_ARB 0x201F +#define WGL_ACCUM_BLUE_BITS_ARB 0x2020 +#define WGL_ACCUM_ALPHA_BITS_ARB 0x2021 +#define WGL_DEPTH_BITS_ARB 0x2022 +#define WGL_STENCIL_BITS_ARB 0x2023 +#define WGL_AUX_BUFFERS_ARB 0x2024 +#define WGL_NO_ACCELERATION_ARB 0x2025 +#define WGL_GENERIC_ACCELERATION_ARB 0x2026 +#define WGL_FULL_ACCELERATION_ARB 0x2027 +#define WGL_SWAP_EXCHANGE_ARB 0x2028 +#define WGL_SWAP_COPY_ARB 0x2029 +#define WGL_SWAP_UNDEFINED_ARB 0x202A +#define WGL_TYPE_RGBA_ARB 0x202B +#define WGL_TYPE_COLORINDEX_ARB 0x202C +typedef BOOL (WINAPI * PFNWGLGETPIXELFORMATATTRIBIVARBPROC) (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, const int *piAttributes, int *piValues); +typedef BOOL (WINAPI * PFNWGLGETPIXELFORMATATTRIBFVARBPROC) (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, const int *piAttributes, FLOAT *pfValues); +typedef BOOL (WINAPI * PFNWGLCHOOSEPIXELFORMATARBPROC) (HDC hdc, const int *piAttribIList, const FLOAT *pfAttribFList, UINT nMaxFormats, int *piFormats, UINT *nNumFormats); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetPixelFormatAttribivARB (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, const int *piAttributes, int *piValues); +BOOL WINAPI wglGetPixelFormatAttribfvARB (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, const int *piAttributes, FLOAT *pfValues); +BOOL WINAPI wglChoosePixelFormatARB (HDC hdc, const int *piAttribIList, const FLOAT *pfAttribFList, UINT nMaxFormats, int *piFormats, UINT *nNumFormats); +#endif +#endif /* WGL_ARB_pixel_format */ + +#ifndef WGL_ARB_pixel_format_float +#define WGL_ARB_pixel_format_float 1 +#define WGL_TYPE_RGBA_FLOAT_ARB 0x21A0 +#endif /* WGL_ARB_pixel_format_float */ + +#ifndef WGL_ARB_render_texture +#define WGL_ARB_render_texture 1 +#define WGL_BIND_TO_TEXTURE_RGB_ARB 0x2070 +#define WGL_BIND_TO_TEXTURE_RGBA_ARB 0x2071 +#define WGL_TEXTURE_FORMAT_ARB 0x2072 +#define WGL_TEXTURE_TARGET_ARB 0x2073 +#define WGL_MIPMAP_TEXTURE_ARB 0x2074 +#define WGL_TEXTURE_RGB_ARB 0x2075 +#define WGL_TEXTURE_RGBA_ARB 0x2076 +#define WGL_NO_TEXTURE_ARB 0x2077 +#define WGL_TEXTURE_CUBE_MAP_ARB 0x2078 +#define WGL_TEXTURE_1D_ARB 0x2079 +#define WGL_TEXTURE_2D_ARB 0x207A +#define WGL_MIPMAP_LEVEL_ARB 0x207B +#define WGL_CUBE_MAP_FACE_ARB 0x207C +#define WGL_TEXTURE_CUBE_MAP_POSITIVE_X_ARB 0x207D +#define WGL_TEXTURE_CUBE_MAP_NEGATIVE_X_ARB 0x207E +#define WGL_TEXTURE_CUBE_MAP_POSITIVE_Y_ARB 0x207F +#define WGL_TEXTURE_CUBE_MAP_NEGATIVE_Y_ARB 0x2080 +#define WGL_TEXTURE_CUBE_MAP_POSITIVE_Z_ARB 0x2081 +#define WGL_TEXTURE_CUBE_MAP_NEGATIVE_Z_ARB 0x2082 +#define WGL_FRONT_LEFT_ARB 0x2083 +#define WGL_FRONT_RIGHT_ARB 0x2084 +#define WGL_BACK_LEFT_ARB 0x2085 +#define WGL_BACK_RIGHT_ARB 0x2086 +#define WGL_AUX0_ARB 0x2087 +#define WGL_AUX1_ARB 0x2088 +#define WGL_AUX2_ARB 0x2089 +#define WGL_AUX3_ARB 0x208A +#define 
WGL_AUX4_ARB 0x208B +#define WGL_AUX5_ARB 0x208C +#define WGL_AUX6_ARB 0x208D +#define WGL_AUX7_ARB 0x208E +#define WGL_AUX8_ARB 0x208F +#define WGL_AUX9_ARB 0x2090 +typedef BOOL (WINAPI * PFNWGLBINDTEXIMAGEARBPROC) (HPBUFFERARB hPbuffer, int iBuffer); +typedef BOOL (WINAPI * PFNWGLRELEASETEXIMAGEARBPROC) (HPBUFFERARB hPbuffer, int iBuffer); +typedef BOOL (WINAPI * PFNWGLSETPBUFFERATTRIBARBPROC) (HPBUFFERARB hPbuffer, const int *piAttribList); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglBindTexImageARB (HPBUFFERARB hPbuffer, int iBuffer); +BOOL WINAPI wglReleaseTexImageARB (HPBUFFERARB hPbuffer, int iBuffer); +BOOL WINAPI wglSetPbufferAttribARB (HPBUFFERARB hPbuffer, const int *piAttribList); +#endif +#endif /* WGL_ARB_render_texture */ + +#ifndef WGL_ARB_robustness_application_isolation +#define WGL_ARB_robustness_application_isolation 1 +#define WGL_CONTEXT_RESET_ISOLATION_BIT_ARB 0x00000008 +#endif /* WGL_ARB_robustness_application_isolation */ + +#ifndef WGL_ARB_robustness_share_group_isolation +#define WGL_ARB_robustness_share_group_isolation 1 +#endif /* WGL_ARB_robustness_share_group_isolation */ + +#ifndef WGL_3DFX_multisample +#define WGL_3DFX_multisample 1 +#define WGL_SAMPLE_BUFFERS_3DFX 0x2060 +#define WGL_SAMPLES_3DFX 0x2061 +#endif /* WGL_3DFX_multisample */ + +#ifndef WGL_3DL_stereo_control +#define WGL_3DL_stereo_control 1 +#define WGL_STEREO_EMITTER_ENABLE_3DL 0x2055 +#define WGL_STEREO_EMITTER_DISABLE_3DL 0x2056 +#define WGL_STEREO_POLARITY_NORMAL_3DL 0x2057 +#define WGL_STEREO_POLARITY_INVERT_3DL 0x2058 +typedef BOOL (WINAPI * PFNWGLSETSTEREOEMITTERSTATE3DLPROC) (HDC hDC, UINT uState); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglSetStereoEmitterState3DL (HDC hDC, UINT uState); +#endif +#endif /* WGL_3DL_stereo_control */ + +#ifndef WGL_AMD_gpu_association +#define WGL_AMD_gpu_association 1 +#define WGL_GPU_VENDOR_AMD 0x1F00 +#define WGL_GPU_RENDERER_STRING_AMD 0x1F01 +#define WGL_GPU_OPENGL_VERSION_STRING_AMD 0x1F02 +#define WGL_GPU_FASTEST_TARGET_GPUS_AMD 0x21A2 +#define WGL_GPU_RAM_AMD 0x21A3 +#define WGL_GPU_CLOCK_AMD 0x21A4 +#define WGL_GPU_NUM_PIPES_AMD 0x21A5 +#define WGL_GPU_NUM_SIMD_AMD 0x21A6 +#define WGL_GPU_NUM_RB_AMD 0x21A7 +#define WGL_GPU_NUM_SPI_AMD 0x21A8 +typedef UINT (WINAPI * PFNWGLGETGPUIDSAMDPROC) (UINT maxCount, UINT *ids); +typedef INT (WINAPI * PFNWGLGETGPUINFOAMDPROC) (UINT id, int property, GLenum dataType, UINT size, void *data); +typedef UINT (WINAPI * PFNWGLGETCONTEXTGPUIDAMDPROC) (HGLRC hglrc); +typedef HGLRC (WINAPI * PFNWGLCREATEASSOCIATEDCONTEXTAMDPROC) (UINT id); +typedef HGLRC (WINAPI * PFNWGLCREATEASSOCIATEDCONTEXTATTRIBSAMDPROC) (UINT id, HGLRC hShareContext, const int *attribList); +typedef BOOL (WINAPI * PFNWGLDELETEASSOCIATEDCONTEXTAMDPROC) (HGLRC hglrc); +typedef BOOL (WINAPI * PFNWGLMAKEASSOCIATEDCONTEXTCURRENTAMDPROC) (HGLRC hglrc); +typedef HGLRC (WINAPI * PFNWGLGETCURRENTASSOCIATEDCONTEXTAMDPROC) (void); +typedef VOID (WINAPI * PFNWGLBLITCONTEXTFRAMEBUFFERAMDPROC) (HGLRC dstCtx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#ifdef WGL_WGLEXT_PROTOTYPES +UINT WINAPI wglGetGPUIDsAMD (UINT maxCount, UINT *ids); +INT WINAPI wglGetGPUInfoAMD (UINT id, int property, GLenum dataType, UINT size, void *data); +UINT WINAPI wglGetContextGPUIDAMD (HGLRC hglrc); +HGLRC WINAPI wglCreateAssociatedContextAMD (UINT id); +HGLRC WINAPI wglCreateAssociatedContextAttribsAMD (UINT id, HGLRC hShareContext, const int *attribList); +BOOL WINAPI 
wglDeleteAssociatedContextAMD (HGLRC hglrc); +BOOL WINAPI wglMakeAssociatedContextCurrentAMD (HGLRC hglrc); +HGLRC WINAPI wglGetCurrentAssociatedContextAMD (void); +VOID WINAPI wglBlitContextFramebufferAMD (HGLRC dstCtx, GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#endif +#endif /* WGL_AMD_gpu_association */ + +#ifndef WGL_ATI_pixel_format_float +#define WGL_ATI_pixel_format_float 1 +#define WGL_TYPE_RGBA_FLOAT_ATI 0x21A0 +#endif /* WGL_ATI_pixel_format_float */ + +#ifndef WGL_EXT_colorspace +#define WGL_EXT_colorspace 1 +#define WGL_COLORSPACE_EXT 0x3087 +#define WGL_COLORSPACE_SRGB_EXT 0x3089 +#define WGL_COLORSPACE_LINEAR_EXT 0x308A +#endif /* WGL_EXT_colorspace */ + +#ifndef WGL_EXT_create_context_es2_profile +#define WGL_EXT_create_context_es2_profile 1 +#define WGL_CONTEXT_ES2_PROFILE_BIT_EXT 0x00000004 +#endif /* WGL_EXT_create_context_es2_profile */ + +#ifndef WGL_EXT_create_context_es_profile +#define WGL_EXT_create_context_es_profile 1 +#define WGL_CONTEXT_ES_PROFILE_BIT_EXT 0x00000004 +#endif /* WGL_EXT_create_context_es_profile */ + +#ifndef WGL_EXT_depth_float +#define WGL_EXT_depth_float 1 +#define WGL_DEPTH_FLOAT_EXT 0x2040 +#endif /* WGL_EXT_depth_float */ + +#ifndef WGL_EXT_display_color_table +#define WGL_EXT_display_color_table 1 +typedef GLboolean (WINAPI * PFNWGLCREATEDISPLAYCOLORTABLEEXTPROC) (GLushort id); +typedef GLboolean (WINAPI * PFNWGLLOADDISPLAYCOLORTABLEEXTPROC) (const GLushort *table, GLuint length); +typedef GLboolean (WINAPI * PFNWGLBINDDISPLAYCOLORTABLEEXTPROC) (GLushort id); +typedef VOID (WINAPI * PFNWGLDESTROYDISPLAYCOLORTABLEEXTPROC) (GLushort id); +#ifdef WGL_WGLEXT_PROTOTYPES +GLboolean WINAPI wglCreateDisplayColorTableEXT (GLushort id); +GLboolean WINAPI wglLoadDisplayColorTableEXT (const GLushort *table, GLuint length); +GLboolean WINAPI wglBindDisplayColorTableEXT (GLushort id); +VOID WINAPI wglDestroyDisplayColorTableEXT (GLushort id); +#endif +#endif /* WGL_EXT_display_color_table */ + +#ifndef WGL_EXT_extensions_string +#define WGL_EXT_extensions_string 1 +typedef const char *(WINAPI * PFNWGLGETEXTENSIONSSTRINGEXTPROC) (void); +#ifdef WGL_WGLEXT_PROTOTYPES +const char *WINAPI wglGetExtensionsStringEXT (void); +#endif +#endif /* WGL_EXT_extensions_string */ + +#ifndef WGL_EXT_framebuffer_sRGB +#define WGL_EXT_framebuffer_sRGB 1 +#define WGL_FRAMEBUFFER_SRGB_CAPABLE_EXT 0x20A9 +#endif /* WGL_EXT_framebuffer_sRGB */ + +#ifndef WGL_EXT_make_current_read +#define WGL_EXT_make_current_read 1 +#define ERROR_INVALID_PIXEL_TYPE_EXT 0x2043 +typedef BOOL (WINAPI * PFNWGLMAKECONTEXTCURRENTEXTPROC) (HDC hDrawDC, HDC hReadDC, HGLRC hglrc); +typedef HDC (WINAPI * PFNWGLGETCURRENTREADDCEXTPROC) (void); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglMakeContextCurrentEXT (HDC hDrawDC, HDC hReadDC, HGLRC hglrc); +HDC WINAPI wglGetCurrentReadDCEXT (void); +#endif +#endif /* WGL_EXT_make_current_read */ + +#ifndef WGL_EXT_multisample +#define WGL_EXT_multisample 1 +#define WGL_SAMPLE_BUFFERS_EXT 0x2041 +#define WGL_SAMPLES_EXT 0x2042 +#endif /* WGL_EXT_multisample */ + +#ifndef WGL_EXT_pbuffer +#define WGL_EXT_pbuffer 1 +DECLARE_HANDLE(HPBUFFEREXT); +#define WGL_DRAW_TO_PBUFFER_EXT 0x202D +#define WGL_MAX_PBUFFER_PIXELS_EXT 0x202E +#define WGL_MAX_PBUFFER_WIDTH_EXT 0x202F +#define WGL_MAX_PBUFFER_HEIGHT_EXT 0x2030 +#define WGL_OPTIMAL_PBUFFER_WIDTH_EXT 0x2031 +#define WGL_OPTIMAL_PBUFFER_HEIGHT_EXT 0x2032 +#define WGL_PBUFFER_LARGEST_EXT 0x2033 +#define 
WGL_PBUFFER_WIDTH_EXT 0x2034 +#define WGL_PBUFFER_HEIGHT_EXT 0x2035 +typedef HPBUFFEREXT (WINAPI * PFNWGLCREATEPBUFFEREXTPROC) (HDC hDC, int iPixelFormat, int iWidth, int iHeight, const int *piAttribList); +typedef HDC (WINAPI * PFNWGLGETPBUFFERDCEXTPROC) (HPBUFFEREXT hPbuffer); +typedef int (WINAPI * PFNWGLRELEASEPBUFFERDCEXTPROC) (HPBUFFEREXT hPbuffer, HDC hDC); +typedef BOOL (WINAPI * PFNWGLDESTROYPBUFFEREXTPROC) (HPBUFFEREXT hPbuffer); +typedef BOOL (WINAPI * PFNWGLQUERYPBUFFEREXTPROC) (HPBUFFEREXT hPbuffer, int iAttribute, int *piValue); +#ifdef WGL_WGLEXT_PROTOTYPES +HPBUFFEREXT WINAPI wglCreatePbufferEXT (HDC hDC, int iPixelFormat, int iWidth, int iHeight, const int *piAttribList); +HDC WINAPI wglGetPbufferDCEXT (HPBUFFEREXT hPbuffer); +int WINAPI wglReleasePbufferDCEXT (HPBUFFEREXT hPbuffer, HDC hDC); +BOOL WINAPI wglDestroyPbufferEXT (HPBUFFEREXT hPbuffer); +BOOL WINAPI wglQueryPbufferEXT (HPBUFFEREXT hPbuffer, int iAttribute, int *piValue); +#endif +#endif /* WGL_EXT_pbuffer */ + +#ifndef WGL_EXT_pixel_format +#define WGL_EXT_pixel_format 1 +#define WGL_NUMBER_PIXEL_FORMATS_EXT 0x2000 +#define WGL_DRAW_TO_WINDOW_EXT 0x2001 +#define WGL_DRAW_TO_BITMAP_EXT 0x2002 +#define WGL_ACCELERATION_EXT 0x2003 +#define WGL_NEED_PALETTE_EXT 0x2004 +#define WGL_NEED_SYSTEM_PALETTE_EXT 0x2005 +#define WGL_SWAP_LAYER_BUFFERS_EXT 0x2006 +#define WGL_SWAP_METHOD_EXT 0x2007 +#define WGL_NUMBER_OVERLAYS_EXT 0x2008 +#define WGL_NUMBER_UNDERLAYS_EXT 0x2009 +#define WGL_TRANSPARENT_EXT 0x200A +#define WGL_TRANSPARENT_VALUE_EXT 0x200B +#define WGL_SHARE_DEPTH_EXT 0x200C +#define WGL_SHARE_STENCIL_EXT 0x200D +#define WGL_SHARE_ACCUM_EXT 0x200E +#define WGL_SUPPORT_GDI_EXT 0x200F +#define WGL_SUPPORT_OPENGL_EXT 0x2010 +#define WGL_DOUBLE_BUFFER_EXT 0x2011 +#define WGL_STEREO_EXT 0x2012 +#define WGL_PIXEL_TYPE_EXT 0x2013 +#define WGL_COLOR_BITS_EXT 0x2014 +#define WGL_RED_BITS_EXT 0x2015 +#define WGL_RED_SHIFT_EXT 0x2016 +#define WGL_GREEN_BITS_EXT 0x2017 +#define WGL_GREEN_SHIFT_EXT 0x2018 +#define WGL_BLUE_BITS_EXT 0x2019 +#define WGL_BLUE_SHIFT_EXT 0x201A +#define WGL_ALPHA_BITS_EXT 0x201B +#define WGL_ALPHA_SHIFT_EXT 0x201C +#define WGL_ACCUM_BITS_EXT 0x201D +#define WGL_ACCUM_RED_BITS_EXT 0x201E +#define WGL_ACCUM_GREEN_BITS_EXT 0x201F +#define WGL_ACCUM_BLUE_BITS_EXT 0x2020 +#define WGL_ACCUM_ALPHA_BITS_EXT 0x2021 +#define WGL_DEPTH_BITS_EXT 0x2022 +#define WGL_STENCIL_BITS_EXT 0x2023 +#define WGL_AUX_BUFFERS_EXT 0x2024 +#define WGL_NO_ACCELERATION_EXT 0x2025 +#define WGL_GENERIC_ACCELERATION_EXT 0x2026 +#define WGL_FULL_ACCELERATION_EXT 0x2027 +#define WGL_SWAP_EXCHANGE_EXT 0x2028 +#define WGL_SWAP_COPY_EXT 0x2029 +#define WGL_SWAP_UNDEFINED_EXT 0x202A +#define WGL_TYPE_RGBA_EXT 0x202B +#define WGL_TYPE_COLORINDEX_EXT 0x202C +typedef BOOL (WINAPI * PFNWGLGETPIXELFORMATATTRIBIVEXTPROC) (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, int *piAttributes, int *piValues); +typedef BOOL (WINAPI * PFNWGLGETPIXELFORMATATTRIBFVEXTPROC) (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, int *piAttributes, FLOAT *pfValues); +typedef BOOL (WINAPI * PFNWGLCHOOSEPIXELFORMATEXTPROC) (HDC hdc, const int *piAttribIList, const FLOAT *pfAttribFList, UINT nMaxFormats, int *piFormats, UINT *nNumFormats); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetPixelFormatAttribivEXT (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, int *piAttributes, int *piValues); +BOOL WINAPI wglGetPixelFormatAttribfvEXT (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, int *piAttributes, 
FLOAT *pfValues); +BOOL WINAPI wglChoosePixelFormatEXT (HDC hdc, const int *piAttribIList, const FLOAT *pfAttribFList, UINT nMaxFormats, int *piFormats, UINT *nNumFormats); +#endif +#endif /* WGL_EXT_pixel_format */ + +#ifndef WGL_EXT_pixel_format_packed_float +#define WGL_EXT_pixel_format_packed_float 1 +#define WGL_TYPE_RGBA_UNSIGNED_FLOAT_EXT 0x20A8 +#endif /* WGL_EXT_pixel_format_packed_float */ + +#ifndef WGL_EXT_swap_control +#define WGL_EXT_swap_control 1 +typedef BOOL (WINAPI * PFNWGLSWAPINTERVALEXTPROC) (int interval); +typedef int (WINAPI * PFNWGLGETSWAPINTERVALEXTPROC) (void); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglSwapIntervalEXT (int interval); +int WINAPI wglGetSwapIntervalEXT (void); +#endif +#endif /* WGL_EXT_swap_control */ + +#ifndef WGL_EXT_swap_control_tear +#define WGL_EXT_swap_control_tear 1 +#endif /* WGL_EXT_swap_control_tear */ + +#ifndef WGL_I3D_digital_video_control +#define WGL_I3D_digital_video_control 1 +#define WGL_DIGITAL_VIDEO_CURSOR_ALPHA_FRAMEBUFFER_I3D 0x2050 +#define WGL_DIGITAL_VIDEO_CURSOR_ALPHA_VALUE_I3D 0x2051 +#define WGL_DIGITAL_VIDEO_CURSOR_INCLUDED_I3D 0x2052 +#define WGL_DIGITAL_VIDEO_GAMMA_CORRECTED_I3D 0x2053 +typedef BOOL (WINAPI * PFNWGLGETDIGITALVIDEOPARAMETERSI3DPROC) (HDC hDC, int iAttribute, int *piValue); +typedef BOOL (WINAPI * PFNWGLSETDIGITALVIDEOPARAMETERSI3DPROC) (HDC hDC, int iAttribute, const int *piValue); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetDigitalVideoParametersI3D (HDC hDC, int iAttribute, int *piValue); +BOOL WINAPI wglSetDigitalVideoParametersI3D (HDC hDC, int iAttribute, const int *piValue); +#endif +#endif /* WGL_I3D_digital_video_control */ + +#ifndef WGL_I3D_gamma +#define WGL_I3D_gamma 1 +#define WGL_GAMMA_TABLE_SIZE_I3D 0x204E +#define WGL_GAMMA_EXCLUDE_DESKTOP_I3D 0x204F +typedef BOOL (WINAPI * PFNWGLGETGAMMATABLEPARAMETERSI3DPROC) (HDC hDC, int iAttribute, int *piValue); +typedef BOOL (WINAPI * PFNWGLSETGAMMATABLEPARAMETERSI3DPROC) (HDC hDC, int iAttribute, const int *piValue); +typedef BOOL (WINAPI * PFNWGLGETGAMMATABLEI3DPROC) (HDC hDC, int iEntries, USHORT *puRed, USHORT *puGreen, USHORT *puBlue); +typedef BOOL (WINAPI * PFNWGLSETGAMMATABLEI3DPROC) (HDC hDC, int iEntries, const USHORT *puRed, const USHORT *puGreen, const USHORT *puBlue); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetGammaTableParametersI3D (HDC hDC, int iAttribute, int *piValue); +BOOL WINAPI wglSetGammaTableParametersI3D (HDC hDC, int iAttribute, const int *piValue); +BOOL WINAPI wglGetGammaTableI3D (HDC hDC, int iEntries, USHORT *puRed, USHORT *puGreen, USHORT *puBlue); +BOOL WINAPI wglSetGammaTableI3D (HDC hDC, int iEntries, const USHORT *puRed, const USHORT *puGreen, const USHORT *puBlue); +#endif +#endif /* WGL_I3D_gamma */ + +#ifndef WGL_I3D_genlock +#define WGL_I3D_genlock 1 +#define WGL_GENLOCK_SOURCE_MULTIVIEW_I3D 0x2044 +#define WGL_GENLOCK_SOURCE_EXTERNAL_SYNC_I3D 0x2045 +#define WGL_GENLOCK_SOURCE_EXTERNAL_FIELD_I3D 0x2046 +#define WGL_GENLOCK_SOURCE_EXTERNAL_TTL_I3D 0x2047 +#define WGL_GENLOCK_SOURCE_DIGITAL_SYNC_I3D 0x2048 +#define WGL_GENLOCK_SOURCE_DIGITAL_FIELD_I3D 0x2049 +#define WGL_GENLOCK_SOURCE_EDGE_FALLING_I3D 0x204A +#define WGL_GENLOCK_SOURCE_EDGE_RISING_I3D 0x204B +#define WGL_GENLOCK_SOURCE_EDGE_BOTH_I3D 0x204C +typedef BOOL (WINAPI * PFNWGLENABLEGENLOCKI3DPROC) (HDC hDC); +typedef BOOL (WINAPI * PFNWGLDISABLEGENLOCKI3DPROC) (HDC hDC); +typedef BOOL (WINAPI * PFNWGLISENABLEDGENLOCKI3DPROC) (HDC hDC, BOOL *pFlag); +typedef BOOL (WINAPI * PFNWGLGENLOCKSOURCEI3DPROC) (HDC hDC, UINT uSource); 
+typedef BOOL (WINAPI * PFNWGLGETGENLOCKSOURCEI3DPROC) (HDC hDC, UINT *uSource); +typedef BOOL (WINAPI * PFNWGLGENLOCKSOURCEEDGEI3DPROC) (HDC hDC, UINT uEdge); +typedef BOOL (WINAPI * PFNWGLGETGENLOCKSOURCEEDGEI3DPROC) (HDC hDC, UINT *uEdge); +typedef BOOL (WINAPI * PFNWGLGENLOCKSAMPLERATEI3DPROC) (HDC hDC, UINT uRate); +typedef BOOL (WINAPI * PFNWGLGETGENLOCKSAMPLERATEI3DPROC) (HDC hDC, UINT *uRate); +typedef BOOL (WINAPI * PFNWGLGENLOCKSOURCEDELAYI3DPROC) (HDC hDC, UINT uDelay); +typedef BOOL (WINAPI * PFNWGLGETGENLOCKSOURCEDELAYI3DPROC) (HDC hDC, UINT *uDelay); +typedef BOOL (WINAPI * PFNWGLQUERYGENLOCKMAXSOURCEDELAYI3DPROC) (HDC hDC, UINT *uMaxLineDelay, UINT *uMaxPixelDelay); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglEnableGenlockI3D (HDC hDC); +BOOL WINAPI wglDisableGenlockI3D (HDC hDC); +BOOL WINAPI wglIsEnabledGenlockI3D (HDC hDC, BOOL *pFlag); +BOOL WINAPI wglGenlockSourceI3D (HDC hDC, UINT uSource); +BOOL WINAPI wglGetGenlockSourceI3D (HDC hDC, UINT *uSource); +BOOL WINAPI wglGenlockSourceEdgeI3D (HDC hDC, UINT uEdge); +BOOL WINAPI wglGetGenlockSourceEdgeI3D (HDC hDC, UINT *uEdge); +BOOL WINAPI wglGenlockSampleRateI3D (HDC hDC, UINT uRate); +BOOL WINAPI wglGetGenlockSampleRateI3D (HDC hDC, UINT *uRate); +BOOL WINAPI wglGenlockSourceDelayI3D (HDC hDC, UINT uDelay); +BOOL WINAPI wglGetGenlockSourceDelayI3D (HDC hDC, UINT *uDelay); +BOOL WINAPI wglQueryGenlockMaxSourceDelayI3D (HDC hDC, UINT *uMaxLineDelay, UINT *uMaxPixelDelay); +#endif +#endif /* WGL_I3D_genlock */ + +#ifndef WGL_I3D_image_buffer +#define WGL_I3D_image_buffer 1 +#define WGL_IMAGE_BUFFER_MIN_ACCESS_I3D 0x00000001 +#define WGL_IMAGE_BUFFER_LOCK_I3D 0x00000002 +typedef LPVOID (WINAPI * PFNWGLCREATEIMAGEBUFFERI3DPROC) (HDC hDC, DWORD dwSize, UINT uFlags); +typedef BOOL (WINAPI * PFNWGLDESTROYIMAGEBUFFERI3DPROC) (HDC hDC, LPVOID pAddress); +typedef BOOL (WINAPI * PFNWGLASSOCIATEIMAGEBUFFEREVENTSI3DPROC) (HDC hDC, const HANDLE *pEvent, const LPVOID *pAddress, const DWORD *pSize, UINT count); +typedef BOOL (WINAPI * PFNWGLRELEASEIMAGEBUFFEREVENTSI3DPROC) (HDC hDC, const LPVOID *pAddress, UINT count); +#ifdef WGL_WGLEXT_PROTOTYPES +LPVOID WINAPI wglCreateImageBufferI3D (HDC hDC, DWORD dwSize, UINT uFlags); +BOOL WINAPI wglDestroyImageBufferI3D (HDC hDC, LPVOID pAddress); +BOOL WINAPI wglAssociateImageBufferEventsI3D (HDC hDC, const HANDLE *pEvent, const LPVOID *pAddress, const DWORD *pSize, UINT count); +BOOL WINAPI wglReleaseImageBufferEventsI3D (HDC hDC, const LPVOID *pAddress, UINT count); +#endif +#endif /* WGL_I3D_image_buffer */ + +#ifndef WGL_I3D_swap_frame_lock +#define WGL_I3D_swap_frame_lock 1 +typedef BOOL (WINAPI * PFNWGLENABLEFRAMELOCKI3DPROC) (void); +typedef BOOL (WINAPI * PFNWGLDISABLEFRAMELOCKI3DPROC) (void); +typedef BOOL (WINAPI * PFNWGLISENABLEDFRAMELOCKI3DPROC) (BOOL *pFlag); +typedef BOOL (WINAPI * PFNWGLQUERYFRAMELOCKMASTERI3DPROC) (BOOL *pFlag); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglEnableFrameLockI3D (void); +BOOL WINAPI wglDisableFrameLockI3D (void); +BOOL WINAPI wglIsEnabledFrameLockI3D (BOOL *pFlag); +BOOL WINAPI wglQueryFrameLockMasterI3D (BOOL *pFlag); +#endif +#endif /* WGL_I3D_swap_frame_lock */ + +#ifndef WGL_I3D_swap_frame_usage +#define WGL_I3D_swap_frame_usage 1 +typedef BOOL (WINAPI * PFNWGLGETFRAMEUSAGEI3DPROC) (float *pUsage); +typedef BOOL (WINAPI * PFNWGLBEGINFRAMETRACKINGI3DPROC) (void); +typedef BOOL (WINAPI * PFNWGLENDFRAMETRACKINGI3DPROC) (void); +typedef BOOL (WINAPI * PFNWGLQUERYFRAMETRACKINGI3DPROC) (DWORD *pFrameCount, DWORD *pMissedFrames, float 
*pLastMissedUsage); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetFrameUsageI3D (float *pUsage); +BOOL WINAPI wglBeginFrameTrackingI3D (void); +BOOL WINAPI wglEndFrameTrackingI3D (void); +BOOL WINAPI wglQueryFrameTrackingI3D (DWORD *pFrameCount, DWORD *pMissedFrames, float *pLastMissedUsage); +#endif +#endif /* WGL_I3D_swap_frame_usage */ + +#ifndef WGL_NV_DX_interop +#define WGL_NV_DX_interop 1 +#define WGL_ACCESS_READ_ONLY_NV 0x00000000 +#define WGL_ACCESS_READ_WRITE_NV 0x00000001 +#define WGL_ACCESS_WRITE_DISCARD_NV 0x00000002 +typedef BOOL (WINAPI * PFNWGLDXSETRESOURCESHAREHANDLENVPROC) (void *dxObject, HANDLE shareHandle); +typedef HANDLE (WINAPI * PFNWGLDXOPENDEVICENVPROC) (void *dxDevice); +typedef BOOL (WINAPI * PFNWGLDXCLOSEDEVICENVPROC) (HANDLE hDevice); +typedef HANDLE (WINAPI * PFNWGLDXREGISTEROBJECTNVPROC) (HANDLE hDevice, void *dxObject, GLuint name, GLenum type, GLenum access); +typedef BOOL (WINAPI * PFNWGLDXUNREGISTEROBJECTNVPROC) (HANDLE hDevice, HANDLE hObject); +typedef BOOL (WINAPI * PFNWGLDXOBJECTACCESSNVPROC) (HANDLE hObject, GLenum access); +typedef BOOL (WINAPI * PFNWGLDXLOCKOBJECTSNVPROC) (HANDLE hDevice, GLint count, HANDLE *hObjects); +typedef BOOL (WINAPI * PFNWGLDXUNLOCKOBJECTSNVPROC) (HANDLE hDevice, GLint count, HANDLE *hObjects); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglDXSetResourceShareHandleNV (void *dxObject, HANDLE shareHandle); +HANDLE WINAPI wglDXOpenDeviceNV (void *dxDevice); +BOOL WINAPI wglDXCloseDeviceNV (HANDLE hDevice); +HANDLE WINAPI wglDXRegisterObjectNV (HANDLE hDevice, void *dxObject, GLuint name, GLenum type, GLenum access); +BOOL WINAPI wglDXUnregisterObjectNV (HANDLE hDevice, HANDLE hObject); +BOOL WINAPI wglDXObjectAccessNV (HANDLE hObject, GLenum access); +BOOL WINAPI wglDXLockObjectsNV (HANDLE hDevice, GLint count, HANDLE *hObjects); +BOOL WINAPI wglDXUnlockObjectsNV (HANDLE hDevice, GLint count, HANDLE *hObjects); +#endif +#endif /* WGL_NV_DX_interop */ + +#ifndef WGL_NV_DX_interop2 +#define WGL_NV_DX_interop2 1 +#endif /* WGL_NV_DX_interop2 */ + +#ifndef WGL_NV_copy_image +#define WGL_NV_copy_image 1 +typedef BOOL (WINAPI * PFNWGLCOPYIMAGESUBDATANVPROC) (HGLRC hSrcRC, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, HGLRC hDstRC, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei width, GLsizei height, GLsizei depth); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglCopyImageSubDataNV (HGLRC hSrcRC, GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, HGLRC hDstRC, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei width, GLsizei height, GLsizei depth); +#endif +#endif /* WGL_NV_copy_image */ + +#ifndef WGL_NV_delay_before_swap +#define WGL_NV_delay_before_swap 1 +typedef BOOL (WINAPI * PFNWGLDELAYBEFORESWAPNVPROC) (HDC hDC, GLfloat seconds); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglDelayBeforeSwapNV (HDC hDC, GLfloat seconds); +#endif +#endif /* WGL_NV_delay_before_swap */ + +#ifndef WGL_NV_float_buffer +#define WGL_NV_float_buffer 1 +#define WGL_FLOAT_COMPONENTS_NV 0x20B0 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_FLOAT_R_NV 0x20B1 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_FLOAT_RG_NV 0x20B2 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_FLOAT_RGB_NV 0x20B3 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_FLOAT_RGBA_NV 0x20B4 +#define WGL_TEXTURE_FLOAT_R_NV 0x20B5 +#define WGL_TEXTURE_FLOAT_RG_NV 0x20B6 +#define WGL_TEXTURE_FLOAT_RGB_NV 0x20B7 +#define WGL_TEXTURE_FLOAT_RGBA_NV 0x20B8 +#endif 
/* WGL_NV_float_buffer */ + +#ifndef WGL_NV_gpu_affinity +#define WGL_NV_gpu_affinity 1 +DECLARE_HANDLE(HGPUNV); +struct _GPU_DEVICE { + DWORD cb; + CHAR DeviceName[32]; + CHAR DeviceString[128]; + DWORD Flags; + RECT rcVirtualScreen; +}; +typedef struct _GPU_DEVICE *PGPU_DEVICE; +#define ERROR_INCOMPATIBLE_AFFINITY_MASKS_NV 0x20D0 +#define ERROR_MISSING_AFFINITY_MASK_NV 0x20D1 +typedef BOOL (WINAPI * PFNWGLENUMGPUSNVPROC) (UINT iGpuIndex, HGPUNV *phGpu); +typedef BOOL (WINAPI * PFNWGLENUMGPUDEVICESNVPROC) (HGPUNV hGpu, UINT iDeviceIndex, PGPU_DEVICE lpGpuDevice); +typedef HDC (WINAPI * PFNWGLCREATEAFFINITYDCNVPROC) (const HGPUNV *phGpuList); +typedef BOOL (WINAPI * PFNWGLENUMGPUSFROMAFFINITYDCNVPROC) (HDC hAffinityDC, UINT iGpuIndex, HGPUNV *hGpu); +typedef BOOL (WINAPI * PFNWGLDELETEDCNVPROC) (HDC hdc); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglEnumGpusNV (UINT iGpuIndex, HGPUNV *phGpu); +BOOL WINAPI wglEnumGpuDevicesNV (HGPUNV hGpu, UINT iDeviceIndex, PGPU_DEVICE lpGpuDevice); +HDC WINAPI wglCreateAffinityDCNV (const HGPUNV *phGpuList); +BOOL WINAPI wglEnumGpusFromAffinityDCNV (HDC hAffinityDC, UINT iGpuIndex, HGPUNV *hGpu); +BOOL WINAPI wglDeleteDCNV (HDC hdc); +#endif +#endif /* WGL_NV_gpu_affinity */ + +#ifndef WGL_NV_multisample_coverage +#define WGL_NV_multisample_coverage 1 +#define WGL_COVERAGE_SAMPLES_NV 0x2042 +#define WGL_COLOR_SAMPLES_NV 0x20B9 +#endif /* WGL_NV_multisample_coverage */ + +#ifndef WGL_NV_present_video +#define WGL_NV_present_video 1 +DECLARE_HANDLE(HVIDEOOUTPUTDEVICENV); +#define WGL_NUM_VIDEO_SLOTS_NV 0x20F0 +typedef int (WINAPI * PFNWGLENUMERATEVIDEODEVICESNVPROC) (HDC hDC, HVIDEOOUTPUTDEVICENV *phDeviceList); +typedef BOOL (WINAPI * PFNWGLBINDVIDEODEVICENVPROC) (HDC hDC, unsigned int uVideoSlot, HVIDEOOUTPUTDEVICENV hVideoDevice, const int *piAttribList); +typedef BOOL (WINAPI * PFNWGLQUERYCURRENTCONTEXTNVPROC) (int iAttribute, int *piValue); +#ifdef WGL_WGLEXT_PROTOTYPES +int WINAPI wglEnumerateVideoDevicesNV (HDC hDC, HVIDEOOUTPUTDEVICENV *phDeviceList); +BOOL WINAPI wglBindVideoDeviceNV (HDC hDC, unsigned int uVideoSlot, HVIDEOOUTPUTDEVICENV hVideoDevice, const int *piAttribList); +BOOL WINAPI wglQueryCurrentContextNV (int iAttribute, int *piValue); +#endif +#endif /* WGL_NV_present_video */ + +#ifndef WGL_NV_render_depth_texture +#define WGL_NV_render_depth_texture 1 +#define WGL_BIND_TO_TEXTURE_DEPTH_NV 0x20A3 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_DEPTH_NV 0x20A4 +#define WGL_DEPTH_TEXTURE_FORMAT_NV 0x20A5 +#define WGL_TEXTURE_DEPTH_COMPONENT_NV 0x20A6 +#define WGL_DEPTH_COMPONENT_NV 0x20A7 +#endif /* WGL_NV_render_depth_texture */ + +#ifndef WGL_NV_render_texture_rectangle +#define WGL_NV_render_texture_rectangle 1 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_RGB_NV 0x20A0 +#define WGL_BIND_TO_TEXTURE_RECTANGLE_RGBA_NV 0x20A1 +#define WGL_TEXTURE_RECTANGLE_NV 0x20A2 +#endif /* WGL_NV_render_texture_rectangle */ + +#ifndef WGL_NV_swap_group +#define WGL_NV_swap_group 1 +typedef BOOL (WINAPI * PFNWGLJOINSWAPGROUPNVPROC) (HDC hDC, GLuint group); +typedef BOOL (WINAPI * PFNWGLBINDSWAPBARRIERNVPROC) (GLuint group, GLuint barrier); +typedef BOOL (WINAPI * PFNWGLQUERYSWAPGROUPNVPROC) (HDC hDC, GLuint *group, GLuint *barrier); +typedef BOOL (WINAPI * PFNWGLQUERYMAXSWAPGROUPSNVPROC) (HDC hDC, GLuint *maxGroups, GLuint *maxBarriers); +typedef BOOL (WINAPI * PFNWGLQUERYFRAMECOUNTNVPROC) (HDC hDC, GLuint *count); +typedef BOOL (WINAPI * PFNWGLRESETFRAMECOUNTNVPROC) (HDC hDC); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglJoinSwapGroupNV (HDC hDC, GLuint group); 
+BOOL WINAPI wglBindSwapBarrierNV (GLuint group, GLuint barrier); +BOOL WINAPI wglQuerySwapGroupNV (HDC hDC, GLuint *group, GLuint *barrier); +BOOL WINAPI wglQueryMaxSwapGroupsNV (HDC hDC, GLuint *maxGroups, GLuint *maxBarriers); +BOOL WINAPI wglQueryFrameCountNV (HDC hDC, GLuint *count); +BOOL WINAPI wglResetFrameCountNV (HDC hDC); +#endif +#endif /* WGL_NV_swap_group */ + +#ifndef WGL_NV_vertex_array_range +#define WGL_NV_vertex_array_range 1 +typedef void *(WINAPI * PFNWGLALLOCATEMEMORYNVPROC) (GLsizei size, GLfloat readfreq, GLfloat writefreq, GLfloat priority); +typedef void (WINAPI * PFNWGLFREEMEMORYNVPROC) (void *pointer); +#ifdef WGL_WGLEXT_PROTOTYPES +void *WINAPI wglAllocateMemoryNV (GLsizei size, GLfloat readfreq, GLfloat writefreq, GLfloat priority); +void WINAPI wglFreeMemoryNV (void *pointer); +#endif +#endif /* WGL_NV_vertex_array_range */ + +#ifndef WGL_NV_video_capture +#define WGL_NV_video_capture 1 +DECLARE_HANDLE(HVIDEOINPUTDEVICENV); +#define WGL_UNIQUE_ID_NV 0x20CE +#define WGL_NUM_VIDEO_CAPTURE_SLOTS_NV 0x20CF +typedef BOOL (WINAPI * PFNWGLBINDVIDEOCAPTUREDEVICENVPROC) (UINT uVideoSlot, HVIDEOINPUTDEVICENV hDevice); +typedef UINT (WINAPI * PFNWGLENUMERATEVIDEOCAPTUREDEVICESNVPROC) (HDC hDc, HVIDEOINPUTDEVICENV *phDeviceList); +typedef BOOL (WINAPI * PFNWGLLOCKVIDEOCAPTUREDEVICENVPROC) (HDC hDc, HVIDEOINPUTDEVICENV hDevice); +typedef BOOL (WINAPI * PFNWGLQUERYVIDEOCAPTUREDEVICENVPROC) (HDC hDc, HVIDEOINPUTDEVICENV hDevice, int iAttribute, int *piValue); +typedef BOOL (WINAPI * PFNWGLRELEASEVIDEOCAPTUREDEVICENVPROC) (HDC hDc, HVIDEOINPUTDEVICENV hDevice); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglBindVideoCaptureDeviceNV (UINT uVideoSlot, HVIDEOINPUTDEVICENV hDevice); +UINT WINAPI wglEnumerateVideoCaptureDevicesNV (HDC hDc, HVIDEOINPUTDEVICENV *phDeviceList); +BOOL WINAPI wglLockVideoCaptureDeviceNV (HDC hDc, HVIDEOINPUTDEVICENV hDevice); +BOOL WINAPI wglQueryVideoCaptureDeviceNV (HDC hDc, HVIDEOINPUTDEVICENV hDevice, int iAttribute, int *piValue); +BOOL WINAPI wglReleaseVideoCaptureDeviceNV (HDC hDc, HVIDEOINPUTDEVICENV hDevice); +#endif +#endif /* WGL_NV_video_capture */ + +#ifndef WGL_NV_video_output +#define WGL_NV_video_output 1 +DECLARE_HANDLE(HPVIDEODEV); +#define WGL_BIND_TO_VIDEO_RGB_NV 0x20C0 +#define WGL_BIND_TO_VIDEO_RGBA_NV 0x20C1 +#define WGL_BIND_TO_VIDEO_RGB_AND_DEPTH_NV 0x20C2 +#define WGL_VIDEO_OUT_COLOR_NV 0x20C3 +#define WGL_VIDEO_OUT_ALPHA_NV 0x20C4 +#define WGL_VIDEO_OUT_DEPTH_NV 0x20C5 +#define WGL_VIDEO_OUT_COLOR_AND_ALPHA_NV 0x20C6 +#define WGL_VIDEO_OUT_COLOR_AND_DEPTH_NV 0x20C7 +#define WGL_VIDEO_OUT_FRAME 0x20C8 +#define WGL_VIDEO_OUT_FIELD_1 0x20C9 +#define WGL_VIDEO_OUT_FIELD_2 0x20CA +#define WGL_VIDEO_OUT_STACKED_FIELDS_1_2 0x20CB +#define WGL_VIDEO_OUT_STACKED_FIELDS_2_1 0x20CC +typedef BOOL (WINAPI * PFNWGLGETVIDEODEVICENVPROC) (HDC hDC, int numDevices, HPVIDEODEV *hVideoDevice); +typedef BOOL (WINAPI * PFNWGLRELEASEVIDEODEVICENVPROC) (HPVIDEODEV hVideoDevice); +typedef BOOL (WINAPI * PFNWGLBINDVIDEOIMAGENVPROC) (HPVIDEODEV hVideoDevice, HPBUFFERARB hPbuffer, int iVideoBuffer); +typedef BOOL (WINAPI * PFNWGLRELEASEVIDEOIMAGENVPROC) (HPBUFFERARB hPbuffer, int iVideoBuffer); +typedef BOOL (WINAPI * PFNWGLSENDPBUFFERTOVIDEONVPROC) (HPBUFFERARB hPbuffer, int iBufferType, unsigned long *pulCounterPbuffer, BOOL bBlock); +typedef BOOL (WINAPI * PFNWGLGETVIDEOINFONVPROC) (HPVIDEODEV hpVideoDevice, unsigned long *pulCounterOutputPbuffer, unsigned long *pulCounterOutputVideo); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetVideoDeviceNV 
(HDC hDC, int numDevices, HPVIDEODEV *hVideoDevice); +BOOL WINAPI wglReleaseVideoDeviceNV (HPVIDEODEV hVideoDevice); +BOOL WINAPI wglBindVideoImageNV (HPVIDEODEV hVideoDevice, HPBUFFERARB hPbuffer, int iVideoBuffer); +BOOL WINAPI wglReleaseVideoImageNV (HPBUFFERARB hPbuffer, int iVideoBuffer); +BOOL WINAPI wglSendPbufferToVideoNV (HPBUFFERARB hPbuffer, int iBufferType, unsigned long *pulCounterPbuffer, BOOL bBlock); +BOOL WINAPI wglGetVideoInfoNV (HPVIDEODEV hpVideoDevice, unsigned long *pulCounterOutputPbuffer, unsigned long *pulCounterOutputVideo); +#endif +#endif /* WGL_NV_video_output */ + +#ifndef WGL_OML_sync_control +#define WGL_OML_sync_control 1 +typedef BOOL (WINAPI * PFNWGLGETSYNCVALUESOMLPROC) (HDC hdc, INT64 *ust, INT64 *msc, INT64 *sbc); +typedef BOOL (WINAPI * PFNWGLGETMSCRATEOMLPROC) (HDC hdc, INT32 *numerator, INT32 *denominator); +typedef INT64 (WINAPI * PFNWGLSWAPBUFFERSMSCOMLPROC) (HDC hdc, INT64 target_msc, INT64 divisor, INT64 remainder); +typedef INT64 (WINAPI * PFNWGLSWAPLAYERBUFFERSMSCOMLPROC) (HDC hdc, int fuPlanes, INT64 target_msc, INT64 divisor, INT64 remainder); +typedef BOOL (WINAPI * PFNWGLWAITFORMSCOMLPROC) (HDC hdc, INT64 target_msc, INT64 divisor, INT64 remainder, INT64 *ust, INT64 *msc, INT64 *sbc); +typedef BOOL (WINAPI * PFNWGLWAITFORSBCOMLPROC) (HDC hdc, INT64 target_sbc, INT64 *ust, INT64 *msc, INT64 *sbc); +#ifdef WGL_WGLEXT_PROTOTYPES +BOOL WINAPI wglGetSyncValuesOML (HDC hdc, INT64 *ust, INT64 *msc, INT64 *sbc); +BOOL WINAPI wglGetMscRateOML (HDC hdc, INT32 *numerator, INT32 *denominator); +INT64 WINAPI wglSwapBuffersMscOML (HDC hdc, INT64 target_msc, INT64 divisor, INT64 remainder); +INT64 WINAPI wglSwapLayerBuffersMscOML (HDC hdc, int fuPlanes, INT64 target_msc, INT64 divisor, INT64 remainder); +BOOL WINAPI wglWaitForMscOML (HDC hdc, INT64 target_msc, INT64 divisor, INT64 remainder, INT64 *ust, INT64 *msc, INT64 *sbc); +BOOL WINAPI wglWaitForSbcOML (HDC hdc, INT64 target_sbc, INT64 *ust, INT64 *msc, INT64 *sbc); +#endif +#endif /* WGL_OML_sync_control */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/GLES/gl.h b/vendor/swiftshader/include/GLES/gl.h new file mode 100644 index 00000000..6955f6a2 --- /dev/null +++ b/vendor/swiftshader/include/GLES/gl.h @@ -0,0 +1,590 @@ +#ifndef __gles1_gl_h_ +#define __gles1_gl_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2018 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. 
+*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#include + +/* Generated on date 20180525 */ + +/* Generated C header for: + * API: gles1 + * Profile: common + * Versions considered: .* + * Versions emitted: .* + * Default extensions included: None + * Additional extensions included: ^(GL_OES_read_format|GL_OES_compressed_paletted_texture|GL_OES_point_size_array|GL_OES_point_sprite)$ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_VERSION_ES_CM_1_0 +#define GL_VERSION_ES_CM_1_0 1 +#include +typedef khronos_int8_t GLbyte; +typedef khronos_float_t GLclampf; +typedef short GLshort; +typedef unsigned short GLushort; +typedef void GLvoid; +typedef unsigned int GLenum; +typedef khronos_float_t GLfloat; +typedef khronos_int32_t GLfixed; +typedef unsigned int GLuint; +typedef khronos_ssize_t GLsizeiptr; +typedef khronos_intptr_t GLintptr; +typedef unsigned int GLbitfield; +typedef int GLint; +typedef khronos_uint8_t GLubyte; +typedef unsigned char GLboolean; +typedef int GLsizei; +typedef khronos_int32_t GLclampx; +#define GL_VERSION_ES_CL_1_0 1 +#define GL_VERSION_ES_CM_1_1 1 +#define GL_VERSION_ES_CL_1_1 1 +#define GL_DEPTH_BUFFER_BIT 0x00000100 +#define GL_STENCIL_BUFFER_BIT 0x00000400 +#define GL_COLOR_BUFFER_BIT 0x00004000 +#define GL_FALSE 0 +#define GL_TRUE 1 +#define GL_POINTS 0x0000 +#define GL_LINES 0x0001 +#define GL_LINE_LOOP 0x0002 +#define GL_LINE_STRIP 0x0003 +#define GL_TRIANGLES 0x0004 +#define GL_TRIANGLE_STRIP 0x0005 +#define GL_TRIANGLE_FAN 0x0006 +#define GL_NEVER 0x0200 +#define GL_LESS 0x0201 +#define GL_EQUAL 0x0202 +#define GL_LEQUAL 0x0203 +#define GL_GREATER 0x0204 +#define GL_NOTEQUAL 0x0205 +#define GL_GEQUAL 0x0206 +#define GL_ALWAYS 0x0207 +#define GL_ZERO 0 +#define GL_ONE 1 +#define GL_SRC_COLOR 0x0300 +#define GL_ONE_MINUS_SRC_COLOR 0x0301 +#define GL_SRC_ALPHA 0x0302 +#define GL_ONE_MINUS_SRC_ALPHA 0x0303 +#define GL_DST_ALPHA 0x0304 +#define GL_ONE_MINUS_DST_ALPHA 0x0305 +#define GL_DST_COLOR 0x0306 +#define GL_ONE_MINUS_DST_COLOR 0x0307 +#define GL_SRC_ALPHA_SATURATE 0x0308 +#define GL_CLIP_PLANE0 0x3000 +#define GL_CLIP_PLANE1 0x3001 +#define GL_CLIP_PLANE2 0x3002 +#define GL_CLIP_PLANE3 0x3003 +#define GL_CLIP_PLANE4 0x3004 +#define GL_CLIP_PLANE5 0x3005 +#define GL_FRONT 0x0404 +#define GL_BACK 0x0405 +#define GL_FRONT_AND_BACK 0x0408 +#define GL_FOG 0x0B60 +#define GL_LIGHTING 0x0B50 +#define GL_TEXTURE_2D 0x0DE1 +#define GL_CULL_FACE 0x0B44 +#define GL_ALPHA_TEST 0x0BC0 +#define GL_BLEND 0x0BE2 +#define GL_COLOR_LOGIC_OP 0x0BF2 +#define GL_DITHER 0x0BD0 +#define GL_STENCIL_TEST 0x0B90 +#define GL_DEPTH_TEST 0x0B71 +#define GL_POINT_SMOOTH 0x0B10 +#define GL_LINE_SMOOTH 0x0B20 +#define GL_SCISSOR_TEST 0x0C11 +#define GL_COLOR_MATERIAL 0x0B57 +#define GL_NORMALIZE 0x0BA1 +#define GL_RESCALE_NORMAL 0x803A +#define GL_VERTEX_ARRAY 0x8074 +#define GL_NORMAL_ARRAY 0x8075 +#define GL_COLOR_ARRAY 0x8076 +#define GL_TEXTURE_COORD_ARRAY 0x8078 +#define GL_MULTISAMPLE 0x809D +#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E +#define GL_SAMPLE_ALPHA_TO_ONE 0x809F +#define GL_SAMPLE_COVERAGE 0x80A0 +#define GL_NO_ERROR 0 +#define GL_INVALID_ENUM 0x0500 +#define GL_INVALID_VALUE 0x0501 +#define GL_INVALID_OPERATION 0x0502 +#define GL_STACK_OVERFLOW 0x0503 +#define GL_STACK_UNDERFLOW 0x0504 +#define GL_OUT_OF_MEMORY 0x0505 +#define GL_EXP 0x0800 +#define 
GL_EXP2 0x0801 +#define GL_FOG_DENSITY 0x0B62 +#define GL_FOG_START 0x0B63 +#define GL_FOG_END 0x0B64 +#define GL_FOG_MODE 0x0B65 +#define GL_FOG_COLOR 0x0B66 +#define GL_CW 0x0900 +#define GL_CCW 0x0901 +#define GL_CURRENT_COLOR 0x0B00 +#define GL_CURRENT_NORMAL 0x0B02 +#define GL_CURRENT_TEXTURE_COORDS 0x0B03 +#define GL_POINT_SIZE 0x0B11 +#define GL_POINT_SIZE_MIN 0x8126 +#define GL_POINT_SIZE_MAX 0x8127 +#define GL_POINT_FADE_THRESHOLD_SIZE 0x8128 +#define GL_POINT_DISTANCE_ATTENUATION 0x8129 +#define GL_SMOOTH_POINT_SIZE_RANGE 0x0B12 +#define GL_LINE_WIDTH 0x0B21 +#define GL_SMOOTH_LINE_WIDTH_RANGE 0x0B22 +#define GL_ALIASED_POINT_SIZE_RANGE 0x846D +#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E +#define GL_CULL_FACE_MODE 0x0B45 +#define GL_FRONT_FACE 0x0B46 +#define GL_SHADE_MODEL 0x0B54 +#define GL_DEPTH_RANGE 0x0B70 +#define GL_DEPTH_WRITEMASK 0x0B72 +#define GL_DEPTH_CLEAR_VALUE 0x0B73 +#define GL_DEPTH_FUNC 0x0B74 +#define GL_STENCIL_CLEAR_VALUE 0x0B91 +#define GL_STENCIL_FUNC 0x0B92 +#define GL_STENCIL_VALUE_MASK 0x0B93 +#define GL_STENCIL_FAIL 0x0B94 +#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95 +#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96 +#define GL_STENCIL_REF 0x0B97 +#define GL_STENCIL_WRITEMASK 0x0B98 +#define GL_MATRIX_MODE 0x0BA0 +#define GL_VIEWPORT 0x0BA2 +#define GL_MODELVIEW_STACK_DEPTH 0x0BA3 +#define GL_PROJECTION_STACK_DEPTH 0x0BA4 +#define GL_TEXTURE_STACK_DEPTH 0x0BA5 +#define GL_MODELVIEW_MATRIX 0x0BA6 +#define GL_PROJECTION_MATRIX 0x0BA7 +#define GL_TEXTURE_MATRIX 0x0BA8 +#define GL_ALPHA_TEST_FUNC 0x0BC1 +#define GL_ALPHA_TEST_REF 0x0BC2 +#define GL_BLEND_DST 0x0BE0 +#define GL_BLEND_SRC 0x0BE1 +#define GL_LOGIC_OP_MODE 0x0BF0 +#define GL_SCISSOR_BOX 0x0C10 +#define GL_COLOR_CLEAR_VALUE 0x0C22 +#define GL_COLOR_WRITEMASK 0x0C23 +#define GL_MAX_LIGHTS 0x0D31 +#define GL_MAX_CLIP_PLANES 0x0D32 +#define GL_MAX_TEXTURE_SIZE 0x0D33 +#define GL_MAX_MODELVIEW_STACK_DEPTH 0x0D36 +#define GL_MAX_PROJECTION_STACK_DEPTH 0x0D38 +#define GL_MAX_TEXTURE_STACK_DEPTH 0x0D39 +#define GL_MAX_VIEWPORT_DIMS 0x0D3A +#define GL_MAX_TEXTURE_UNITS 0x84E2 +#define GL_SUBPIXEL_BITS 0x0D50 +#define GL_RED_BITS 0x0D52 +#define GL_GREEN_BITS 0x0D53 +#define GL_BLUE_BITS 0x0D54 +#define GL_ALPHA_BITS 0x0D55 +#define GL_DEPTH_BITS 0x0D56 +#define GL_STENCIL_BITS 0x0D57 +#define GL_POLYGON_OFFSET_UNITS 0x2A00 +#define GL_POLYGON_OFFSET_FILL 0x8037 +#define GL_POLYGON_OFFSET_FACTOR 0x8038 +#define GL_TEXTURE_BINDING_2D 0x8069 +#define GL_VERTEX_ARRAY_SIZE 0x807A +#define GL_VERTEX_ARRAY_TYPE 0x807B +#define GL_VERTEX_ARRAY_STRIDE 0x807C +#define GL_NORMAL_ARRAY_TYPE 0x807E +#define GL_NORMAL_ARRAY_STRIDE 0x807F +#define GL_COLOR_ARRAY_SIZE 0x8081 +#define GL_COLOR_ARRAY_TYPE 0x8082 +#define GL_COLOR_ARRAY_STRIDE 0x8083 +#define GL_TEXTURE_COORD_ARRAY_SIZE 0x8088 +#define GL_TEXTURE_COORD_ARRAY_TYPE 0x8089 +#define GL_TEXTURE_COORD_ARRAY_STRIDE 0x808A +#define GL_VERTEX_ARRAY_POINTER 0x808E +#define GL_NORMAL_ARRAY_POINTER 0x808F +#define GL_COLOR_ARRAY_POINTER 0x8090 +#define GL_TEXTURE_COORD_ARRAY_POINTER 0x8092 +#define GL_SAMPLE_BUFFERS 0x80A8 +#define GL_SAMPLES 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT 0x80AB +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3 +#define GL_DONT_CARE 0x1100 +#define GL_FASTEST 0x1101 +#define GL_NICEST 0x1102 +#define GL_PERSPECTIVE_CORRECTION_HINT 0x0C50 +#define GL_POINT_SMOOTH_HINT 0x0C51 +#define GL_LINE_SMOOTH_HINT 0x0C52 +#define GL_FOG_HINT 0x0C54 +#define 
GL_GENERATE_MIPMAP_HINT 0x8192 +#define GL_LIGHT_MODEL_AMBIENT 0x0B53 +#define GL_LIGHT_MODEL_TWO_SIDE 0x0B52 +#define GL_AMBIENT 0x1200 +#define GL_DIFFUSE 0x1201 +#define GL_SPECULAR 0x1202 +#define GL_POSITION 0x1203 +#define GL_SPOT_DIRECTION 0x1204 +#define GL_SPOT_EXPONENT 0x1205 +#define GL_SPOT_CUTOFF 0x1206 +#define GL_CONSTANT_ATTENUATION 0x1207 +#define GL_LINEAR_ATTENUATION 0x1208 +#define GL_QUADRATIC_ATTENUATION 0x1209 +#define GL_BYTE 0x1400 +#define GL_UNSIGNED_BYTE 0x1401 +#define GL_SHORT 0x1402 +#define GL_UNSIGNED_SHORT 0x1403 +#define GL_FLOAT 0x1406 +#define GL_FIXED 0x140C +#define GL_CLEAR 0x1500 +#define GL_AND 0x1501 +#define GL_AND_REVERSE 0x1502 +#define GL_COPY 0x1503 +#define GL_AND_INVERTED 0x1504 +#define GL_NOOP 0x1505 +#define GL_XOR 0x1506 +#define GL_OR 0x1507 +#define GL_NOR 0x1508 +#define GL_EQUIV 0x1509 +#define GL_INVERT 0x150A +#define GL_OR_REVERSE 0x150B +#define GL_COPY_INVERTED 0x150C +#define GL_OR_INVERTED 0x150D +#define GL_NAND 0x150E +#define GL_SET 0x150F +#define GL_EMISSION 0x1600 +#define GL_SHININESS 0x1601 +#define GL_AMBIENT_AND_DIFFUSE 0x1602 +#define GL_MODELVIEW 0x1700 +#define GL_PROJECTION 0x1701 +#define GL_TEXTURE 0x1702 +#define GL_ALPHA 0x1906 +#define GL_RGB 0x1907 +#define GL_RGBA 0x1908 +#define GL_LUMINANCE 0x1909 +#define GL_LUMINANCE_ALPHA 0x190A +#define GL_UNPACK_ALIGNMENT 0x0CF5 +#define GL_PACK_ALIGNMENT 0x0D05 +#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034 +#define GL_UNSIGNED_SHORT_5_6_5 0x8363 +#define GL_FLAT 0x1D00 +#define GL_SMOOTH 0x1D01 +#define GL_KEEP 0x1E00 +#define GL_REPLACE 0x1E01 +#define GL_INCR 0x1E02 +#define GL_DECR 0x1E03 +#define GL_VENDOR 0x1F00 +#define GL_RENDERER 0x1F01 +#define GL_VERSION 0x1F02 +#define GL_EXTENSIONS 0x1F03 +#define GL_MODULATE 0x2100 +#define GL_DECAL 0x2101 +#define GL_ADD 0x0104 +#define GL_TEXTURE_ENV_MODE 0x2200 +#define GL_TEXTURE_ENV_COLOR 0x2201 +#define GL_TEXTURE_ENV 0x2300 +#define GL_NEAREST 0x2600 +#define GL_LINEAR 0x2601 +#define GL_NEAREST_MIPMAP_NEAREST 0x2700 +#define GL_LINEAR_MIPMAP_NEAREST 0x2701 +#define GL_NEAREST_MIPMAP_LINEAR 0x2702 +#define GL_LINEAR_MIPMAP_LINEAR 0x2703 +#define GL_TEXTURE_MAG_FILTER 0x2800 +#define GL_TEXTURE_MIN_FILTER 0x2801 +#define GL_TEXTURE_WRAP_S 0x2802 +#define GL_TEXTURE_WRAP_T 0x2803 +#define GL_GENERATE_MIPMAP 0x8191 +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 +#define GL_TEXTURE3 0x84C3 +#define GL_TEXTURE4 0x84C4 +#define GL_TEXTURE5 0x84C5 +#define GL_TEXTURE6 0x84C6 +#define GL_TEXTURE7 0x84C7 +#define GL_TEXTURE8 0x84C8 +#define GL_TEXTURE9 0x84C9 +#define GL_TEXTURE10 0x84CA +#define GL_TEXTURE11 0x84CB +#define GL_TEXTURE12 0x84CC +#define GL_TEXTURE13 0x84CD +#define GL_TEXTURE14 0x84CE +#define GL_TEXTURE15 0x84CF +#define GL_TEXTURE16 0x84D0 +#define GL_TEXTURE17 0x84D1 +#define GL_TEXTURE18 0x84D2 +#define GL_TEXTURE19 0x84D3 +#define GL_TEXTURE20 0x84D4 +#define GL_TEXTURE21 0x84D5 +#define GL_TEXTURE22 0x84D6 +#define GL_TEXTURE23 0x84D7 +#define GL_TEXTURE24 0x84D8 +#define GL_TEXTURE25 0x84D9 +#define GL_TEXTURE26 0x84DA +#define GL_TEXTURE27 0x84DB +#define GL_TEXTURE28 0x84DC +#define GL_TEXTURE29 0x84DD +#define GL_TEXTURE30 0x84DE +#define GL_TEXTURE31 0x84DF +#define GL_ACTIVE_TEXTURE 0x84E0 +#define GL_CLIENT_ACTIVE_TEXTURE 0x84E1 +#define GL_REPEAT 0x2901 +#define GL_CLAMP_TO_EDGE 0x812F +#define GL_LIGHT0 0x4000 +#define GL_LIGHT1 0x4001 +#define GL_LIGHT2 0x4002 +#define GL_LIGHT3 0x4003 +#define GL_LIGHT4 0x4004 
+#define GL_LIGHT5 0x4005 +#define GL_LIGHT6 0x4006 +#define GL_LIGHT7 0x4007 +#define GL_ARRAY_BUFFER 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define GL_ARRAY_BUFFER_BINDING 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895 +#define GL_VERTEX_ARRAY_BUFFER_BINDING 0x8896 +#define GL_NORMAL_ARRAY_BUFFER_BINDING 0x8897 +#define GL_COLOR_ARRAY_BUFFER_BINDING 0x8898 +#define GL_TEXTURE_COORD_ARRAY_BUFFER_BINDING 0x889A +#define GL_STATIC_DRAW 0x88E4 +#define GL_DYNAMIC_DRAW 0x88E8 +#define GL_BUFFER_SIZE 0x8764 +#define GL_BUFFER_USAGE 0x8765 +#define GL_SUBTRACT 0x84E7 +#define GL_COMBINE 0x8570 +#define GL_COMBINE_RGB 0x8571 +#define GL_COMBINE_ALPHA 0x8572 +#define GL_RGB_SCALE 0x8573 +#define GL_ADD_SIGNED 0x8574 +#define GL_INTERPOLATE 0x8575 +#define GL_CONSTANT 0x8576 +#define GL_PRIMARY_COLOR 0x8577 +#define GL_PREVIOUS 0x8578 +#define GL_OPERAND0_RGB 0x8590 +#define GL_OPERAND1_RGB 0x8591 +#define GL_OPERAND2_RGB 0x8592 +#define GL_OPERAND0_ALPHA 0x8598 +#define GL_OPERAND1_ALPHA 0x8599 +#define GL_OPERAND2_ALPHA 0x859A +#define GL_ALPHA_SCALE 0x0D1C +#define GL_SRC0_RGB 0x8580 +#define GL_SRC1_RGB 0x8581 +#define GL_SRC2_RGB 0x8582 +#define GL_SRC0_ALPHA 0x8588 +#define GL_SRC1_ALPHA 0x8589 +#define GL_SRC2_ALPHA 0x858A +#define GL_DOT3_RGB 0x86AE +#define GL_DOT3_RGBA 0x86AF +GL_API void GL_APIENTRY glAlphaFunc (GLenum func, GLfloat ref); +GL_API void GL_APIENTRY glClearColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_API void GL_APIENTRY glClearDepthf (GLfloat d); +GL_API void GL_APIENTRY glClipPlanef (GLenum p, const GLfloat *eqn); +GL_API void GL_APIENTRY glColor4f (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_API void GL_APIENTRY glDepthRangef (GLfloat n, GLfloat f); +GL_API void GL_APIENTRY glFogf (GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glFogfv (GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glFrustumf (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +GL_API void GL_APIENTRY glGetClipPlanef (GLenum plane, GLfloat *equation); +GL_API void GL_APIENTRY glGetFloatv (GLenum pname, GLfloat *data); +GL_API void GL_APIENTRY glGetLightfv (GLenum light, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetMaterialfv (GLenum face, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetTexEnvfv (GLenum target, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetTexParameterfv (GLenum target, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glLightModelf (GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glLightModelfv (GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glLightf (GLenum light, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glLightfv (GLenum light, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glLineWidth (GLfloat width); +GL_API void GL_APIENTRY glLoadMatrixf (const GLfloat *m); +GL_API void GL_APIENTRY glMaterialf (GLenum face, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glMaterialfv (GLenum face, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glMultMatrixf (const GLfloat *m); +GL_API void GL_APIENTRY glMultiTexCoord4f (GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q); +GL_API void GL_APIENTRY glNormal3f (GLfloat nx, GLfloat ny, GLfloat nz); +GL_API void GL_APIENTRY glOrthof (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +GL_API void GL_APIENTRY glPointParameterf (GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glPointParameterfv 
(GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glPointSize (GLfloat size); +GL_API void GL_APIENTRY glPolygonOffset (GLfloat factor, GLfloat units); +GL_API void GL_APIENTRY glRotatef (GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +GL_API void GL_APIENTRY glScalef (GLfloat x, GLfloat y, GLfloat z); +GL_API void GL_APIENTRY glTexEnvf (GLenum target, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glTexEnvfv (GLenum target, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glTexParameterf (GLenum target, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glTexParameterfv (GLenum target, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glTranslatef (GLfloat x, GLfloat y, GLfloat z); +GL_API void GL_APIENTRY glActiveTexture (GLenum texture); +GL_API void GL_APIENTRY glAlphaFuncx (GLenum func, GLfixed ref); +GL_API void GL_APIENTRY glBindBuffer (GLenum target, GLuint buffer); +GL_API void GL_APIENTRY glBindTexture (GLenum target, GLuint texture); +GL_API void GL_APIENTRY glBlendFunc (GLenum sfactor, GLenum dfactor); +GL_API void GL_APIENTRY glBufferData (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +GL_API void GL_APIENTRY glBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +GL_API void GL_APIENTRY glClear (GLbitfield mask); +GL_API void GL_APIENTRY glClearColorx (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GL_API void GL_APIENTRY glClearDepthx (GLfixed depth); +GL_API void GL_APIENTRY glClearStencil (GLint s); +GL_API void GL_APIENTRY glClientActiveTexture (GLenum texture); +GL_API void GL_APIENTRY glClipPlanex (GLenum plane, const GLfixed *equation); +GL_API void GL_APIENTRY glColor4ub (GLubyte red, GLubyte green, GLubyte blue, GLubyte alpha); +GL_API void GL_APIENTRY glColor4x (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GL_API void GL_APIENTRY glColorMask (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +GL_API void GL_APIENTRY glColorPointer (GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glCompressedTexImage2D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GL_API void GL_APIENTRY glCompressedTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GL_API void GL_APIENTRY glCopyTexImage2D (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GL_API void GL_APIENTRY glCopyTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glCullFace (GLenum mode); +GL_API void GL_APIENTRY glDeleteBuffers (GLsizei n, const GLuint *buffers); +GL_API void GL_APIENTRY glDeleteTextures (GLsizei n, const GLuint *textures); +GL_API void GL_APIENTRY glDepthFunc (GLenum func); +GL_API void GL_APIENTRY glDepthMask (GLboolean flag); +GL_API void GL_APIENTRY glDepthRangex (GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glDisable (GLenum cap); +GL_API void GL_APIENTRY glDisableClientState (GLenum array); +GL_API void GL_APIENTRY glDrawArrays (GLenum mode, GLint first, GLsizei count); +GL_API void GL_APIENTRY glDrawElements (GLenum mode, GLsizei count, GLenum type, const void *indices); +GL_API void GL_APIENTRY glEnable (GLenum cap); +GL_API void GL_APIENTRY glEnableClientState 
(GLenum array); +GL_API void GL_APIENTRY glFinish (void); +GL_API void GL_APIENTRY glFlush (void); +GL_API void GL_APIENTRY glFogx (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glFogxv (GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glFrontFace (GLenum mode); +GL_API void GL_APIENTRY glFrustumx (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glGetBooleanv (GLenum pname, GLboolean *data); +GL_API void GL_APIENTRY glGetBufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGetClipPlanex (GLenum plane, GLfixed *equation); +GL_API void GL_APIENTRY glGenBuffers (GLsizei n, GLuint *buffers); +GL_API void GL_APIENTRY glGenTextures (GLsizei n, GLuint *textures); +GL_API GLenum GL_APIENTRY glGetError (void); +GL_API void GL_APIENTRY glGetFixedv (GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetIntegerv (GLenum pname, GLint *data); +GL_API void GL_APIENTRY glGetLightxv (GLenum light, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetMaterialxv (GLenum face, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetPointerv (GLenum pname, void **params); +GL_API const GLubyte *GL_APIENTRY glGetString (GLenum name); +GL_API void GL_APIENTRY glGetTexEnviv (GLenum target, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGetTexEnvxv (GLenum target, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetTexParameteriv (GLenum target, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGetTexParameterxv (GLenum target, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glHint (GLenum target, GLenum mode); +GL_API GLboolean GL_APIENTRY glIsBuffer (GLuint buffer); +GL_API GLboolean GL_APIENTRY glIsEnabled (GLenum cap); +GL_API GLboolean GL_APIENTRY glIsTexture (GLuint texture); +GL_API void GL_APIENTRY glLightModelx (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glLightModelxv (GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glLightx (GLenum light, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glLightxv (GLenum light, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glLineWidthx (GLfixed width); +GL_API void GL_APIENTRY glLoadIdentity (void); +GL_API void GL_APIENTRY glLoadMatrixx (const GLfixed *m); +GL_API void GL_APIENTRY glLogicOp (GLenum opcode); +GL_API void GL_APIENTRY glMaterialx (GLenum face, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glMaterialxv (GLenum face, GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glMatrixMode (GLenum mode); +GL_API void GL_APIENTRY glMultMatrixx (const GLfixed *m); +GL_API void GL_APIENTRY glMultiTexCoord4x (GLenum texture, GLfixed s, GLfixed t, GLfixed r, GLfixed q); +GL_API void GL_APIENTRY glNormal3x (GLfixed nx, GLfixed ny, GLfixed nz); +GL_API void GL_APIENTRY glNormalPointer (GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glOrthox (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glPixelStorei (GLenum pname, GLint param); +GL_API void GL_APIENTRY glPointParameterx (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glPointParameterxv (GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glPointSizex (GLfixed size); +GL_API void GL_APIENTRY glPolygonOffsetx (GLfixed factor, GLfixed units); +GL_API void GL_APIENTRY glPopMatrix (void); +GL_API void GL_APIENTRY glPushMatrix (void); +GL_API void GL_APIENTRY glReadPixels (GLint x, GLint y, GLsizei width, 
GLsizei height, GLenum format, GLenum type, void *pixels); +GL_API void GL_APIENTRY glRotatex (GLfixed angle, GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glSampleCoverage (GLfloat value, GLboolean invert); +GL_API void GL_APIENTRY glSampleCoveragex (GLclampx value, GLboolean invert); +GL_API void GL_APIENTRY glScalex (GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glScissor (GLint x, GLint y, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glShadeModel (GLenum mode); +GL_API void GL_APIENTRY glStencilFunc (GLenum func, GLint ref, GLuint mask); +GL_API void GL_APIENTRY glStencilMask (GLuint mask); +GL_API void GL_APIENTRY glStencilOp (GLenum fail, GLenum zfail, GLenum zpass); +GL_API void GL_APIENTRY glTexCoordPointer (GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glTexEnvi (GLenum target, GLenum pname, GLint param); +GL_API void GL_APIENTRY glTexEnvx (GLenum target, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexEnviv (GLenum target, GLenum pname, const GLint *params); +GL_API void GL_APIENTRY glTexEnvxv (GLenum target, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glTexImage2D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GL_API void GL_APIENTRY glTexParameteri (GLenum target, GLenum pname, GLint param); +GL_API void GL_APIENTRY glTexParameterx (GLenum target, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexParameteriv (GLenum target, GLenum pname, const GLint *params); +GL_API void GL_APIENTRY glTexParameterxv (GLenum target, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GL_API void GL_APIENTRY glTranslatex (GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glVertexPointer (GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glViewport (GLint x, GLint y, GLsizei width, GLsizei height); +#endif /* GL_VERSION_ES_CM_1_0 */ + +#ifndef GL_OES_compressed_paletted_texture +#define GL_OES_compressed_paletted_texture 1 +#define GL_PALETTE4_RGB8_OES 0x8B90 +#define GL_PALETTE4_RGBA8_OES 0x8B91 +#define GL_PALETTE4_R5_G6_B5_OES 0x8B92 +#define GL_PALETTE4_RGBA4_OES 0x8B93 +#define GL_PALETTE4_RGB5_A1_OES 0x8B94 +#define GL_PALETTE8_RGB8_OES 0x8B95 +#define GL_PALETTE8_RGBA8_OES 0x8B96 +#define GL_PALETTE8_R5_G6_B5_OES 0x8B97 +#define GL_PALETTE8_RGBA4_OES 0x8B98 +#define GL_PALETTE8_RGB5_A1_OES 0x8B99 +#endif /* GL_OES_compressed_paletted_texture */ + +#ifndef GL_OES_point_size_array +#define GL_OES_point_size_array 1 +#define GL_POINT_SIZE_ARRAY_OES 0x8B9C +#define GL_POINT_SIZE_ARRAY_TYPE_OES 0x898A +#define GL_POINT_SIZE_ARRAY_STRIDE_OES 0x898B +#define GL_POINT_SIZE_ARRAY_POINTER_OES 0x898C +#define GL_POINT_SIZE_ARRAY_BUFFER_BINDING_OES 0x8B9F +GL_API void GL_APIENTRY glPointSizePointerOES (GLenum type, GLsizei stride, const void *pointer); +#endif /* GL_OES_point_size_array */ + +#ifndef GL_OES_point_sprite +#define GL_OES_point_sprite 1 +#define GL_POINT_SPRITE_OES 0x8861 +#define GL_COORD_REPLACE_OES 0x8862 +#endif /* GL_OES_point_sprite */ + +#ifndef GL_OES_read_format +#define GL_OES_read_format 1 +#define GL_IMPLEMENTATION_COLOR_READ_TYPE_OES 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT_OES 0x8B9B +#endif /* GL_OES_read_format */ + +#ifdef __cplusplus 
+} +#endif + +#endif diff --git a/vendor/swiftshader/include/GLES/glext.h b/vendor/swiftshader/include/GLES/glext.h new file mode 100644 index 00000000..6c9c9d2c --- /dev/null +++ b/vendor/swiftshader/include/GLES/glext.h @@ -0,0 +1,973 @@ +#ifndef __gles1_glext_h_ +#define __gles1_glext_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2018 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#ifndef GL_APIENTRYP +#define GL_APIENTRYP GL_APIENTRY* +#endif + +/* Generated on date 20180525 */ + +/* Generated C header for: + * API: gles1 + * Profile: common + * Versions considered: .* + * Versions emitted: _nomatch_^ + * Default extensions included: gles1 + * Additional extensions included: _nomatch_^ + * Extensions removed: ^(GL_OES_read_format|GL_OES_compressed_paletted_texture|GL_OES_point_size_array|GL_OES_point_sprite)$ + */ + +#ifndef GL_KHR_debug +#define GL_KHR_debug 1 +#endif /* GL_KHR_debug */ + +#ifndef GL_OES_EGL_image +#define GL_OES_EGL_image 1 +typedef void *GLeglImageOES; +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETTEXTURE2DOESPROC) (GLenum target, GLeglImageOES image); +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETRENDERBUFFERSTORAGEOESPROC) (GLenum target, GLeglImageOES image); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glEGLImageTargetTexture2DOES (GLenum target, GLeglImageOES image); +GL_API void GL_APIENTRY glEGLImageTargetRenderbufferStorageOES (GLenum target, GLeglImageOES image); +#endif +#endif /* GL_OES_EGL_image */ + +#ifndef GL_OES_EGL_image_external +#define GL_OES_EGL_image_external 1 +#define GL_TEXTURE_EXTERNAL_OES 0x8D65 +#define GL_TEXTURE_BINDING_EXTERNAL_OES 0x8D67 +#define GL_REQUIRED_TEXTURE_IMAGE_UNITS_OES 0x8D68 +#endif /* GL_OES_EGL_image_external */ + +#ifndef GL_OES_blend_equation_separate +#define GL_OES_blend_equation_separate 1 +#define GL_BLEND_EQUATION_RGB_OES 0x8009 +#define GL_BLEND_EQUATION_ALPHA_OES 0x883D +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEOESPROC) (GLenum modeRGB, GLenum modeAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glBlendEquationSeparateOES (GLenum modeRGB, GLenum modeAlpha); +#endif +#endif /* GL_OES_blend_equation_separate */ + 
+#ifndef GL_OES_blend_func_separate +#define GL_OES_blend_func_separate 1 +#define GL_BLEND_DST_RGB_OES 0x80C8 +#define GL_BLEND_SRC_RGB_OES 0x80C9 +#define GL_BLEND_DST_ALPHA_OES 0x80CA +#define GL_BLEND_SRC_ALPHA_OES 0x80CB +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEOESPROC) (GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glBlendFuncSeparateOES (GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +#endif +#endif /* GL_OES_blend_func_separate */ + +#ifndef GL_OES_blend_subtract +#define GL_OES_blend_subtract 1 +#define GL_BLEND_EQUATION_OES 0x8009 +#define GL_FUNC_ADD_OES 0x8006 +#define GL_FUNC_SUBTRACT_OES 0x800A +#define GL_FUNC_REVERSE_SUBTRACT_OES 0x800B +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONOESPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glBlendEquationOES (GLenum mode); +#endif +#endif /* GL_OES_blend_subtract */ + +#ifndef GL_OES_byte_coordinates +#define GL_OES_byte_coordinates 1 +#endif /* GL_OES_byte_coordinates */ + +#ifndef GL_OES_compressed_ETC1_RGB8_sub_texture +#define GL_OES_compressed_ETC1_RGB8_sub_texture 1 +#endif /* GL_OES_compressed_ETC1_RGB8_sub_texture */ + +#ifndef GL_OES_compressed_ETC1_RGB8_texture +#define GL_OES_compressed_ETC1_RGB8_texture 1 +#define GL_ETC1_RGB8_OES 0x8D64 +#endif /* GL_OES_compressed_ETC1_RGB8_texture */ + +#ifndef GL_OES_depth24 +#define GL_OES_depth24 1 +#define GL_DEPTH_COMPONENT24_OES 0x81A6 +#endif /* GL_OES_depth24 */ + +#ifndef GL_OES_depth32 +#define GL_OES_depth32 1 +#define GL_DEPTH_COMPONENT32_OES 0x81A7 +#endif /* GL_OES_depth32 */ + +#ifndef GL_OES_draw_texture +#define GL_OES_draw_texture 1 +#define GL_TEXTURE_CROP_RECT_OES 0x8B9D +typedef void (GL_APIENTRYP PFNGLDRAWTEXSOESPROC) (GLshort x, GLshort y, GLshort z, GLshort width, GLshort height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXIOESPROC) (GLint x, GLint y, GLint z, GLint width, GLint height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXXOESPROC) (GLfixed x, GLfixed y, GLfixed z, GLfixed width, GLfixed height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXSVOESPROC) (const GLshort *coords); +typedef void (GL_APIENTRYP PFNGLDRAWTEXIVOESPROC) (const GLint *coords); +typedef void (GL_APIENTRYP PFNGLDRAWTEXXVOESPROC) (const GLfixed *coords); +typedef void (GL_APIENTRYP PFNGLDRAWTEXFOESPROC) (GLfloat x, GLfloat y, GLfloat z, GLfloat width, GLfloat height); +typedef void (GL_APIENTRYP PFNGLDRAWTEXFVOESPROC) (const GLfloat *coords); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glDrawTexsOES (GLshort x, GLshort y, GLshort z, GLshort width, GLshort height); +GL_API void GL_APIENTRY glDrawTexiOES (GLint x, GLint y, GLint z, GLint width, GLint height); +GL_API void GL_APIENTRY glDrawTexxOES (GLfixed x, GLfixed y, GLfixed z, GLfixed width, GLfixed height); +GL_API void GL_APIENTRY glDrawTexsvOES (const GLshort *coords); +GL_API void GL_APIENTRY glDrawTexivOES (const GLint *coords); +GL_API void GL_APIENTRY glDrawTexxvOES (const GLfixed *coords); +GL_API void GL_APIENTRY glDrawTexfOES (GLfloat x, GLfloat y, GLfloat z, GLfloat width, GLfloat height); +GL_API void GL_APIENTRY glDrawTexfvOES (const GLfloat *coords); +#endif +#endif /* GL_OES_draw_texture */ + +#ifndef GL_OES_element_index_uint +#define GL_OES_element_index_uint 1 +#define GL_UNSIGNED_INT 0x1405 +#endif /* GL_OES_element_index_uint */ + +#ifndef GL_OES_extended_matrix_palette +#define GL_OES_extended_matrix_palette 1 +#endif /* GL_OES_extended_matrix_palette */ + +#ifndef GL_OES_fbo_render_mipmap 
+#define GL_OES_fbo_render_mipmap 1 +#endif /* GL_OES_fbo_render_mipmap */ + +#ifndef GL_OES_fixed_point +#define GL_OES_fixed_point 1 +#define GL_FIXED_OES 0x140C +typedef void (GL_APIENTRYP PFNGLALPHAFUNCXOESPROC) (GLenum func, GLfixed ref); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORXOESPROC) (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHXOESPROC) (GLfixed depth); +typedef void (GL_APIENTRYP PFNGLCLIPPLANEXOESPROC) (GLenum plane, const GLfixed *equation); +typedef void (GL_APIENTRYP PFNGLCOLOR4XOESPROC) (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEXOESPROC) (GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLFOGXOESPROC) (GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLFOGXVOESPROC) (GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLFRUSTUMXOESPROC) (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLGETCLIPPLANEXOESPROC) (GLenum plane, GLfixed *equation); +typedef void (GL_APIENTRYP PFNGLGETFIXEDVOESPROC) (GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETTEXENVXVOESPROC) (GLenum target, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERXVOESPROC) (GLenum target, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELXOESPROC) (GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLLIGHTMODELXVOESPROC) (GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLLIGHTXOESPROC) (GLenum light, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLLIGHTXVOESPROC) (GLenum light, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHXOESPROC) (GLfixed width); +typedef void (GL_APIENTRYP PFNGLLOADMATRIXXOESPROC) (const GLfixed *m); +typedef void (GL_APIENTRYP PFNGLMATERIALXOESPROC) (GLenum face, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLMATERIALXVOESPROC) (GLenum face, GLenum pname, const GLfixed *param); +typedef void (GL_APIENTRYP PFNGLMULTMATRIXXOESPROC) (const GLfixed *m); +typedef void (GL_APIENTRYP PFNGLMULTITEXCOORD4XOESPROC) (GLenum texture, GLfixed s, GLfixed t, GLfixed r, GLfixed q); +typedef void (GL_APIENTRYP PFNGLNORMAL3XOESPROC) (GLfixed nx, GLfixed ny, GLfixed nz); +typedef void (GL_APIENTRYP PFNGLORTHOXOESPROC) (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +typedef void (GL_APIENTRYP PFNGLPOINTPARAMETERXVOESPROC) (GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLPOINTSIZEXOESPROC) (GLfixed size); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETXOESPROC) (GLfixed factor, GLfixed units); +typedef void (GL_APIENTRYP PFNGLROTATEXOESPROC) (GLfixed angle, GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLSCALEXOESPROC) (GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLTEXENVXOESPROC) (GLenum target, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLTEXENVXVOESPROC) (GLenum target, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERXOESPROC) (GLenum target, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERXVOESPROC) (GLenum target, GLenum pname, const GLfixed *params); +typedef void (GL_APIENTRYP PFNGLTRANSLATEXOESPROC) (GLfixed x, GLfixed y, GLfixed z); +typedef void (GL_APIENTRYP PFNGLGETLIGHTXVOESPROC) (GLenum light, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP 
PFNGLGETMATERIALXVOESPROC) (GLenum face, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLPOINTPARAMETERXOESPROC) (GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGEXOESPROC) (GLclampx value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLGETTEXGENXVOESPROC) (GLenum coord, GLenum pname, GLfixed *params); +typedef void (GL_APIENTRYP PFNGLTEXGENXOESPROC) (GLenum coord, GLenum pname, GLfixed param); +typedef void (GL_APIENTRYP PFNGLTEXGENXVOESPROC) (GLenum coord, GLenum pname, const GLfixed *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glAlphaFuncxOES (GLenum func, GLfixed ref); +GL_API void GL_APIENTRY glClearColorxOES (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GL_API void GL_APIENTRY glClearDepthxOES (GLfixed depth); +GL_API void GL_APIENTRY glClipPlanexOES (GLenum plane, const GLfixed *equation); +GL_API void GL_APIENTRY glColor4xOES (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha); +GL_API void GL_APIENTRY glDepthRangexOES (GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glFogxOES (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glFogxvOES (GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glFrustumxOES (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glGetClipPlanexOES (GLenum plane, GLfixed *equation); +GL_API void GL_APIENTRY glGetFixedvOES (GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetTexEnvxvOES (GLenum target, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glGetTexParameterxvOES (GLenum target, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glLightModelxOES (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glLightModelxvOES (GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glLightxOES (GLenum light, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glLightxvOES (GLenum light, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glLineWidthxOES (GLfixed width); +GL_API void GL_APIENTRY glLoadMatrixxOES (const GLfixed *m); +GL_API void GL_APIENTRY glMaterialxOES (GLenum face, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glMaterialxvOES (GLenum face, GLenum pname, const GLfixed *param); +GL_API void GL_APIENTRY glMultMatrixxOES (const GLfixed *m); +GL_API void GL_APIENTRY glMultiTexCoord4xOES (GLenum texture, GLfixed s, GLfixed t, GLfixed r, GLfixed q); +GL_API void GL_APIENTRY glNormal3xOES (GLfixed nx, GLfixed ny, GLfixed nz); +GL_API void GL_APIENTRY glOrthoxOES (GLfixed l, GLfixed r, GLfixed b, GLfixed t, GLfixed n, GLfixed f); +GL_API void GL_APIENTRY glPointParameterxvOES (GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glPointSizexOES (GLfixed size); +GL_API void GL_APIENTRY glPolygonOffsetxOES (GLfixed factor, GLfixed units); +GL_API void GL_APIENTRY glRotatexOES (GLfixed angle, GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glScalexOES (GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glTexEnvxOES (GLenum target, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexEnvxvOES (GLenum target, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glTexParameterxOES (GLenum target, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexParameterxvOES (GLenum target, GLenum pname, const GLfixed *params); +GL_API void GL_APIENTRY glTranslatexOES (GLfixed x, GLfixed y, GLfixed z); +GL_API void GL_APIENTRY glGetLightxvOES (GLenum light, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY 
glGetMaterialxvOES (GLenum face, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glPointParameterxOES (GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glSampleCoveragexOES (GLclampx value, GLboolean invert); +GL_API void GL_APIENTRY glGetTexGenxvOES (GLenum coord, GLenum pname, GLfixed *params); +GL_API void GL_APIENTRY glTexGenxOES (GLenum coord, GLenum pname, GLfixed param); +GL_API void GL_APIENTRY glTexGenxvOES (GLenum coord, GLenum pname, const GLfixed *params); +#endif +#endif /* GL_OES_fixed_point */ + +#ifndef GL_OES_framebuffer_object +#define GL_OES_framebuffer_object 1 +#define GL_NONE_OES 0 +#define GL_FRAMEBUFFER_OES 0x8D40 +#define GL_RENDERBUFFER_OES 0x8D41 +#define GL_RGBA4_OES 0x8056 +#define GL_RGB5_A1_OES 0x8057 +#define GL_RGB565_OES 0x8D62 +#define GL_DEPTH_COMPONENT16_OES 0x81A5 +#define GL_RENDERBUFFER_WIDTH_OES 0x8D42 +#define GL_RENDERBUFFER_HEIGHT_OES 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT_OES 0x8D44 +#define GL_RENDERBUFFER_RED_SIZE_OES 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE_OES 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE_OES 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE_OES 0x8D53 +#define GL_RENDERBUFFER_DEPTH_SIZE_OES 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE_OES 0x8D55 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_OES 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_OES 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_OES 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_OES 0x8CD3 +#define GL_COLOR_ATTACHMENT0_OES 0x8CE0 +#define GL_DEPTH_ATTACHMENT_OES 0x8D00 +#define GL_STENCIL_ATTACHMENT_OES 0x8D20 +#define GL_FRAMEBUFFER_COMPLETE_OES 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT_OES 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_OES 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_OES 0x8CD9 +#define GL_FRAMEBUFFER_INCOMPLETE_FORMATS_OES 0x8CDA +#define GL_FRAMEBUFFER_UNSUPPORTED_OES 0x8CDD +#define GL_FRAMEBUFFER_BINDING_OES 0x8CA6 +#define GL_RENDERBUFFER_BINDING_OES 0x8CA7 +#define GL_MAX_RENDERBUFFER_SIZE_OES 0x84E8 +#define GL_INVALID_FRAMEBUFFER_OPERATION_OES 0x0506 +typedef GLboolean (GL_APIENTRYP PFNGLISRENDERBUFFEROESPROC) (GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLBINDRENDERBUFFEROESPROC) (GLenum target, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLDELETERENDERBUFFERSOESPROC) (GLsizei n, const GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLGENRENDERBUFFERSOESPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEOESPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVOESPROC) (GLenum target, GLenum pname, GLint *params); +typedef GLboolean (GL_APIENTRYP PFNGLISFRAMEBUFFEROESPROC) (GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLBINDFRAMEBUFFEROESPROC) (GLenum target, GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLDELETEFRAMEBUFFERSOESPROC) (GLsizei n, const GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLGENFRAMEBUFFERSOESPROC) (GLsizei n, GLuint *framebuffers); +typedef GLenum (GL_APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSOESPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFEROESPROC) (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DOESPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (GL_APIENTRYP 
PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVOESPROC) (GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGENERATEMIPMAPOESPROC) (GLenum target); +#ifdef GL_GLEXT_PROTOTYPES +GL_API GLboolean GL_APIENTRY glIsRenderbufferOES (GLuint renderbuffer); +GL_API void GL_APIENTRY glBindRenderbufferOES (GLenum target, GLuint renderbuffer); +GL_API void GL_APIENTRY glDeleteRenderbuffersOES (GLsizei n, const GLuint *renderbuffers); +GL_API void GL_APIENTRY glGenRenderbuffersOES (GLsizei n, GLuint *renderbuffers); +GL_API void GL_APIENTRY glRenderbufferStorageOES (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glGetRenderbufferParameterivOES (GLenum target, GLenum pname, GLint *params); +GL_API GLboolean GL_APIENTRY glIsFramebufferOES (GLuint framebuffer); +GL_API void GL_APIENTRY glBindFramebufferOES (GLenum target, GLuint framebuffer); +GL_API void GL_APIENTRY glDeleteFramebuffersOES (GLsizei n, const GLuint *framebuffers); +GL_API void GL_APIENTRY glGenFramebuffersOES (GLsizei n, GLuint *framebuffers); +GL_API GLenum GL_APIENTRY glCheckFramebufferStatusOES (GLenum target); +GL_API void GL_APIENTRY glFramebufferRenderbufferOES (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GL_API void GL_APIENTRY glFramebufferTexture2DOES (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GL_API void GL_APIENTRY glGetFramebufferAttachmentParameterivOES (GLenum target, GLenum attachment, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glGenerateMipmapOES (GLenum target); +#endif +#endif /* GL_OES_framebuffer_object */ + +#ifndef GL_OES_mapbuffer +#define GL_OES_mapbuffer 1 +#define GL_WRITE_ONLY_OES 0x88B9 +#define GL_BUFFER_ACCESS_OES 0x88BB +#define GL_BUFFER_MAPPED_OES 0x88BC +#define GL_BUFFER_MAP_POINTER_OES 0x88BD +typedef void *(GL_APIENTRYP PFNGLMAPBUFFEROESPROC) (GLenum target, GLenum access); +typedef GLboolean (GL_APIENTRYP PFNGLUNMAPBUFFEROESPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVOESPROC) (GLenum target, GLenum pname, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void *GL_APIENTRY glMapBufferOES (GLenum target, GLenum access); +GL_API GLboolean GL_APIENTRY glUnmapBufferOES (GLenum target); +GL_API void GL_APIENTRY glGetBufferPointervOES (GLenum target, GLenum pname, void **params); +#endif +#endif /* GL_OES_mapbuffer */ + +#ifndef GL_OES_matrix_get +#define GL_OES_matrix_get 1 +#define GL_MODELVIEW_MATRIX_FLOAT_AS_INT_BITS_OES 0x898D +#define GL_PROJECTION_MATRIX_FLOAT_AS_INT_BITS_OES 0x898E +#define GL_TEXTURE_MATRIX_FLOAT_AS_INT_BITS_OES 0x898F +#endif /* GL_OES_matrix_get */ + +#ifndef GL_OES_matrix_palette +#define GL_OES_matrix_palette 1 +#define GL_MAX_VERTEX_UNITS_OES 0x86A4 +#define GL_MAX_PALETTE_MATRICES_OES 0x8842 +#define GL_MATRIX_PALETTE_OES 0x8840 +#define GL_MATRIX_INDEX_ARRAY_OES 0x8844 +#define GL_WEIGHT_ARRAY_OES 0x86AD +#define GL_CURRENT_PALETTE_MATRIX_OES 0x8843 +#define GL_MATRIX_INDEX_ARRAY_SIZE_OES 0x8846 +#define GL_MATRIX_INDEX_ARRAY_TYPE_OES 0x8847 +#define GL_MATRIX_INDEX_ARRAY_STRIDE_OES 0x8848 +#define GL_MATRIX_INDEX_ARRAY_POINTER_OES 0x8849 +#define GL_MATRIX_INDEX_ARRAY_BUFFER_BINDING_OES 0x8B9E +#define GL_WEIGHT_ARRAY_SIZE_OES 0x86AB +#define GL_WEIGHT_ARRAY_TYPE_OES 0x86A9 +#define GL_WEIGHT_ARRAY_STRIDE_OES 0x86AA +#define GL_WEIGHT_ARRAY_POINTER_OES 0x86AC +#define GL_WEIGHT_ARRAY_BUFFER_BINDING_OES 0x889E +typedef void (GL_APIENTRYP 
PFNGLCURRENTPALETTEMATRIXOESPROC) (GLuint matrixpaletteindex); +typedef void (GL_APIENTRYP PFNGLLOADPALETTEFROMMODELVIEWMATRIXOESPROC) (void); +typedef void (GL_APIENTRYP PFNGLMATRIXINDEXPOINTEROESPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLWEIGHTPOINTEROESPROC) (GLint size, GLenum type, GLsizei stride, const void *pointer); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glCurrentPaletteMatrixOES (GLuint matrixpaletteindex); +GL_API void GL_APIENTRY glLoadPaletteFromModelViewMatrixOES (void); +GL_API void GL_APIENTRY glMatrixIndexPointerOES (GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_API void GL_APIENTRY glWeightPointerOES (GLint size, GLenum type, GLsizei stride, const void *pointer); +#endif +#endif /* GL_OES_matrix_palette */ + +#ifndef GL_OES_packed_depth_stencil +#define GL_OES_packed_depth_stencil 1 +#define GL_DEPTH_STENCIL_OES 0x84F9 +#define GL_UNSIGNED_INT_24_8_OES 0x84FA +#define GL_DEPTH24_STENCIL8_OES 0x88F0 +#endif /* GL_OES_packed_depth_stencil */ + +#ifndef GL_OES_query_matrix +#define GL_OES_query_matrix 1 +typedef GLbitfield (GL_APIENTRYP PFNGLQUERYMATRIXXOESPROC) (GLfixed *mantissa, GLint *exponent); +#ifdef GL_GLEXT_PROTOTYPES +GL_API GLbitfield GL_APIENTRY glQueryMatrixxOES (GLfixed *mantissa, GLint *exponent); +#endif +#endif /* GL_OES_query_matrix */ + +#ifndef GL_OES_required_internalformat +#define GL_OES_required_internalformat 1 +#define GL_ALPHA8_OES 0x803C +#define GL_LUMINANCE4_ALPHA4_OES 0x8043 +#define GL_LUMINANCE8_ALPHA8_OES 0x8045 +#define GL_LUMINANCE8_OES 0x8040 +#define GL_RGB8_OES 0x8051 +#define GL_RGBA8_OES 0x8058 +#define GL_RGB10_EXT 0x8052 +#define GL_RGB10_A2_EXT 0x8059 +#endif /* GL_OES_required_internalformat */ + +#ifndef GL_OES_rgb8_rgba8 +#define GL_OES_rgb8_rgba8 1 +#endif /* GL_OES_rgb8_rgba8 */ + +#ifndef GL_OES_single_precision +#define GL_OES_single_precision 1 +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHFOESPROC) (GLclampf depth); +typedef void (GL_APIENTRYP PFNGLCLIPPLANEFOESPROC) (GLenum plane, const GLfloat *equation); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEFOESPROC) (GLclampf n, GLclampf f); +typedef void (GL_APIENTRYP PFNGLFRUSTUMFOESPROC) (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLGETCLIPPLANEFOESPROC) (GLenum plane, GLfloat *equation); +typedef void (GL_APIENTRYP PFNGLORTHOFOESPROC) (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glClearDepthfOES (GLclampf depth); +GL_API void GL_APIENTRY glClipPlanefOES (GLenum plane, const GLfloat *equation); +GL_API void GL_APIENTRY glDepthRangefOES (GLclampf n, GLclampf f); +GL_API void GL_APIENTRY glFrustumfOES (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +GL_API void GL_APIENTRY glGetClipPlanefOES (GLenum plane, GLfloat *equation); +GL_API void GL_APIENTRY glOrthofOES (GLfloat l, GLfloat r, GLfloat b, GLfloat t, GLfloat n, GLfloat f); +#endif +#endif /* GL_OES_single_precision */ + +#ifndef GL_OES_stencil1 +#define GL_OES_stencil1 1 +#define GL_STENCIL_INDEX1_OES 0x8D46 +#endif /* GL_OES_stencil1 */ + +#ifndef GL_OES_stencil4 +#define GL_OES_stencil4 1 +#define GL_STENCIL_INDEX4_OES 0x8D47 +#endif /* GL_OES_stencil4 */ + +#ifndef GL_OES_stencil8 +#define GL_OES_stencil8 1 +#define GL_STENCIL_INDEX8_OES 0x8D48 +#endif /* GL_OES_stencil8 */ + +#ifndef GL_OES_stencil_wrap +#define GL_OES_stencil_wrap 1 +#define GL_INCR_WRAP_OES 0x8507 +#define 
GL_DECR_WRAP_OES 0x8508 +#endif /* GL_OES_stencil_wrap */ + +#ifndef GL_OES_surfaceless_context +#define GL_OES_surfaceless_context 1 +#define GL_FRAMEBUFFER_UNDEFINED_OES 0x8219 +#endif /* GL_OES_surfaceless_context */ + +#ifndef GL_OES_texture_cube_map +#define GL_OES_texture_cube_map 1 +#define GL_NORMAL_MAP_OES 0x8511 +#define GL_REFLECTION_MAP_OES 0x8512 +#define GL_TEXTURE_CUBE_MAP_OES 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP_OES 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X_OES 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X_OES 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y_OES 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_OES 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z_OES 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_OES 0x851A +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE_OES 0x851C +#define GL_TEXTURE_GEN_MODE_OES 0x2500 +#define GL_TEXTURE_GEN_STR_OES 0x8D60 +typedef void (GL_APIENTRYP PFNGLTEXGENFOESPROC) (GLenum coord, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXGENFVOESPROC) (GLenum coord, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXGENIOESPROC) (GLenum coord, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXGENIVOESPROC) (GLenum coord, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXGENFVOESPROC) (GLenum coord, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXGENIVOESPROC) (GLenum coord, GLenum pname, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glTexGenfOES (GLenum coord, GLenum pname, GLfloat param); +GL_API void GL_APIENTRY glTexGenfvOES (GLenum coord, GLenum pname, const GLfloat *params); +GL_API void GL_APIENTRY glTexGeniOES (GLenum coord, GLenum pname, GLint param); +GL_API void GL_APIENTRY glTexGenivOES (GLenum coord, GLenum pname, const GLint *params); +GL_API void GL_APIENTRY glGetTexGenfvOES (GLenum coord, GLenum pname, GLfloat *params); +GL_API void GL_APIENTRY glGetTexGenivOES (GLenum coord, GLenum pname, GLint *params); +#endif +#endif /* GL_OES_texture_cube_map */ + +#ifndef GL_OES_texture_env_crossbar +#define GL_OES_texture_env_crossbar 1 +#endif /* GL_OES_texture_env_crossbar */ + +#ifndef GL_OES_texture_mirrored_repeat +#define GL_OES_texture_mirrored_repeat 1 +#define GL_MIRRORED_REPEAT_OES 0x8370 +#endif /* GL_OES_texture_mirrored_repeat */ + +#ifndef GL_OES_texture_npot +#define GL_OES_texture_npot 1 +#endif /* GL_OES_texture_npot */ + +#ifndef GL_OES_vertex_array_object +#define GL_OES_vertex_array_object 1 +#define GL_VERTEX_ARRAY_BINDING_OES 0x85B5 +typedef void (GL_APIENTRYP PFNGLBINDVERTEXARRAYOESPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLDELETEVERTEXARRAYSOESPROC) (GLsizei n, const GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLGENVERTEXARRAYSOESPROC) (GLsizei n, GLuint *arrays); +typedef GLboolean (GL_APIENTRYP PFNGLISVERTEXARRAYOESPROC) (GLuint array); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glBindVertexArrayOES (GLuint array); +GL_API void GL_APIENTRY glDeleteVertexArraysOES (GLsizei n, const GLuint *arrays); +GL_API void GL_APIENTRY glGenVertexArraysOES (GLsizei n, GLuint *arrays); +GL_API GLboolean GL_APIENTRY glIsVertexArrayOES (GLuint array); +#endif +#endif /* GL_OES_vertex_array_object */ + +#ifndef GL_AMD_compressed_3DC_texture +#define GL_AMD_compressed_3DC_texture 1 +#define GL_3DC_X_AMD 0x87F9 +#define GL_3DC_XY_AMD 0x87FA +#endif /* GL_AMD_compressed_3DC_texture */ + +#ifndef GL_AMD_compressed_ATC_texture +#define GL_AMD_compressed_ATC_texture 1 +#define 
GL_ATC_RGB_AMD 0x8C92 +#define GL_ATC_RGBA_EXPLICIT_ALPHA_AMD 0x8C93 +#define GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD 0x87EE +#endif /* GL_AMD_compressed_ATC_texture */ + +#ifndef GL_APPLE_copy_texture_levels +#define GL_APPLE_copy_texture_levels 1 +typedef void (GL_APIENTRYP PFNGLCOPYTEXTURELEVELSAPPLEPROC) (GLuint destinationTexture, GLuint sourceTexture, GLint sourceBaseLevel, GLsizei sourceLevelCount); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glCopyTextureLevelsAPPLE (GLuint destinationTexture, GLuint sourceTexture, GLint sourceBaseLevel, GLsizei sourceLevelCount); +#endif +#endif /* GL_APPLE_copy_texture_levels */ + +#ifndef GL_APPLE_framebuffer_multisample +#define GL_APPLE_framebuffer_multisample 1 +#define GL_RENDERBUFFER_SAMPLES_APPLE 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_APPLE 0x8D56 +#define GL_MAX_SAMPLES_APPLE 0x8D57 +#define GL_READ_FRAMEBUFFER_APPLE 0x8CA8 +#define GL_DRAW_FRAMEBUFFER_APPLE 0x8CA9 +#define GL_DRAW_FRAMEBUFFER_BINDING_APPLE 0x8CA6 +#define GL_READ_FRAMEBUFFER_BINDING_APPLE 0x8CAA +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEAPPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLRESOLVEMULTISAMPLEFRAMEBUFFERAPPLEPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glRenderbufferStorageMultisampleAPPLE (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glResolveMultisampleFramebufferAPPLE (void); +#endif +#endif /* GL_APPLE_framebuffer_multisample */ + +#ifndef GL_APPLE_sync +#define GL_APPLE_sync 1 +typedef struct __GLsync *GLsync; +typedef khronos_uint64_t GLuint64; +typedef khronos_int64_t GLint64; +#define GL_SYNC_OBJECT_APPLE 0x8A53 +#define GL_MAX_SERVER_WAIT_TIMEOUT_APPLE 0x9111 +#define GL_OBJECT_TYPE_APPLE 0x9112 +#define GL_SYNC_CONDITION_APPLE 0x9113 +#define GL_SYNC_STATUS_APPLE 0x9114 +#define GL_SYNC_FLAGS_APPLE 0x9115 +#define GL_SYNC_FENCE_APPLE 0x9116 +#define GL_SYNC_GPU_COMMANDS_COMPLETE_APPLE 0x9117 +#define GL_UNSIGNALED_APPLE 0x9118 +#define GL_SIGNALED_APPLE 0x9119 +#define GL_ALREADY_SIGNALED_APPLE 0x911A +#define GL_TIMEOUT_EXPIRED_APPLE 0x911B +#define GL_CONDITION_SATISFIED_APPLE 0x911C +#define GL_WAIT_FAILED_APPLE 0x911D +#define GL_SYNC_FLUSH_COMMANDS_BIT_APPLE 0x00000001 +#define GL_TIMEOUT_IGNORED_APPLE 0xFFFFFFFFFFFFFFFFull +typedef GLsync (GL_APIENTRYP PFNGLFENCESYNCAPPLEPROC) (GLenum condition, GLbitfield flags); +typedef GLboolean (GL_APIENTRYP PFNGLISSYNCAPPLEPROC) (GLsync sync); +typedef void (GL_APIENTRYP PFNGLDELETESYNCAPPLEPROC) (GLsync sync); +typedef GLenum (GL_APIENTRYP PFNGLCLIENTWAITSYNCAPPLEPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLWAITSYNCAPPLEPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VAPPLEPROC) (GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGETSYNCIVAPPLEPROC) (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +#ifdef GL_GLEXT_PROTOTYPES +GL_API GLsync GL_APIENTRY glFenceSyncAPPLE (GLenum condition, GLbitfield flags); +GL_API GLboolean GL_APIENTRY glIsSyncAPPLE (GLsync sync); +GL_API void GL_APIENTRY glDeleteSyncAPPLE (GLsync sync); +GL_API GLenum GL_APIENTRY glClientWaitSyncAPPLE (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_API void GL_APIENTRY glWaitSyncAPPLE (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_API void GL_APIENTRY glGetInteger64vAPPLE 
(GLenum pname, GLint64 *params); +GL_API void GL_APIENTRY glGetSyncivAPPLE (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +#endif +#endif /* GL_APPLE_sync */ + +#ifndef GL_APPLE_texture_2D_limited_npot +#define GL_APPLE_texture_2D_limited_npot 1 +#endif /* GL_APPLE_texture_2D_limited_npot */ + +#ifndef GL_APPLE_texture_format_BGRA8888 +#define GL_APPLE_texture_format_BGRA8888 1 +#define GL_BGRA_EXT 0x80E1 +#define GL_BGRA8_EXT 0x93A1 +#endif /* GL_APPLE_texture_format_BGRA8888 */ + +#ifndef GL_APPLE_texture_max_level +#define GL_APPLE_texture_max_level 1 +#define GL_TEXTURE_MAX_LEVEL_APPLE 0x813D +#endif /* GL_APPLE_texture_max_level */ + +#ifndef GL_ARM_rgba8 +#define GL_ARM_rgba8 1 +#endif /* GL_ARM_rgba8 */ + +#ifndef GL_EXT_blend_minmax +#define GL_EXT_blend_minmax 1 +#define GL_MIN_EXT 0x8007 +#define GL_MAX_EXT 0x8008 +#endif /* GL_EXT_blend_minmax */ + +#ifndef GL_EXT_debug_marker +#define GL_EXT_debug_marker 1 +typedef char GLchar; +typedef void (GL_APIENTRYP PFNGLINSERTEVENTMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (GL_APIENTRYP PFNGLPUSHGROUPMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (GL_APIENTRYP PFNGLPOPGROUPMARKEREXTPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glInsertEventMarkerEXT (GLsizei length, const GLchar *marker); +GL_API void GL_APIENTRY glPushGroupMarkerEXT (GLsizei length, const GLchar *marker); +GL_API void GL_APIENTRY glPopGroupMarkerEXT (void); +#endif +#endif /* GL_EXT_debug_marker */ + +#ifndef GL_EXT_discard_framebuffer +#define GL_EXT_discard_framebuffer 1 +#define GL_COLOR_EXT 0x1800 +#define GL_DEPTH_EXT 0x1801 +#define GL_STENCIL_EXT 0x1802 +typedef void (GL_APIENTRYP PFNGLDISCARDFRAMEBUFFEREXTPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glDiscardFramebufferEXT (GLenum target, GLsizei numAttachments, const GLenum *attachments); +#endif +#endif /* GL_EXT_discard_framebuffer */ + +#ifndef GL_EXT_map_buffer_range +#define GL_EXT_map_buffer_range 1 +#define GL_MAP_READ_BIT_EXT 0x0001 +#define GL_MAP_WRITE_BIT_EXT 0x0002 +#define GL_MAP_INVALIDATE_RANGE_BIT_EXT 0x0004 +#define GL_MAP_INVALIDATE_BUFFER_BIT_EXT 0x0008 +#define GL_MAP_FLUSH_EXPLICIT_BIT_EXT 0x0010 +#define GL_MAP_UNSYNCHRONIZED_BIT_EXT 0x0020 +typedef void *(GL_APIENTRYP PFNGLMAPBUFFERRANGEEXTPROC) (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (GL_APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEEXTPROC) (GLenum target, GLintptr offset, GLsizeiptr length); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void *GL_APIENTRY glMapBufferRangeEXT (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GL_API void GL_APIENTRY glFlushMappedBufferRangeEXT (GLenum target, GLintptr offset, GLsizeiptr length); +#endif +#endif /* GL_EXT_map_buffer_range */ + +#ifndef GL_EXT_multi_draw_arrays +#define GL_EXT_multi_draw_arrays 1 +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSEXTPROC) (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSEXTPROC) (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glMultiDrawArraysEXT (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +GL_API void GL_APIENTRY glMultiDrawElementsEXT (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei 
primcount); +#endif +#endif /* GL_EXT_multi_draw_arrays */ + +#ifndef GL_EXT_multisampled_render_to_texture +#define GL_EXT_multisampled_render_to_texture 1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_SAMPLES_EXT 0x8D6C +#define GL_RENDERBUFFER_SAMPLES_EXT 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_EXT 0x8D56 +#define GL_MAX_SAMPLES_EXT 0x8D57 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEEXTPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glRenderbufferStorageMultisampleEXT (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glFramebufferTexture2DMultisampleEXT (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#endif +#endif /* GL_EXT_multisampled_render_to_texture */ + +#ifndef GL_EXT_read_format_bgra +#define GL_EXT_read_format_bgra 1 +#define GL_UNSIGNED_SHORT_4_4_4_4_REV_EXT 0x8365 +#define GL_UNSIGNED_SHORT_1_5_5_5_REV_EXT 0x8366 +#endif /* GL_EXT_read_format_bgra */ + +#ifndef GL_EXT_robustness +#define GL_EXT_robustness 1 +#define GL_GUILTY_CONTEXT_RESET_EXT 0x8253 +#define GL_INNOCENT_CONTEXT_RESET_EXT 0x8254 +#define GL_UNKNOWN_CONTEXT_RESET_EXT 0x8255 +#define GL_CONTEXT_ROBUST_ACCESS_EXT 0x90F3 +#define GL_RESET_NOTIFICATION_STRATEGY_EXT 0x8256 +#define GL_LOSE_CONTEXT_ON_RESET_EXT 0x8252 +#define GL_NO_RESET_NOTIFICATION_EXT 0x8261 +typedef GLenum (GL_APIENTRYP PFNGLGETGRAPHICSRESETSTATUSEXTPROC) (void); +typedef void (GL_APIENTRYP PFNGLREADNPIXELSEXTPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMFVEXTPROC) (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMIVEXTPROC) (GLuint program, GLint location, GLsizei bufSize, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_API GLenum GL_APIENTRY glGetGraphicsResetStatusEXT (void); +GL_API void GL_APIENTRY glReadnPixelsEXT (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GL_API void GL_APIENTRY glGetnUniformfvEXT (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GL_API void GL_APIENTRY glGetnUniformivEXT (GLuint program, GLint location, GLsizei bufSize, GLint *params); +#endif +#endif /* GL_EXT_robustness */ + +#ifndef GL_EXT_sRGB +#define GL_EXT_sRGB 1 +#define GL_SRGB_EXT 0x8C40 +#define GL_SRGB_ALPHA_EXT 0x8C42 +#define GL_SRGB8_ALPHA8_EXT 0x8C43 +#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING_EXT 0x8210 +#endif /* GL_EXT_sRGB */ + +#ifndef GL_EXT_texture_compression_dxt1 +#define GL_EXT_texture_compression_dxt1 1 +#define GL_COMPRESSED_RGB_S3TC_DXT1_EXT 0x83F0 +#define GL_COMPRESSED_RGBA_S3TC_DXT1_EXT 0x83F1 +#endif /* GL_EXT_texture_compression_dxt1 */ + +#ifndef GL_EXT_texture_filter_anisotropic +#define GL_EXT_texture_filter_anisotropic 1 +#define GL_TEXTURE_MAX_ANISOTROPY_EXT 0x84FE +#define GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT 0x84FF +#endif /* GL_EXT_texture_filter_anisotropic */ + +#ifndef GL_EXT_texture_format_BGRA8888 +#define GL_EXT_texture_format_BGRA8888 1 +#endif /* GL_EXT_texture_format_BGRA8888 */ + +#ifndef GL_EXT_texture_lod_bias +#define GL_EXT_texture_lod_bias 1 
+#define GL_MAX_TEXTURE_LOD_BIAS_EXT 0x84FD +#define GL_TEXTURE_FILTER_CONTROL_EXT 0x8500 +#define GL_TEXTURE_LOD_BIAS_EXT 0x8501 +#endif /* GL_EXT_texture_lod_bias */ + +#ifndef GL_EXT_texture_storage +#define GL_EXT_texture_storage 1 +#define GL_TEXTURE_IMMUTABLE_FORMAT_EXT 0x912F +#define GL_ALPHA8_EXT 0x803C +#define GL_LUMINANCE8_EXT 0x8040 +#define GL_LUMINANCE8_ALPHA8_EXT 0x8045 +#define GL_RGBA32F_EXT 0x8814 +#define GL_RGB32F_EXT 0x8815 +#define GL_ALPHA32F_EXT 0x8816 +#define GL_LUMINANCE32F_EXT 0x8818 +#define GL_LUMINANCE_ALPHA32F_EXT 0x8819 +#define GL_RGBA16F_EXT 0x881A +#define GL_RGB16F_EXT 0x881B +#define GL_ALPHA16F_EXT 0x881C +#define GL_LUMINANCE16F_EXT 0x881E +#define GL_LUMINANCE_ALPHA16F_EXT 0x881F +#define GL_R8_EXT 0x8229 +#define GL_RG8_EXT 0x822B +#define GL_R32F_EXT 0x822E +#define GL_RG32F_EXT 0x8230 +#define GL_R16F_EXT 0x822D +#define GL_RG16F_EXT 0x822F +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE1DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE1DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE2DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE3DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glTexStorage1DEXT (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GL_API void GL_APIENTRY glTexStorage2DEXT (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glTexStorage3DEXT (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GL_API void GL_APIENTRY glTextureStorage1DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GL_API void GL_APIENTRY glTextureStorage2DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY glTextureStorage3DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +#endif +#endif /* GL_EXT_texture_storage */ + +#ifndef GL_IMG_multisampled_render_to_texture +#define GL_IMG_multisampled_render_to_texture 1 +#define GL_RENDERBUFFER_SAMPLES_IMG 0x9133 +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_IMG 0x9134 +#define GL_MAX_SAMPLES_IMG 0x9135 +#define GL_TEXTURE_SAMPLES_IMG 0x9136 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEIMGPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEIMGPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glRenderbufferStorageMultisampleIMG (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_API void GL_APIENTRY 
glFramebufferTexture2DMultisampleIMG (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#endif +#endif /* GL_IMG_multisampled_render_to_texture */ + +#ifndef GL_IMG_read_format +#define GL_IMG_read_format 1 +#define GL_BGRA_IMG 0x80E1 +#define GL_UNSIGNED_SHORT_4_4_4_4_REV_IMG 0x8365 +#endif /* GL_IMG_read_format */ + +#ifndef GL_IMG_texture_compression_pvrtc +#define GL_IMG_texture_compression_pvrtc 1 +#define GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG 0x8C00 +#define GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG 0x8C01 +#define GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG 0x8C02 +#define GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG 0x8C03 +#endif /* GL_IMG_texture_compression_pvrtc */ + +#ifndef GL_IMG_texture_env_enhanced_fixed_function +#define GL_IMG_texture_env_enhanced_fixed_function 1 +#define GL_MODULATE_COLOR_IMG 0x8C04 +#define GL_RECIP_ADD_SIGNED_ALPHA_IMG 0x8C05 +#define GL_TEXTURE_ALPHA_MODULATE_IMG 0x8C06 +#define GL_FACTOR_ALPHA_MODULATE_IMG 0x8C07 +#define GL_FRAGMENT_ALPHA_MODULATE_IMG 0x8C08 +#define GL_ADD_BLEND_IMG 0x8C09 +#define GL_DOT3_RGBA_IMG 0x86AF +#endif /* GL_IMG_texture_env_enhanced_fixed_function */ + +#ifndef GL_IMG_user_clip_plane +#define GL_IMG_user_clip_plane 1 +#define GL_CLIP_PLANE0_IMG 0x3000 +#define GL_CLIP_PLANE1_IMG 0x3001 +#define GL_CLIP_PLANE2_IMG 0x3002 +#define GL_CLIP_PLANE3_IMG 0x3003 +#define GL_CLIP_PLANE4_IMG 0x3004 +#define GL_CLIP_PLANE5_IMG 0x3005 +#define GL_MAX_CLIP_PLANES_IMG 0x0D32 +typedef void (GL_APIENTRYP PFNGLCLIPPLANEFIMGPROC) (GLenum p, const GLfloat *eqn); +typedef void (GL_APIENTRYP PFNGLCLIPPLANEXIMGPROC) (GLenum p, const GLfixed *eqn); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glClipPlanefIMG (GLenum p, const GLfloat *eqn); +GL_API void GL_APIENTRY glClipPlanexIMG (GLenum p, const GLfixed *eqn); +#endif +#endif /* GL_IMG_user_clip_plane */ + +#ifndef GL_NV_fence +#define GL_NV_fence 1 +#define GL_ALL_COMPLETED_NV 0x84F2 +#define GL_FENCE_STATUS_NV 0x84F3 +#define GL_FENCE_CONDITION_NV 0x84F4 +typedef void (GL_APIENTRYP PFNGLDELETEFENCESNVPROC) (GLsizei n, const GLuint *fences); +typedef void (GL_APIENTRYP PFNGLGENFENCESNVPROC) (GLsizei n, GLuint *fences); +typedef GLboolean (GL_APIENTRYP PFNGLISFENCENVPROC) (GLuint fence); +typedef GLboolean (GL_APIENTRYP PFNGLTESTFENCENVPROC) (GLuint fence); +typedef void (GL_APIENTRYP PFNGLGETFENCEIVNVPROC) (GLuint fence, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLFINISHFENCENVPROC) (GLuint fence); +typedef void (GL_APIENTRYP PFNGLSETFENCENVPROC) (GLuint fence, GLenum condition); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glDeleteFencesNV (GLsizei n, const GLuint *fences); +GL_API void GL_APIENTRY glGenFencesNV (GLsizei n, GLuint *fences); +GL_API GLboolean GL_APIENTRY glIsFenceNV (GLuint fence); +GL_API GLboolean GL_APIENTRY glTestFenceNV (GLuint fence); +GL_API void GL_APIENTRY glGetFenceivNV (GLuint fence, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glFinishFenceNV (GLuint fence); +GL_API void GL_APIENTRY glSetFenceNV (GLuint fence, GLenum condition); +#endif +#endif /* GL_NV_fence */ + +#ifndef GL_QCOM_driver_control +#define GL_QCOM_driver_control 1 +typedef void (GL_APIENTRYP PFNGLGETDRIVERCONTROLSQCOMPROC) (GLint *num, GLsizei size, GLuint *driverControls); +typedef void (GL_APIENTRYP PFNGLGETDRIVERCONTROLSTRINGQCOMPROC) (GLuint driverControl, GLsizei bufSize, GLsizei *length, GLchar *driverControlString); +typedef void (GL_APIENTRYP PFNGLENABLEDRIVERCONTROLQCOMPROC) (GLuint driverControl); +typedef void 
(GL_APIENTRYP PFNGLDISABLEDRIVERCONTROLQCOMPROC) (GLuint driverControl); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glGetDriverControlsQCOM (GLint *num, GLsizei size, GLuint *driverControls); +GL_API void GL_APIENTRY glGetDriverControlStringQCOM (GLuint driverControl, GLsizei bufSize, GLsizei *length, GLchar *driverControlString); +GL_API void GL_APIENTRY glEnableDriverControlQCOM (GLuint driverControl); +GL_API void GL_APIENTRY glDisableDriverControlQCOM (GLuint driverControl); +#endif +#endif /* GL_QCOM_driver_control */ + +#ifndef GL_QCOM_extended_get +#define GL_QCOM_extended_get 1 +#define GL_TEXTURE_WIDTH_QCOM 0x8BD2 +#define GL_TEXTURE_HEIGHT_QCOM 0x8BD3 +#define GL_TEXTURE_DEPTH_QCOM 0x8BD4 +#define GL_TEXTURE_INTERNAL_FORMAT_QCOM 0x8BD5 +#define GL_TEXTURE_FORMAT_QCOM 0x8BD6 +#define GL_TEXTURE_TYPE_QCOM 0x8BD7 +#define GL_TEXTURE_IMAGE_VALID_QCOM 0x8BD8 +#define GL_TEXTURE_NUM_LEVELS_QCOM 0x8BD9 +#define GL_TEXTURE_TARGET_QCOM 0x8BDA +#define GL_TEXTURE_OBJECT_VALID_QCOM 0x8BDB +#define GL_STATE_RESTORE 0x8BDC +typedef void (GL_APIENTRYP PFNGLEXTGETTEXTURESQCOMPROC) (GLuint *textures, GLint maxTextures, GLint *numTextures); +typedef void (GL_APIENTRYP PFNGLEXTGETBUFFERSQCOMPROC) (GLuint *buffers, GLint maxBuffers, GLint *numBuffers); +typedef void (GL_APIENTRYP PFNGLEXTGETRENDERBUFFERSQCOMPROC) (GLuint *renderbuffers, GLint maxRenderbuffers, GLint *numRenderbuffers); +typedef void (GL_APIENTRYP PFNGLEXTGETFRAMEBUFFERSQCOMPROC) (GLuint *framebuffers, GLint maxFramebuffers, GLint *numFramebuffers); +typedef void (GL_APIENTRYP PFNGLEXTGETTEXLEVELPARAMETERIVQCOMPROC) (GLuint texture, GLenum face, GLint level, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLEXTTEXOBJECTSTATEOVERRIDEIQCOMPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLEXTGETTEXSUBIMAGEQCOMPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, void *texels); +typedef void (GL_APIENTRYP PFNGLEXTGETBUFFERPOINTERVQCOMPROC) (GLenum target, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glExtGetTexturesQCOM (GLuint *textures, GLint maxTextures, GLint *numTextures); +GL_API void GL_APIENTRY glExtGetBuffersQCOM (GLuint *buffers, GLint maxBuffers, GLint *numBuffers); +GL_API void GL_APIENTRY glExtGetRenderbuffersQCOM (GLuint *renderbuffers, GLint maxRenderbuffers, GLint *numRenderbuffers); +GL_API void GL_APIENTRY glExtGetFramebuffersQCOM (GLuint *framebuffers, GLint maxFramebuffers, GLint *numFramebuffers); +GL_API void GL_APIENTRY glExtGetTexLevelParameterivQCOM (GLuint texture, GLenum face, GLint level, GLenum pname, GLint *params); +GL_API void GL_APIENTRY glExtTexObjectStateOverrideiQCOM (GLenum target, GLenum pname, GLint param); +GL_API void GL_APIENTRY glExtGetTexSubImageQCOM (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, void *texels); +GL_API void GL_APIENTRY glExtGetBufferPointervQCOM (GLenum target, void **params); +#endif +#endif /* GL_QCOM_extended_get */ + +#ifndef GL_QCOM_extended_get2 +#define GL_QCOM_extended_get2 1 +typedef void (GL_APIENTRYP PFNGLEXTGETSHADERSQCOMPROC) (GLuint *shaders, GLint maxShaders, GLint *numShaders); +typedef void (GL_APIENTRYP PFNGLEXTGETPROGRAMSQCOMPROC) (GLuint *programs, GLint maxPrograms, GLint *numPrograms); +typedef GLboolean (GL_APIENTRYP PFNGLEXTISPROGRAMBINARYQCOMPROC) (GLuint program); +typedef 
void (GL_APIENTRYP PFNGLEXTGETPROGRAMBINARYSOURCEQCOMPROC) (GLuint program, GLenum shadertype, GLchar *source, GLint *length); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glExtGetShadersQCOM (GLuint *shaders, GLint maxShaders, GLint *numShaders); +GL_API void GL_APIENTRY glExtGetProgramsQCOM (GLuint *programs, GLint maxPrograms, GLint *numPrograms); +GL_API GLboolean GL_APIENTRY glExtIsProgramBinaryQCOM (GLuint program); +GL_API void GL_APIENTRY glExtGetProgramBinarySourceQCOM (GLuint program, GLenum shadertype, GLchar *source, GLint *length); +#endif +#endif /* GL_QCOM_extended_get2 */ + +#ifndef GL_QCOM_perfmon_global_mode +#define GL_QCOM_perfmon_global_mode 1 +#define GL_PERFMON_GLOBAL_MODE_QCOM 0x8FA0 +#endif /* GL_QCOM_perfmon_global_mode */ + +#ifndef GL_QCOM_tiled_rendering +#define GL_QCOM_tiled_rendering 1 +#define GL_COLOR_BUFFER_BIT0_QCOM 0x00000001 +#define GL_COLOR_BUFFER_BIT1_QCOM 0x00000002 +#define GL_COLOR_BUFFER_BIT2_QCOM 0x00000004 +#define GL_COLOR_BUFFER_BIT3_QCOM 0x00000008 +#define GL_COLOR_BUFFER_BIT4_QCOM 0x00000010 +#define GL_COLOR_BUFFER_BIT5_QCOM 0x00000020 +#define GL_COLOR_BUFFER_BIT6_QCOM 0x00000040 +#define GL_COLOR_BUFFER_BIT7_QCOM 0x00000080 +#define GL_DEPTH_BUFFER_BIT0_QCOM 0x00000100 +#define GL_DEPTH_BUFFER_BIT1_QCOM 0x00000200 +#define GL_DEPTH_BUFFER_BIT2_QCOM 0x00000400 +#define GL_DEPTH_BUFFER_BIT3_QCOM 0x00000800 +#define GL_DEPTH_BUFFER_BIT4_QCOM 0x00001000 +#define GL_DEPTH_BUFFER_BIT5_QCOM 0x00002000 +#define GL_DEPTH_BUFFER_BIT6_QCOM 0x00004000 +#define GL_DEPTH_BUFFER_BIT7_QCOM 0x00008000 +#define GL_STENCIL_BUFFER_BIT0_QCOM 0x00010000 +#define GL_STENCIL_BUFFER_BIT1_QCOM 0x00020000 +#define GL_STENCIL_BUFFER_BIT2_QCOM 0x00040000 +#define GL_STENCIL_BUFFER_BIT3_QCOM 0x00080000 +#define GL_STENCIL_BUFFER_BIT4_QCOM 0x00100000 +#define GL_STENCIL_BUFFER_BIT5_QCOM 0x00200000 +#define GL_STENCIL_BUFFER_BIT6_QCOM 0x00400000 +#define GL_STENCIL_BUFFER_BIT7_QCOM 0x00800000 +#define GL_MULTISAMPLE_BUFFER_BIT0_QCOM 0x01000000 +#define GL_MULTISAMPLE_BUFFER_BIT1_QCOM 0x02000000 +#define GL_MULTISAMPLE_BUFFER_BIT2_QCOM 0x04000000 +#define GL_MULTISAMPLE_BUFFER_BIT3_QCOM 0x08000000 +#define GL_MULTISAMPLE_BUFFER_BIT4_QCOM 0x10000000 +#define GL_MULTISAMPLE_BUFFER_BIT5_QCOM 0x20000000 +#define GL_MULTISAMPLE_BUFFER_BIT6_QCOM 0x40000000 +#define GL_MULTISAMPLE_BUFFER_BIT7_QCOM 0x80000000 +typedef void (GL_APIENTRYP PFNGLSTARTTILINGQCOMPROC) (GLuint x, GLuint y, GLuint width, GLuint height, GLbitfield preserveMask); +typedef void (GL_APIENTRYP PFNGLENDTILINGQCOMPROC) (GLbitfield preserveMask); +#ifdef GL_GLEXT_PROTOTYPES +GL_API void GL_APIENTRY glStartTilingQCOM (GLuint x, GLuint y, GLuint width, GLuint height, GLbitfield preserveMask); +GL_API void GL_APIENTRY glEndTilingQCOM (GLbitfield preserveMask); +#endif +#endif /* GL_QCOM_tiled_rendering */ + +#ifndef GL_QCOM_writeonly_rendering +#define GL_QCOM_writeonly_rendering 1 +#define GL_WRITEONLY_RENDERING_QCOM 0x8823 +#endif /* GL_QCOM_writeonly_rendering */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/GLES/glplatform.h b/vendor/swiftshader/include/GLES/glplatform.h new file mode 100644 index 00000000..16060a9a --- /dev/null +++ b/vendor/swiftshader/include/GLES/glplatform.h @@ -0,0 +1,38 @@ +#ifndef __glplatform_h_ +#define __glplatform_h_ + +/* +** Copyright (c) 2017 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* Platform-specific types and definitions for OpenGL ES 1.X gl.h + * + * Adopters may modify khrplatform.h and this file to suit their platform. + * Please contribute modifications back to Khronos as pull requests on the + * public github repository: + * https://github.com/KhronosGroup/OpenGL-Registry + */ + +#include <KHR/khrplatform.h> + +#ifndef GL_API +#define GL_API KHRONOS_APICALL +#endif + +#ifndef GL_APIENTRY +#define GL_APIENTRY KHRONOS_APIENTRY +#endif + +#endif /* __glplatform_h_ */ diff --git a/vendor/swiftshader/include/GLES2/gl2.h b/vendor/swiftshader/include/GLES2/gl2.h new file mode 100644 index 00000000..5e8e4c5d --- /dev/null +++ b/vendor/swiftshader/include/GLES2/gl2.h @@ -0,0 +1,675 @@ +#ifndef __gles2_gl2_h_ +#define __gles2_gl2_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2018 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry.
The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#include <GLES2/gl2platform.h> + +#ifndef GL_APIENTRYP +#define GL_APIENTRYP GL_APIENTRY* +#endif + +#ifndef GL_GLES_PROTOTYPES +#define GL_GLES_PROTOTYPES 1 +#endif + +/* Generated on date 20180525 */ + +/* Generated C header for: + * API: gles2 + * Profile: common + * Versions considered: 2\.[0-9] + * Versions emitted: .* + * Default extensions included: None + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_ES_VERSION_2_0 +#define GL_ES_VERSION_2_0 1 +#include <KHR/khrplatform.h> +typedef khronos_int8_t GLbyte; +typedef khronos_float_t GLclampf; +typedef khronos_int32_t GLfixed; +typedef short GLshort; +typedef unsigned short GLushort; +typedef void GLvoid; +typedef struct __GLsync *GLsync; +typedef khronos_int64_t GLint64; +typedef khronos_uint64_t GLuint64; +typedef unsigned int GLenum; +typedef unsigned int GLuint; +typedef char GLchar; +typedef khronos_float_t GLfloat; +typedef khronos_ssize_t GLsizeiptr; +typedef khronos_intptr_t GLintptr; +typedef unsigned int GLbitfield; +typedef int GLint; +typedef unsigned char GLboolean; +typedef int GLsizei; +typedef khronos_uint8_t GLubyte; +#define GL_DEPTH_BUFFER_BIT 0x00000100 +#define GL_STENCIL_BUFFER_BIT 0x00000400 +#define GL_COLOR_BUFFER_BIT 0x00004000 +#define GL_FALSE 0 +#define GL_TRUE 1 +#define GL_POINTS 0x0000 +#define GL_LINES 0x0001 +#define GL_LINE_LOOP 0x0002 +#define GL_LINE_STRIP 0x0003 +#define GL_TRIANGLES 0x0004 +#define GL_TRIANGLE_STRIP 0x0005 +#define GL_TRIANGLE_FAN 0x0006 +#define GL_ZERO 0 +#define GL_ONE 1 +#define GL_SRC_COLOR 0x0300 +#define GL_ONE_MINUS_SRC_COLOR 0x0301 +#define GL_SRC_ALPHA 0x0302 +#define GL_ONE_MINUS_SRC_ALPHA 0x0303 +#define GL_DST_ALPHA 0x0304 +#define GL_ONE_MINUS_DST_ALPHA 0x0305 +#define GL_DST_COLOR 0x0306 +#define GL_ONE_MINUS_DST_COLOR 0x0307 +#define GL_SRC_ALPHA_SATURATE 0x0308 +#define GL_FUNC_ADD 0x8006 +#define GL_BLEND_EQUATION 0x8009 +#define GL_BLEND_EQUATION_RGB 0x8009 +#define GL_BLEND_EQUATION_ALPHA 0x883D +#define GL_FUNC_SUBTRACT 0x800A +#define GL_FUNC_REVERSE_SUBTRACT 0x800B +#define GL_BLEND_DST_RGB 0x80C8 +#define GL_BLEND_SRC_RGB 0x80C9 +#define GL_BLEND_DST_ALPHA 0x80CA +#define GL_BLEND_SRC_ALPHA 0x80CB +#define GL_CONSTANT_COLOR 0x8001 +#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002 +#define GL_CONSTANT_ALPHA 0x8003 +#define GL_ONE_MINUS_CONSTANT_ALPHA 0x8004 +#define GL_BLEND_COLOR 0x8005 +#define GL_ARRAY_BUFFER 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define GL_ARRAY_BUFFER_BINDING 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895 +#define GL_STREAM_DRAW 0x88E0 +#define GL_STATIC_DRAW 0x88E4 +#define GL_DYNAMIC_DRAW 0x88E8 +#define GL_BUFFER_SIZE 0x8764 +#define GL_BUFFER_USAGE 0x8765 +#define GL_CURRENT_VERTEX_ATTRIB 0x8626 +#define GL_FRONT 0x0404 +#define GL_BACK 0x0405 +#define GL_FRONT_AND_BACK 0x0408 +#define GL_TEXTURE_2D 0x0DE1 +#define GL_CULL_FACE 0x0B44 +#define GL_BLEND 0x0BE2 +#define GL_DITHER 0x0BD0 +#define GL_STENCIL_TEST 0x0B90 +#define GL_DEPTH_TEST 0x0B71 +#define GL_SCISSOR_TEST 0x0C11 +#define GL_POLYGON_OFFSET_FILL 0x8037 +#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E +#define GL_SAMPLE_COVERAGE 0x80A0 +#define GL_NO_ERROR 0 +#define GL_INVALID_ENUM 0x0500 +#define GL_INVALID_VALUE 0x0501 +#define GL_INVALID_OPERATION 0x0502 +#define GL_OUT_OF_MEMORY 0x0505 +#define GL_CW 0x0900 +#define GL_CCW 0x0901 +#define GL_LINE_WIDTH 0x0B21 +#define
GL_ALIASED_POINT_SIZE_RANGE 0x846D +#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E +#define GL_CULL_FACE_MODE 0x0B45 +#define GL_FRONT_FACE 0x0B46 +#define GL_DEPTH_RANGE 0x0B70 +#define GL_DEPTH_WRITEMASK 0x0B72 +#define GL_DEPTH_CLEAR_VALUE 0x0B73 +#define GL_DEPTH_FUNC 0x0B74 +#define GL_STENCIL_CLEAR_VALUE 0x0B91 +#define GL_STENCIL_FUNC 0x0B92 +#define GL_STENCIL_FAIL 0x0B94 +#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95 +#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96 +#define GL_STENCIL_REF 0x0B97 +#define GL_STENCIL_VALUE_MASK 0x0B93 +#define GL_STENCIL_WRITEMASK 0x0B98 +#define GL_STENCIL_BACK_FUNC 0x8800 +#define GL_STENCIL_BACK_FAIL 0x8801 +#define GL_STENCIL_BACK_PASS_DEPTH_FAIL 0x8802 +#define GL_STENCIL_BACK_PASS_DEPTH_PASS 0x8803 +#define GL_STENCIL_BACK_REF 0x8CA3 +#define GL_STENCIL_BACK_VALUE_MASK 0x8CA4 +#define GL_STENCIL_BACK_WRITEMASK 0x8CA5 +#define GL_VIEWPORT 0x0BA2 +#define GL_SCISSOR_BOX 0x0C10 +#define GL_COLOR_CLEAR_VALUE 0x0C22 +#define GL_COLOR_WRITEMASK 0x0C23 +#define GL_UNPACK_ALIGNMENT 0x0CF5 +#define GL_PACK_ALIGNMENT 0x0D05 +#define GL_MAX_TEXTURE_SIZE 0x0D33 +#define GL_MAX_VIEWPORT_DIMS 0x0D3A +#define GL_SUBPIXEL_BITS 0x0D50 +#define GL_RED_BITS 0x0D52 +#define GL_GREEN_BITS 0x0D53 +#define GL_BLUE_BITS 0x0D54 +#define GL_ALPHA_BITS 0x0D55 +#define GL_DEPTH_BITS 0x0D56 +#define GL_STENCIL_BITS 0x0D57 +#define GL_POLYGON_OFFSET_UNITS 0x2A00 +#define GL_POLYGON_OFFSET_FACTOR 0x8038 +#define GL_TEXTURE_BINDING_2D 0x8069 +#define GL_SAMPLE_BUFFERS 0x80A8 +#define GL_SAMPLES 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT 0x80AB +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3 +#define GL_DONT_CARE 0x1100 +#define GL_FASTEST 0x1101 +#define GL_NICEST 0x1102 +#define GL_GENERATE_MIPMAP_HINT 0x8192 +#define GL_BYTE 0x1400 +#define GL_UNSIGNED_BYTE 0x1401 +#define GL_SHORT 0x1402 +#define GL_UNSIGNED_SHORT 0x1403 +#define GL_INT 0x1404 +#define GL_UNSIGNED_INT 0x1405 +#define GL_FLOAT 0x1406 +#define GL_FIXED 0x140C +#define GL_DEPTH_COMPONENT 0x1902 +#define GL_ALPHA 0x1906 +#define GL_RGB 0x1907 +#define GL_RGBA 0x1908 +#define GL_LUMINANCE 0x1909 +#define GL_LUMINANCE_ALPHA 0x190A +#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034 +#define GL_UNSIGNED_SHORT_5_6_5 0x8363 +#define GL_FRAGMENT_SHADER 0x8B30 +#define GL_VERTEX_SHADER 0x8B31 +#define GL_MAX_VERTEX_ATTRIBS 0x8869 +#define GL_MAX_VERTEX_UNIFORM_VECTORS 0x8DFB +#define GL_MAX_VARYING_VECTORS 0x8DFC +#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D +#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS 0x8B4C +#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872 +#define GL_MAX_FRAGMENT_UNIFORM_VECTORS 0x8DFD +#define GL_SHADER_TYPE 0x8B4F +#define GL_DELETE_STATUS 0x8B80 +#define GL_LINK_STATUS 0x8B82 +#define GL_VALIDATE_STATUS 0x8B83 +#define GL_ATTACHED_SHADERS 0x8B85 +#define GL_ACTIVE_UNIFORMS 0x8B86 +#define GL_ACTIVE_UNIFORM_MAX_LENGTH 0x8B87 +#define GL_ACTIVE_ATTRIBUTES 0x8B89 +#define GL_ACTIVE_ATTRIBUTE_MAX_LENGTH 0x8B8A +#define GL_SHADING_LANGUAGE_VERSION 0x8B8C +#define GL_CURRENT_PROGRAM 0x8B8D +#define GL_NEVER 0x0200 +#define GL_LESS 0x0201 +#define GL_EQUAL 0x0202 +#define GL_LEQUAL 0x0203 +#define GL_GREATER 0x0204 +#define GL_NOTEQUAL 0x0205 +#define GL_GEQUAL 0x0206 +#define GL_ALWAYS 0x0207 +#define GL_KEEP 0x1E00 +#define GL_REPLACE 0x1E01 +#define GL_INCR 0x1E02 +#define GL_DECR 0x1E03 +#define GL_INVERT 0x150A +#define GL_INCR_WRAP 0x8507 +#define GL_DECR_WRAP 0x8508 +#define GL_VENDOR 0x1F00 
+#define GL_RENDERER 0x1F01 +#define GL_VERSION 0x1F02 +#define GL_EXTENSIONS 0x1F03 +#define GL_NEAREST 0x2600 +#define GL_LINEAR 0x2601 +#define GL_NEAREST_MIPMAP_NEAREST 0x2700 +#define GL_LINEAR_MIPMAP_NEAREST 0x2701 +#define GL_NEAREST_MIPMAP_LINEAR 0x2702 +#define GL_LINEAR_MIPMAP_LINEAR 0x2703 +#define GL_TEXTURE_MAG_FILTER 0x2800 +#define GL_TEXTURE_MIN_FILTER 0x2801 +#define GL_TEXTURE_WRAP_S 0x2802 +#define GL_TEXTURE_WRAP_T 0x2803 +#define GL_TEXTURE 0x1702 +#define GL_TEXTURE_CUBE_MAP 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 +#define GL_TEXTURE3 0x84C3 +#define GL_TEXTURE4 0x84C4 +#define GL_TEXTURE5 0x84C5 +#define GL_TEXTURE6 0x84C6 +#define GL_TEXTURE7 0x84C7 +#define GL_TEXTURE8 0x84C8 +#define GL_TEXTURE9 0x84C9 +#define GL_TEXTURE10 0x84CA +#define GL_TEXTURE11 0x84CB +#define GL_TEXTURE12 0x84CC +#define GL_TEXTURE13 0x84CD +#define GL_TEXTURE14 0x84CE +#define GL_TEXTURE15 0x84CF +#define GL_TEXTURE16 0x84D0 +#define GL_TEXTURE17 0x84D1 +#define GL_TEXTURE18 0x84D2 +#define GL_TEXTURE19 0x84D3 +#define GL_TEXTURE20 0x84D4 +#define GL_TEXTURE21 0x84D5 +#define GL_TEXTURE22 0x84D6 +#define GL_TEXTURE23 0x84D7 +#define GL_TEXTURE24 0x84D8 +#define GL_TEXTURE25 0x84D9 +#define GL_TEXTURE26 0x84DA +#define GL_TEXTURE27 0x84DB +#define GL_TEXTURE28 0x84DC +#define GL_TEXTURE29 0x84DD +#define GL_TEXTURE30 0x84DE +#define GL_TEXTURE31 0x84DF +#define GL_ACTIVE_TEXTURE 0x84E0 +#define GL_REPEAT 0x2901 +#define GL_CLAMP_TO_EDGE 0x812F +#define GL_MIRRORED_REPEAT 0x8370 +#define GL_FLOAT_VEC2 0x8B50 +#define GL_FLOAT_VEC3 0x8B51 +#define GL_FLOAT_VEC4 0x8B52 +#define GL_INT_VEC2 0x8B53 +#define GL_INT_VEC3 0x8B54 +#define GL_INT_VEC4 0x8B55 +#define GL_BOOL 0x8B56 +#define GL_BOOL_VEC2 0x8B57 +#define GL_BOOL_VEC3 0x8B58 +#define GL_BOOL_VEC4 0x8B59 +#define GL_FLOAT_MAT2 0x8B5A +#define GL_FLOAT_MAT3 0x8B5B +#define GL_FLOAT_MAT4 0x8B5C +#define GL_SAMPLER_2D 0x8B5E +#define GL_SAMPLER_CUBE 0x8B60 +#define GL_VERTEX_ATTRIB_ARRAY_ENABLED 0x8622 +#define GL_VERTEX_ATTRIB_ARRAY_SIZE 0x8623 +#define GL_VERTEX_ATTRIB_ARRAY_STRIDE 0x8624 +#define GL_VERTEX_ATTRIB_ARRAY_TYPE 0x8625 +#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED 0x886A +#define GL_VERTEX_ATTRIB_ARRAY_POINTER 0x8645 +#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING 0x889F +#define GL_IMPLEMENTATION_COLOR_READ_TYPE 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT 0x8B9B +#define GL_COMPILE_STATUS 0x8B81 +#define GL_INFO_LOG_LENGTH 0x8B84 +#define GL_SHADER_SOURCE_LENGTH 0x8B88 +#define GL_SHADER_COMPILER 0x8DFA +#define GL_SHADER_BINARY_FORMATS 0x8DF8 +#define GL_NUM_SHADER_BINARY_FORMATS 0x8DF9 +#define GL_LOW_FLOAT 0x8DF0 +#define GL_MEDIUM_FLOAT 0x8DF1 +#define GL_HIGH_FLOAT 0x8DF2 +#define GL_LOW_INT 0x8DF3 +#define GL_MEDIUM_INT 0x8DF4 +#define GL_HIGH_INT 0x8DF5 +#define GL_FRAMEBUFFER 0x8D40 +#define GL_RENDERBUFFER 0x8D41 +#define GL_RGBA4 0x8056 +#define GL_RGB5_A1 0x8057 +#define GL_RGB565 0x8D62 +#define GL_DEPTH_COMPONENT16 0x81A5 +#define GL_STENCIL_INDEX8 0x8D48 +#define GL_RENDERBUFFER_WIDTH 0x8D42 +#define GL_RENDERBUFFER_HEIGHT 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT 0x8D44 +#define 
GL_RENDERBUFFER_RED_SIZE 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE 0x8D53 +#define GL_RENDERBUFFER_DEPTH_SIZE 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE 0x8D55 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE 0x8CD3 +#define GL_COLOR_ATTACHMENT0 0x8CE0 +#define GL_DEPTH_ATTACHMENT 0x8D00 +#define GL_STENCIL_ATTACHMENT 0x8D20 +#define GL_NONE 0 +#define GL_FRAMEBUFFER_COMPLETE 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS 0x8CD9 +#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD +#define GL_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_RENDERBUFFER_BINDING 0x8CA7 +#define GL_MAX_RENDERBUFFER_SIZE 0x84E8 +#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506 +typedef void (GL_APIENTRYP PFNGLACTIVETEXTUREPROC) (GLenum texture); +typedef void (GL_APIENTRYP PFNGLATTACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLBINDATTRIBLOCATIONPROC) (GLuint program, GLuint index, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLBINDFRAMEBUFFERPROC) (GLenum target, GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLBINDRENDERBUFFERPROC) (GLenum target, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLBINDTEXTUREPROC) (GLenum target, GLuint texture); +typedef void (GL_APIENTRYP PFNGLBLENDCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEPROC) (GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCPROC) (GLenum sfactor, GLenum dfactor); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEPROC) (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +typedef void (GL_APIENTRYP PFNGLBUFFERDATAPROC) (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +typedef void (GL_APIENTRYP PFNGLBUFFERSUBDATAPROC) (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +typedef GLenum (GL_APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLCLEARPROC) (GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHFPROC) (GLfloat d); +typedef void (GL_APIENTRYP PFNGLCLEARSTENCILPROC) (GLint s); +typedef void (GL_APIENTRYP PFNGLCOLORMASKPROC) (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +typedef void (GL_APIENTRYP PFNGLCOMPILESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOPYTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE2DPROC) (GLenum 
target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef GLuint (GL_APIENTRYP PFNGLCREATEPROGRAMPROC) (void); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROC) (GLenum type); +typedef void (GL_APIENTRYP PFNGLCULLFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLDELETEBUFFERSPROC) (GLsizei n, const GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLDELETEFRAMEBUFFERSPROC) (GLsizei n, const GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLDELETERENDERBUFFERSPROC) (GLsizei n, const GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLDELETESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLDELETETEXTURESPROC) (GLsizei n, const GLuint *textures); +typedef void (GL_APIENTRYP PFNGLDEPTHFUNCPROC) (GLenum func); +typedef void (GL_APIENTRYP PFNGLDEPTHMASKPROC) (GLboolean flag); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEFPROC) (GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLDETACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLDISABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLDISABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSPROC) (GLenum mode, GLint first, GLsizei count); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLENABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLENABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLFINISHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFLUSHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFERPROC) (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (GL_APIENTRYP PFNGLFRONTFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLGENBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLGENERATEMIPMAPPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGENFRAMEBUFFERSPROC) (GLsizei n, GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLGENRENDERBUFFERSPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLGENTEXTURESPROC) (GLsizei n, GLuint *textures); +typedef void (GL_APIENTRYP PFNGLGETACTIVEATTRIBPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETATTACHEDSHADERSPROC) (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +typedef GLint (GL_APIENTRYP PFNGLGETATTRIBLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANVPROC) (GLenum pname, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef GLenum (GL_APIENTRYP PFNGLGETERRORPROC) (void); +typedef void (GL_APIENTRYP PFNGLGETFLOATVPROC) (GLenum pname, GLfloat *data); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC) (GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERVPROC) (GLenum pname, GLint *data); +typedef void 
(GL_APIENTRYP PFNGLGETPROGRAMIVPROC) (GLuint program, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINFOLOGPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERIVPROC) (GLuint shader, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERINFOLOGPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETSHADERPRECISIONFORMATPROC) (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +typedef void (GL_APIENTRYP PFNGLGETSHADERSOURCEPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGPROC) (GLenum name); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMFVPROC) (GLuint program, GLint location, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMIVPROC) (GLuint program, GLint location, GLint *params); +typedef GLint (GL_APIENTRYP PFNGLGETUNIFORMLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBFVPROC) (GLuint index, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVPROC) (GLuint index, GLenum pname, void **pointer); +typedef void (GL_APIENTRYP PFNGLHINTPROC) (GLenum target, GLenum mode); +typedef GLboolean (GL_APIENTRYP PFNGLISBUFFERPROC) (GLuint buffer); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDPROC) (GLenum cap); +typedef GLboolean (GL_APIENTRYP PFNGLISFRAMEBUFFERPROC) (GLuint framebuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPROC) (GLuint program); +typedef GLboolean (GL_APIENTRYP PFNGLISRENDERBUFFERPROC) (GLuint renderbuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISSHADERPROC) (GLuint shader); +typedef GLboolean (GL_APIENTRYP PFNGLISTEXTUREPROC) (GLuint texture); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHPROC) (GLfloat width); +typedef void (GL_APIENTRYP PFNGLLINKPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLPIXELSTOREIPROC) (GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETPROC) (GLfloat factor, GLfloat units); +typedef void (GL_APIENTRYP PFNGLREADPIXELSPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +typedef void (GL_APIENTRYP PFNGLRELEASESHADERCOMPILERPROC) (void); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGEPROC) (GLfloat value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLSCISSORPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSHADERBINARYPROC) (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLSHADERSOURCEPROC) (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCSEPARATEPROC) (GLenum face, GLenum func, GLint ref, 
GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKPROC) (GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKSEPARATEPROC) (GLenum face, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILOPPROC) (GLenum fail, GLenum zfail, GLenum zpass); +typedef void (GL_APIENTRYP PFNGLSTENCILOPSEPARATEPROC) (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFPROC) (GLenum target, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FPROC) (GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IPROC) (GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FPROC) (GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IPROC) (GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IPROC) (GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IPROC) (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FPROC) (GLuint index, GLfloat x); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FPROC) (GLuint index, GLfloat x, GLfloat y); +typedef void (GL_APIENTRYP 
PFNGLVERTEXATTRIB2FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBPOINTERPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLVIEWPORTPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glActiveTexture (GLenum texture); +GL_APICALL void GL_APIENTRY glAttachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glBindAttribLocation (GLuint program, GLuint index, const GLchar *name); +GL_APICALL void GL_APIENTRY glBindBuffer (GLenum target, GLuint buffer); +GL_APICALL void GL_APIENTRY glBindFramebuffer (GLenum target, GLuint framebuffer); +GL_APICALL void GL_APIENTRY glBindRenderbuffer (GLenum target, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glBindTexture (GLenum target, GLuint texture); +GL_APICALL void GL_APIENTRY glBlendColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glBlendEquation (GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparate (GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFunc (GLenum sfactor, GLenum dfactor); +GL_APICALL void GL_APIENTRY glBlendFuncSeparate (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +GL_APICALL void GL_APIENTRY glBufferData (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +GL_APICALL void GL_APIENTRY glBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +GL_APICALL GLenum GL_APIENTRY glCheckFramebufferStatus (GLenum target); +GL_APICALL void GL_APIENTRY glClear (GLbitfield mask); +GL_APICALL void GL_APIENTRY glClearColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glClearDepthf (GLfloat d); +GL_APICALL void GL_APIENTRY glClearStencil (GLint s); +GL_APICALL void GL_APIENTRY glColorMask (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +GL_APICALL void GL_APIENTRY glCompileShader (GLuint shader); +GL_APICALL void GL_APIENTRY glCompressedTexImage2D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCopyTexImage2D (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GL_APICALL void GL_APIENTRY glCopyTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL GLuint GL_APIENTRY glCreateProgram (void); +GL_APICALL GLuint GL_APIENTRY glCreateShader (GLenum type); +GL_APICALL void GL_APIENTRY glCullFace (GLenum mode); +GL_APICALL void GL_APIENTRY glDeleteBuffers (GLsizei n, const GLuint *buffers); +GL_APICALL void GL_APIENTRY glDeleteFramebuffers (GLsizei n, const GLuint *framebuffers); +GL_APICALL void 
GL_APIENTRY glDeleteProgram (GLuint program); +GL_APICALL void GL_APIENTRY glDeleteRenderbuffers (GLsizei n, const GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glDeleteShader (GLuint shader); +GL_APICALL void GL_APIENTRY glDeleteTextures (GLsizei n, const GLuint *textures); +GL_APICALL void GL_APIENTRY glDepthFunc (GLenum func); +GL_APICALL void GL_APIENTRY glDepthMask (GLboolean flag); +GL_APICALL void GL_APIENTRY glDepthRangef (GLfloat n, GLfloat f); +GL_APICALL void GL_APIENTRY glDetachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glDisable (GLenum cap); +GL_APICALL void GL_APIENTRY glDisableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glDrawArrays (GLenum mode, GLint first, GLsizei count); +GL_APICALL void GL_APIENTRY glDrawElements (GLenum mode, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glEnable (GLenum cap); +GL_APICALL void GL_APIENTRY glEnableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glFinish (void); +GL_APICALL void GL_APIENTRY glFlush (void); +GL_APICALL void GL_APIENTRY glFramebufferRenderbuffer (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glFramebufferTexture2D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GL_APICALL void GL_APIENTRY glFrontFace (GLenum mode); +GL_APICALL void GL_APIENTRY glGenBuffers (GLsizei n, GLuint *buffers); +GL_APICALL void GL_APIENTRY glGenerateMipmap (GLenum target); +GL_APICALL void GL_APIENTRY glGenFramebuffers (GLsizei n, GLuint *framebuffers); +GL_APICALL void GL_APIENTRY glGenRenderbuffers (GLsizei n, GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glGenTextures (GLsizei n, GLuint *textures); +GL_APICALL void GL_APIENTRY glGetActiveAttrib (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetActiveUniform (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetAttachedShaders (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +GL_APICALL GLint GL_APIENTRY glGetAttribLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetBooleanv (GLenum pname, GLboolean *data); +GL_APICALL void GL_APIENTRY glGetBufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL GLenum GL_APIENTRY glGetError (void); +GL_APICALL void GL_APIENTRY glGetFloatv (GLenum pname, GLfloat *data); +GL_APICALL void GL_APIENTRY glGetFramebufferAttachmentParameteriv (GLenum target, GLenum attachment, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetIntegerv (GLenum pname, GLint *data); +GL_APICALL void GL_APIENTRY glGetProgramiv (GLuint program, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetProgramInfoLog (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetRenderbufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderiv (GLuint shader, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderInfoLog (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetShaderPrecisionFormat (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +GL_APICALL void GL_APIENTRY glGetShaderSource (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar 
*source); +GL_APICALL const GLubyte *GL_APIENTRY glGetString (GLenum name); +GL_APICALL void GL_APIENTRY glGetTexParameterfv (GLenum target, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetTexParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetUniformfv (GLuint program, GLint location, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetUniformiv (GLuint program, GLint location, GLint *params); +GL_APICALL GLint GL_APIENTRY glGetUniformLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetVertexAttribfv (GLuint index, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribiv (GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribPointerv (GLuint index, GLenum pname, void **pointer); +GL_APICALL void GL_APIENTRY glHint (GLenum target, GLenum mode); +GL_APICALL GLboolean GL_APIENTRY glIsBuffer (GLuint buffer); +GL_APICALL GLboolean GL_APIENTRY glIsEnabled (GLenum cap); +GL_APICALL GLboolean GL_APIENTRY glIsFramebuffer (GLuint framebuffer); +GL_APICALL GLboolean GL_APIENTRY glIsProgram (GLuint program); +GL_APICALL GLboolean GL_APIENTRY glIsRenderbuffer (GLuint renderbuffer); +GL_APICALL GLboolean GL_APIENTRY glIsShader (GLuint shader); +GL_APICALL GLboolean GL_APIENTRY glIsTexture (GLuint texture); +GL_APICALL void GL_APIENTRY glLineWidth (GLfloat width); +GL_APICALL void GL_APIENTRY glLinkProgram (GLuint program); +GL_APICALL void GL_APIENTRY glPixelStorei (GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glPolygonOffset (GLfloat factor, GLfloat units); +GL_APICALL void GL_APIENTRY glReadPixels (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +GL_APICALL void GL_APIENTRY glReleaseShaderCompiler (void); +GL_APICALL void GL_APIENTRY glRenderbufferStorage (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glSampleCoverage (GLfloat value, GLboolean invert); +GL_APICALL void GL_APIENTRY glScissor (GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glShaderBinary (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glShaderSource (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +GL_APICALL void GL_APIENTRY glStencilFunc (GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilFuncSeparate (GLenum face, GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMask (GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMaskSeparate (GLenum face, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilOp (GLenum fail, GLenum zfail, GLenum zpass); +GL_APICALL void GL_APIENTRY glStencilOpSeparate (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +GL_APICALL void GL_APIENTRY glTexImage2D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexParameterf (GLenum target, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glTexParameterfv (GLenum target, GLenum pname, const GLfloat *params); +GL_APICALL void GL_APIENTRY glTexParameteri (GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glTexParameteriv (GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexSubImage2D (GLenum target, GLint level, GLint xoffset, 
GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glUniform1f (GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glUniform1fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform1i (GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glUniform1iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform2f (GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glUniform2fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform2i (GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glUniform2iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform3f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glUniform3fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform3i (GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glUniform3iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform4f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glUniform4fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform4i (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glUniform4iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUseProgram (GLuint program); +GL_APICALL void GL_APIENTRY glValidateProgram (GLuint program); +GL_APICALL void GL_APIENTRY glVertexAttrib1f (GLuint index, GLfloat x); +GL_APICALL void GL_APIENTRY glVertexAttrib1fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib2f (GLuint index, GLfloat x, GLfloat y); +GL_APICALL void GL_APIENTRY glVertexAttrib2fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib3f (GLuint index, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glVertexAttrib3fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib4f (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GL_APICALL void GL_APIENTRY glVertexAttrib4fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttribPointer (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +GL_APICALL void GL_APIENTRY glViewport (GLint x, GLint y, GLsizei width, GLsizei height); +#endif +#endif /* GL_ES_VERSION_2_0 */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/GLES2/gl2ext.h b/vendor/swiftshader/include/GLES2/gl2ext.h new file mode 100644 index 00000000..94d5dba3 --- /dev/null +++ b/vendor/swiftshader/include/GLES2/gl2ext.h @@ -0,0 +1,3619 @@ +#ifndef __gles2_gl2ext_h_ +#define __gles2_gl2ext_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2018 The Khronos Group Inc. 
+** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry. The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#ifndef GL_APIENTRYP +#define GL_APIENTRYP GL_APIENTRY* +#endif + +/* Generated on date 20180525 */ + +/* Generated C header for: + * API: gles2 + * Profile: common + * Versions considered: 2\.[0-9] + * Versions emitted: _nomatch_^ + * Default extensions included: gles2 + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_KHR_blend_equation_advanced +#define GL_KHR_blend_equation_advanced 1 +#define GL_MULTIPLY_KHR 0x9294 +#define GL_SCREEN_KHR 0x9295 +#define GL_OVERLAY_KHR 0x9296 +#define GL_DARKEN_KHR 0x9297 +#define GL_LIGHTEN_KHR 0x9298 +#define GL_COLORDODGE_KHR 0x9299 +#define GL_COLORBURN_KHR 0x929A +#define GL_HARDLIGHT_KHR 0x929B +#define GL_SOFTLIGHT_KHR 0x929C +#define GL_DIFFERENCE_KHR 0x929E +#define GL_EXCLUSION_KHR 0x92A0 +#define GL_HSL_HUE_KHR 0x92AD +#define GL_HSL_SATURATION_KHR 0x92AE +#define GL_HSL_COLOR_KHR 0x92AF +#define GL_HSL_LUMINOSITY_KHR 0x92B0 +typedef void (GL_APIENTRYP PFNGLBLENDBARRIERKHRPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBlendBarrierKHR (void); +#endif +#endif /* GL_KHR_blend_equation_advanced */ + +#ifndef GL_KHR_blend_equation_advanced_coherent +#define GL_KHR_blend_equation_advanced_coherent 1 +#define GL_BLEND_ADVANCED_COHERENT_KHR 0x9285 +#endif /* GL_KHR_blend_equation_advanced_coherent */ + +#ifndef GL_KHR_context_flush_control +#define GL_KHR_context_flush_control 1 +#define GL_CONTEXT_RELEASE_BEHAVIOR_KHR 0x82FB +#define GL_CONTEXT_RELEASE_BEHAVIOR_FLUSH_KHR 0x82FC +#endif /* GL_KHR_context_flush_control */ + +#ifndef GL_KHR_debug +#define GL_KHR_debug 1 +typedef void (GL_APIENTRY *GLDEBUGPROCKHR)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam); +#define GL_SAMPLER 0x82E6 +#define GL_DEBUG_OUTPUT_SYNCHRONOUS_KHR 0x8242 +#define GL_DEBUG_NEXT_LOGGED_MESSAGE_LENGTH_KHR 0x8243 +#define GL_DEBUG_CALLBACK_FUNCTION_KHR 0x8244 +#define GL_DEBUG_CALLBACK_USER_PARAM_KHR 0x8245 +#define GL_DEBUG_SOURCE_API_KHR 0x8246 +#define GL_DEBUG_SOURCE_WINDOW_SYSTEM_KHR 0x8247 +#define GL_DEBUG_SOURCE_SHADER_COMPILER_KHR 0x8248 +#define 
GL_DEBUG_SOURCE_THIRD_PARTY_KHR 0x8249 +#define GL_DEBUG_SOURCE_APPLICATION_KHR 0x824A +#define GL_DEBUG_SOURCE_OTHER_KHR 0x824B +#define GL_DEBUG_TYPE_ERROR_KHR 0x824C +#define GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_KHR 0x824D +#define GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_KHR 0x824E +#define GL_DEBUG_TYPE_PORTABILITY_KHR 0x824F +#define GL_DEBUG_TYPE_PERFORMANCE_KHR 0x8250 +#define GL_DEBUG_TYPE_OTHER_KHR 0x8251 +#define GL_DEBUG_TYPE_MARKER_KHR 0x8268 +#define GL_DEBUG_TYPE_PUSH_GROUP_KHR 0x8269 +#define GL_DEBUG_TYPE_POP_GROUP_KHR 0x826A +#define GL_DEBUG_SEVERITY_NOTIFICATION_KHR 0x826B +#define GL_MAX_DEBUG_GROUP_STACK_DEPTH_KHR 0x826C +#define GL_DEBUG_GROUP_STACK_DEPTH_KHR 0x826D +#define GL_BUFFER_KHR 0x82E0 +#define GL_SHADER_KHR 0x82E1 +#define GL_PROGRAM_KHR 0x82E2 +#define GL_VERTEX_ARRAY_KHR 0x8074 +#define GL_QUERY_KHR 0x82E3 +#define GL_PROGRAM_PIPELINE_KHR 0x82E4 +#define GL_SAMPLER_KHR 0x82E6 +#define GL_MAX_LABEL_LENGTH_KHR 0x82E8 +#define GL_MAX_DEBUG_MESSAGE_LENGTH_KHR 0x9143 +#define GL_MAX_DEBUG_LOGGED_MESSAGES_KHR 0x9144 +#define GL_DEBUG_LOGGED_MESSAGES_KHR 0x9145 +#define GL_DEBUG_SEVERITY_HIGH_KHR 0x9146 +#define GL_DEBUG_SEVERITY_MEDIUM_KHR 0x9147 +#define GL_DEBUG_SEVERITY_LOW_KHR 0x9148 +#define GL_DEBUG_OUTPUT_KHR 0x92E0 +#define GL_CONTEXT_FLAG_DEBUG_BIT_KHR 0x00000002 +#define GL_STACK_OVERFLOW_KHR 0x0503 +#define GL_STACK_UNDERFLOW_KHR 0x0504 +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGECONTROLKHRPROC) (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGEINSERTKHRPROC) (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +typedef void (GL_APIENTRYP PFNGLDEBUGMESSAGECALLBACKKHRPROC) (GLDEBUGPROCKHR callback, const void *userParam); +typedef GLuint (GL_APIENTRYP PFNGLGETDEBUGMESSAGELOGKHRPROC) (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +typedef void (GL_APIENTRYP PFNGLPUSHDEBUGGROUPKHRPROC) (GLenum source, GLuint id, GLsizei length, const GLchar *message); +typedef void (GL_APIENTRYP PFNGLPOPDEBUGGROUPKHRPROC) (void); +typedef void (GL_APIENTRYP PFNGLOBJECTLABELKHRPROC) (GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETOBJECTLABELKHRPROC) (GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (GL_APIENTRYP PFNGLOBJECTPTRLABELKHRPROC) (const void *ptr, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETOBJECTPTRLABELKHRPROC) (const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETPOINTERVKHRPROC) (GLenum pname, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDebugMessageControlKHR (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled); +GL_APICALL void GL_APIENTRY glDebugMessageInsertKHR (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf); +GL_APICALL void GL_APIENTRY glDebugMessageCallbackKHR (GLDEBUGPROCKHR callback, const void *userParam); +GL_APICALL GLuint GL_APIENTRY glGetDebugMessageLogKHR (GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog); +GL_APICALL void GL_APIENTRY glPushDebugGroupKHR (GLenum source, GLuint id, GLsizei length, const GLchar *message); +GL_APICALL void GL_APIENTRY 
glPopDebugGroupKHR (void); +GL_APICALL void GL_APIENTRY glObjectLabelKHR (GLenum identifier, GLuint name, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glGetObjectLabelKHR (GLenum identifier, GLuint name, GLsizei bufSize, GLsizei *length, GLchar *label); +GL_APICALL void GL_APIENTRY glObjectPtrLabelKHR (const void *ptr, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glGetObjectPtrLabelKHR (const void *ptr, GLsizei bufSize, GLsizei *length, GLchar *label); +GL_APICALL void GL_APIENTRY glGetPointervKHR (GLenum pname, void **params); +#endif +#endif /* GL_KHR_debug */ + +#ifndef GL_KHR_no_error +#define GL_KHR_no_error 1 +#define GL_CONTEXT_FLAG_NO_ERROR_BIT_KHR 0x00000008 +#endif /* GL_KHR_no_error */ + +#ifndef GL_KHR_parallel_shader_compile +#define GL_KHR_parallel_shader_compile 1 +#define GL_MAX_SHADER_COMPILER_THREADS_KHR 0x91B0 +#define GL_COMPLETION_STATUS_KHR 0x91B1 +typedef void (GL_APIENTRYP PFNGLMAXSHADERCOMPILERTHREADSKHRPROC) (GLuint count); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glMaxShaderCompilerThreadsKHR (GLuint count); +#endif +#endif /* GL_KHR_parallel_shader_compile */ + +#ifndef GL_KHR_robust_buffer_access_behavior +#define GL_KHR_robust_buffer_access_behavior 1 +#endif /* GL_KHR_robust_buffer_access_behavior */ + +#ifndef GL_KHR_robustness +#define GL_KHR_robustness 1 +#define GL_CONTEXT_ROBUST_ACCESS_KHR 0x90F3 +#define GL_LOSE_CONTEXT_ON_RESET_KHR 0x8252 +#define GL_GUILTY_CONTEXT_RESET_KHR 0x8253 +#define GL_INNOCENT_CONTEXT_RESET_KHR 0x8254 +#define GL_UNKNOWN_CONTEXT_RESET_KHR 0x8255 +#define GL_RESET_NOTIFICATION_STRATEGY_KHR 0x8256 +#define GL_NO_RESET_NOTIFICATION_KHR 0x8261 +#define GL_CONTEXT_LOST_KHR 0x0507 +typedef GLenum (GL_APIENTRYP PFNGLGETGRAPHICSRESETSTATUSKHRPROC) (void); +typedef void (GL_APIENTRYP PFNGLREADNPIXELSKHRPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMFVKHRPROC) (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMIVKHRPROC) (GLuint program, GLint location, GLsizei bufSize, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMUIVKHRPROC) (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLenum GL_APIENTRY glGetGraphicsResetStatusKHR (void); +GL_APICALL void GL_APIENTRY glReadnPixelsKHR (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GL_APICALL void GL_APIENTRY glGetnUniformfvKHR (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetnUniformivKHR (GLuint program, GLint location, GLsizei bufSize, GLint *params); +GL_APICALL void GL_APIENTRY glGetnUniformuivKHR (GLuint program, GLint location, GLsizei bufSize, GLuint *params); +#endif +#endif /* GL_KHR_robustness */ + +#ifndef GL_KHR_texture_compression_astc_hdr +#define GL_KHR_texture_compression_astc_hdr 1 +#define GL_COMPRESSED_RGBA_ASTC_4x4_KHR 0x93B0 +#define GL_COMPRESSED_RGBA_ASTC_5x4_KHR 0x93B1 +#define GL_COMPRESSED_RGBA_ASTC_5x5_KHR 0x93B2 +#define GL_COMPRESSED_RGBA_ASTC_6x5_KHR 0x93B3 +#define GL_COMPRESSED_RGBA_ASTC_6x6_KHR 0x93B4 +#define GL_COMPRESSED_RGBA_ASTC_8x5_KHR 0x93B5 +#define GL_COMPRESSED_RGBA_ASTC_8x6_KHR 0x93B6 +#define GL_COMPRESSED_RGBA_ASTC_8x8_KHR 0x93B7 +#define GL_COMPRESSED_RGBA_ASTC_10x5_KHR 0x93B8 +#define GL_COMPRESSED_RGBA_ASTC_10x6_KHR 0x93B9 +#define 
GL_COMPRESSED_RGBA_ASTC_10x8_KHR 0x93BA +#define GL_COMPRESSED_RGBA_ASTC_10x10_KHR 0x93BB +#define GL_COMPRESSED_RGBA_ASTC_12x10_KHR 0x93BC +#define GL_COMPRESSED_RGBA_ASTC_12x12_KHR 0x93BD +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR 0x93D0 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR 0x93D1 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR 0x93D2 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR 0x93D3 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR 0x93D4 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR 0x93D5 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR 0x93D6 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR 0x93D7 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR 0x93D8 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR 0x93D9 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR 0x93DA +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR 0x93DB +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR 0x93DC +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR 0x93DD +#endif /* GL_KHR_texture_compression_astc_hdr */ + +#ifndef GL_KHR_texture_compression_astc_ldr +#define GL_KHR_texture_compression_astc_ldr 1 +#endif /* GL_KHR_texture_compression_astc_ldr */ + +#ifndef GL_KHR_texture_compression_astc_sliced_3d +#define GL_KHR_texture_compression_astc_sliced_3d 1 +#endif /* GL_KHR_texture_compression_astc_sliced_3d */ + +#ifndef GL_OES_EGL_image +#define GL_OES_EGL_image 1 +typedef void *GLeglImageOES; +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETTEXTURE2DOESPROC) (GLenum target, GLeglImageOES image); +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETRENDERBUFFERSTORAGEOESPROC) (GLenum target, GLeglImageOES image); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glEGLImageTargetTexture2DOES (GLenum target, GLeglImageOES image); +GL_APICALL void GL_APIENTRY glEGLImageTargetRenderbufferStorageOES (GLenum target, GLeglImageOES image); +#endif +#endif /* GL_OES_EGL_image */ + +#ifndef GL_OES_EGL_image_external +#define GL_OES_EGL_image_external 1 +#define GL_TEXTURE_EXTERNAL_OES 0x8D65 +#define GL_TEXTURE_BINDING_EXTERNAL_OES 0x8D67 +#define GL_REQUIRED_TEXTURE_IMAGE_UNITS_OES 0x8D68 +#define GL_SAMPLER_EXTERNAL_OES 0x8D66 +#endif /* GL_OES_EGL_image_external */ + +#ifndef GL_OES_EGL_image_external_essl3 +#define GL_OES_EGL_image_external_essl3 1 +#endif /* GL_OES_EGL_image_external_essl3 */ + +#ifndef GL_OES_compressed_ETC1_RGB8_sub_texture +#define GL_OES_compressed_ETC1_RGB8_sub_texture 1 +#endif /* GL_OES_compressed_ETC1_RGB8_sub_texture */ + +#ifndef GL_OES_compressed_ETC1_RGB8_texture +#define GL_OES_compressed_ETC1_RGB8_texture 1 +#define GL_ETC1_RGB8_OES 0x8D64 +#endif /* GL_OES_compressed_ETC1_RGB8_texture */ + +#ifndef GL_OES_compressed_paletted_texture +#define GL_OES_compressed_paletted_texture 1 +#define GL_PALETTE4_RGB8_OES 0x8B90 +#define GL_PALETTE4_RGBA8_OES 0x8B91 +#define GL_PALETTE4_R5_G6_B5_OES 0x8B92 +#define GL_PALETTE4_RGBA4_OES 0x8B93 +#define GL_PALETTE4_RGB5_A1_OES 0x8B94 +#define GL_PALETTE8_RGB8_OES 0x8B95 +#define GL_PALETTE8_RGBA8_OES 0x8B96 +#define GL_PALETTE8_R5_G6_B5_OES 0x8B97 +#define GL_PALETTE8_RGBA4_OES 0x8B98 +#define GL_PALETTE8_RGB5_A1_OES 0x8B99 +#endif /* GL_OES_compressed_paletted_texture */ + +#ifndef GL_OES_copy_image +#define GL_OES_copy_image 1 +typedef void (GL_APIENTRYP PFNGLCOPYIMAGESUBDATAOESPROC) (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +#ifdef 
GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCopyImageSubDataOES (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +#endif +#endif /* GL_OES_copy_image */ + +#ifndef GL_OES_depth24 +#define GL_OES_depth24 1 +#define GL_DEPTH_COMPONENT24_OES 0x81A6 +#endif /* GL_OES_depth24 */ + +#ifndef GL_OES_depth32 +#define GL_OES_depth32 1 +#define GL_DEPTH_COMPONENT32_OES 0x81A7 +#endif /* GL_OES_depth32 */ + +#ifndef GL_OES_depth_texture +#define GL_OES_depth_texture 1 +#endif /* GL_OES_depth_texture */ + +#ifndef GL_OES_draw_buffers_indexed +#define GL_OES_draw_buffers_indexed 1 +#define GL_MIN 0x8007 +#define GL_MAX 0x8008 +typedef void (GL_APIENTRYP PFNGLENABLEIOESPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLDISABLEIOESPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONIOESPROC) (GLuint buf, GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEIOESPROC) (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCIOESPROC) (GLuint buf, GLenum src, GLenum dst); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEIOESPROC) (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +typedef void (GL_APIENTRYP PFNGLCOLORMASKIOESPROC) (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDIOESPROC) (GLenum target, GLuint index); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glEnableiOES (GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glDisableiOES (GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glBlendEquationiOES (GLuint buf, GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparateiOES (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFunciOES (GLuint buf, GLenum src, GLenum dst); +GL_APICALL void GL_APIENTRY glBlendFuncSeparateiOES (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +GL_APICALL void GL_APIENTRY glColorMaskiOES (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +GL_APICALL GLboolean GL_APIENTRY glIsEnablediOES (GLenum target, GLuint index); +#endif +#endif /* GL_OES_draw_buffers_indexed */ + +#ifndef GL_OES_draw_elements_base_vertex +#define GL_OES_draw_elements_base_vertex 1 +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSBASEVERTEXOESPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSBASEVERTEXOESPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXOESPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSBASEVERTEXEXTPROC) (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount, const GLint *basevertex); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawElementsBaseVertexOES (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawRangeElementsBaseVertexOES (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void 
GL_APIENTRY glDrawElementsInstancedBaseVertexOES (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +GL_APICALL void GL_APIENTRY glMultiDrawElementsBaseVertexEXT (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount, const GLint *basevertex); +#endif +#endif /* GL_OES_draw_elements_base_vertex */ + +#ifndef GL_OES_element_index_uint +#define GL_OES_element_index_uint 1 +#endif /* GL_OES_element_index_uint */ + +#ifndef GL_OES_fbo_render_mipmap +#define GL_OES_fbo_render_mipmap 1 +#endif /* GL_OES_fbo_render_mipmap */ + +#ifndef GL_OES_fragment_precision_high +#define GL_OES_fragment_precision_high 1 +#endif /* GL_OES_fragment_precision_high */ + +#ifndef GL_OES_geometry_point_size +#define GL_OES_geometry_point_size 1 +#endif /* GL_OES_geometry_point_size */ + +#ifndef GL_OES_geometry_shader +#define GL_OES_geometry_shader 1 +#define GL_GEOMETRY_SHADER_OES 0x8DD9 +#define GL_GEOMETRY_SHADER_BIT_OES 0x00000004 +#define GL_GEOMETRY_LINKED_VERTICES_OUT_OES 0x8916 +#define GL_GEOMETRY_LINKED_INPUT_TYPE_OES 0x8917 +#define GL_GEOMETRY_LINKED_OUTPUT_TYPE_OES 0x8918 +#define GL_GEOMETRY_SHADER_INVOCATIONS_OES 0x887F +#define GL_LAYER_PROVOKING_VERTEX_OES 0x825E +#define GL_LINES_ADJACENCY_OES 0x000A +#define GL_LINE_STRIP_ADJACENCY_OES 0x000B +#define GL_TRIANGLES_ADJACENCY_OES 0x000C +#define GL_TRIANGLE_STRIP_ADJACENCY_OES 0x000D +#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS_OES 0x8DDF +#define GL_MAX_GEOMETRY_UNIFORM_BLOCKS_OES 0x8A2C +#define GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS_OES 0x8A32 +#define GL_MAX_GEOMETRY_INPUT_COMPONENTS_OES 0x9123 +#define GL_MAX_GEOMETRY_OUTPUT_COMPONENTS_OES 0x9124 +#define GL_MAX_GEOMETRY_OUTPUT_VERTICES_OES 0x8DE0 +#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS_OES 0x8DE1 +#define GL_MAX_GEOMETRY_SHADER_INVOCATIONS_OES 0x8E5A +#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_OES 0x8C29 +#define GL_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS_OES 0x92CF +#define GL_MAX_GEOMETRY_ATOMIC_COUNTERS_OES 0x92D5 +#define GL_MAX_GEOMETRY_IMAGE_UNIFORMS_OES 0x90CD +#define GL_MAX_GEOMETRY_SHADER_STORAGE_BLOCKS_OES 0x90D7 +#define GL_FIRST_VERTEX_CONVENTION_OES 0x8E4D +#define GL_LAST_VERTEX_CONVENTION_OES 0x8E4E +#define GL_UNDEFINED_VERTEX_OES 0x8260 +#define GL_PRIMITIVES_GENERATED_OES 0x8C87 +#define GL_FRAMEBUFFER_DEFAULT_LAYERS_OES 0x9312 +#define GL_MAX_FRAMEBUFFER_LAYERS_OES 0x9317 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_OES 0x8DA8 +#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED_OES 0x8DA7 +#define GL_REFERENCED_BY_GEOMETRY_SHADER_OES 0x9309 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTUREOESPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferTextureOES (GLenum target, GLenum attachment, GLuint texture, GLint level); +#endif +#endif /* GL_OES_geometry_shader */ + +#ifndef GL_OES_get_program_binary +#define GL_OES_get_program_binary 1 +#define GL_PROGRAM_BINARY_LENGTH_OES 0x8741 +#define GL_NUM_PROGRAM_BINARY_FORMATS_OES 0x87FE +#define GL_PROGRAM_BINARY_FORMATS_OES 0x87FF +typedef void (GL_APIENTRYP PFNGLGETPROGRAMBINARYOESPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +typedef void (GL_APIENTRYP PFNGLPROGRAMBINARYOESPROC) (GLuint program, GLenum binaryFormat, const void *binary, GLint length); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetProgramBinaryOES (GLuint program, GLsizei bufSize, GLsizei *length, GLenum 
*binaryFormat, void *binary); +GL_APICALL void GL_APIENTRY glProgramBinaryOES (GLuint program, GLenum binaryFormat, const void *binary, GLint length); +#endif +#endif /* GL_OES_get_program_binary */ + +#ifndef GL_OES_gpu_shader5 +#define GL_OES_gpu_shader5 1 +#endif /* GL_OES_gpu_shader5 */ + +#ifndef GL_OES_mapbuffer +#define GL_OES_mapbuffer 1 +#define GL_WRITE_ONLY_OES 0x88B9 +#define GL_BUFFER_ACCESS_OES 0x88BB +#define GL_BUFFER_MAPPED_OES 0x88BC +#define GL_BUFFER_MAP_POINTER_OES 0x88BD +typedef void *(GL_APIENTRYP PFNGLMAPBUFFEROESPROC) (GLenum target, GLenum access); +typedef GLboolean (GL_APIENTRYP PFNGLUNMAPBUFFEROESPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVOESPROC) (GLenum target, GLenum pname, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void *GL_APIENTRY glMapBufferOES (GLenum target, GLenum access); +GL_APICALL GLboolean GL_APIENTRY glUnmapBufferOES (GLenum target); +GL_APICALL void GL_APIENTRY glGetBufferPointervOES (GLenum target, GLenum pname, void **params); +#endif +#endif /* GL_OES_mapbuffer */ + +#ifndef GL_OES_packed_depth_stencil +#define GL_OES_packed_depth_stencil 1 +#define GL_DEPTH_STENCIL_OES 0x84F9 +#define GL_UNSIGNED_INT_24_8_OES 0x84FA +#define GL_DEPTH24_STENCIL8_OES 0x88F0 +#endif /* GL_OES_packed_depth_stencil */ + +#ifndef GL_OES_primitive_bounding_box +#define GL_OES_primitive_bounding_box 1 +#define GL_PRIMITIVE_BOUNDING_BOX_OES 0x92BE +typedef void (GL_APIENTRYP PFNGLPRIMITIVEBOUNDINGBOXOESPROC) (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glPrimitiveBoundingBoxOES (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#endif +#endif /* GL_OES_primitive_bounding_box */ + +#ifndef GL_OES_required_internalformat +#define GL_OES_required_internalformat 1 +#define GL_ALPHA8_OES 0x803C +#define GL_DEPTH_COMPONENT16_OES 0x81A5 +#define GL_LUMINANCE4_ALPHA4_OES 0x8043 +#define GL_LUMINANCE8_ALPHA8_OES 0x8045 +#define GL_LUMINANCE8_OES 0x8040 +#define GL_RGBA4_OES 0x8056 +#define GL_RGB5_A1_OES 0x8057 +#define GL_RGB565_OES 0x8D62 +#define GL_RGB8_OES 0x8051 +#define GL_RGBA8_OES 0x8058 +#define GL_RGB10_EXT 0x8052 +#define GL_RGB10_A2_EXT 0x8059 +#endif /* GL_OES_required_internalformat */ + +#ifndef GL_OES_rgb8_rgba8 +#define GL_OES_rgb8_rgba8 1 +#endif /* GL_OES_rgb8_rgba8 */ + +#ifndef GL_OES_sample_shading +#define GL_OES_sample_shading 1 +#define GL_SAMPLE_SHADING_OES 0x8C36 +#define GL_MIN_SAMPLE_SHADING_VALUE_OES 0x8C37 +typedef void (GL_APIENTRYP PFNGLMINSAMPLESHADINGOESPROC) (GLfloat value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glMinSampleShadingOES (GLfloat value); +#endif +#endif /* GL_OES_sample_shading */ + +#ifndef GL_OES_sample_variables +#define GL_OES_sample_variables 1 +#endif /* GL_OES_sample_variables */ + +#ifndef GL_OES_shader_image_atomic +#define GL_OES_shader_image_atomic 1 +#endif /* GL_OES_shader_image_atomic */ + +#ifndef GL_OES_shader_io_blocks +#define GL_OES_shader_io_blocks 1 +#endif /* GL_OES_shader_io_blocks */ + +#ifndef GL_OES_shader_multisample_interpolation +#define GL_OES_shader_multisample_interpolation 1 +#define GL_MIN_FRAGMENT_INTERPOLATION_OFFSET_OES 0x8E5B +#define GL_MAX_FRAGMENT_INTERPOLATION_OFFSET_OES 0x8E5C +#define GL_FRAGMENT_INTERPOLATION_OFFSET_BITS_OES 0x8E5D +#endif /* GL_OES_shader_multisample_interpolation */ + +#ifndef GL_OES_standard_derivatives +#define 
GL_OES_standard_derivatives 1 +#define GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES 0x8B8B +#endif /* GL_OES_standard_derivatives */ + +#ifndef GL_OES_stencil1 +#define GL_OES_stencil1 1 +#define GL_STENCIL_INDEX1_OES 0x8D46 +#endif /* GL_OES_stencil1 */ + +#ifndef GL_OES_stencil4 +#define GL_OES_stencil4 1 +#define GL_STENCIL_INDEX4_OES 0x8D47 +#endif /* GL_OES_stencil4 */ + +#ifndef GL_OES_surfaceless_context +#define GL_OES_surfaceless_context 1 +#define GL_FRAMEBUFFER_UNDEFINED_OES 0x8219 +#endif /* GL_OES_surfaceless_context */ + +#ifndef GL_OES_tessellation_point_size +#define GL_OES_tessellation_point_size 1 +#endif /* GL_OES_tessellation_point_size */ + +#ifndef GL_OES_tessellation_shader +#define GL_OES_tessellation_shader 1 +#define GL_PATCHES_OES 0x000E +#define GL_PATCH_VERTICES_OES 0x8E72 +#define GL_TESS_CONTROL_OUTPUT_VERTICES_OES 0x8E75 +#define GL_TESS_GEN_MODE_OES 0x8E76 +#define GL_TESS_GEN_SPACING_OES 0x8E77 +#define GL_TESS_GEN_VERTEX_ORDER_OES 0x8E78 +#define GL_TESS_GEN_POINT_MODE_OES 0x8E79 +#define GL_ISOLINES_OES 0x8E7A +#define GL_QUADS_OES 0x0007 +#define GL_FRACTIONAL_ODD_OES 0x8E7B +#define GL_FRACTIONAL_EVEN_OES 0x8E7C +#define GL_MAX_PATCH_VERTICES_OES 0x8E7D +#define GL_MAX_TESS_GEN_LEVEL_OES 0x8E7E +#define GL_MAX_TESS_CONTROL_UNIFORM_COMPONENTS_OES 0x8E7F +#define GL_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS_OES 0x8E80 +#define GL_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS_OES 0x8E81 +#define GL_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS_OES 0x8E82 +#define GL_MAX_TESS_CONTROL_OUTPUT_COMPONENTS_OES 0x8E83 +#define GL_MAX_TESS_PATCH_COMPONENTS_OES 0x8E84 +#define GL_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS_OES 0x8E85 +#define GL_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS_OES 0x8E86 +#define GL_MAX_TESS_CONTROL_UNIFORM_BLOCKS_OES 0x8E89 +#define GL_MAX_TESS_EVALUATION_UNIFORM_BLOCKS_OES 0x8E8A +#define GL_MAX_TESS_CONTROL_INPUT_COMPONENTS_OES 0x886C +#define GL_MAX_TESS_EVALUATION_INPUT_COMPONENTS_OES 0x886D +#define GL_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS_OES 0x8E1E +#define GL_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS_OES 0x8E1F +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS_OES 0x92CD +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS_OES 0x92CE +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTERS_OES 0x92D3 +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTERS_OES 0x92D4 +#define GL_MAX_TESS_CONTROL_IMAGE_UNIFORMS_OES 0x90CB +#define GL_MAX_TESS_EVALUATION_IMAGE_UNIFORMS_OES 0x90CC +#define GL_MAX_TESS_CONTROL_SHADER_STORAGE_BLOCKS_OES 0x90D8 +#define GL_MAX_TESS_EVALUATION_SHADER_STORAGE_BLOCKS_OES 0x90D9 +#define GL_PRIMITIVE_RESTART_FOR_PATCHES_SUPPORTED_OES 0x8221 +#define GL_IS_PER_PATCH_OES 0x92E7 +#define GL_REFERENCED_BY_TESS_CONTROL_SHADER_OES 0x9307 +#define GL_REFERENCED_BY_TESS_EVALUATION_SHADER_OES 0x9308 +#define GL_TESS_CONTROL_SHADER_OES 0x8E88 +#define GL_TESS_EVALUATION_SHADER_OES 0x8E87 +#define GL_TESS_CONTROL_SHADER_BIT_OES 0x00000008 +#define GL_TESS_EVALUATION_SHADER_BIT_OES 0x00000010 +typedef void (GL_APIENTRYP PFNGLPATCHPARAMETERIOESPROC) (GLenum pname, GLint value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glPatchParameteriOES (GLenum pname, GLint value); +#endif +#endif /* GL_OES_tessellation_shader */ + +#ifndef GL_OES_texture_3D +#define GL_OES_texture_3D 1 +#define GL_TEXTURE_WRAP_R_OES 0x8072 +#define GL_TEXTURE_3D_OES 0x806F +#define GL_TEXTURE_BINDING_3D_OES 0x806A +#define GL_MAX_3D_TEXTURE_SIZE_OES 0x8073 +#define GL_SAMPLER_3D_OES 0x8B5F +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_OES 0x8CD4 +typedef void 
(GL_APIENTRYP PFNGLTEXIMAGE3DOESPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE3DOESPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE3DOESPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DOESPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DOESPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE3DOESPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexImage3DOES (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexSubImage3DOES (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glCopyTexSubImage3DOES (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glCompressedTexImage3DOES (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage3DOES (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glFramebufferTexture3DOES (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset); +#endif +#endif /* GL_OES_texture_3D */ + +#ifndef GL_OES_texture_border_clamp +#define GL_OES_texture_border_clamp 1 +#define GL_TEXTURE_BORDER_COLOR_OES 0x1004 +#define GL_CLAMP_TO_BORDER_OES 0x812D +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIIVOESPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIUIVOESPROC) (GLenum target, GLenum pname, const GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIIVOESPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIUIVOESPROC) (GLenum target, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIIVOESPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIUIVOESPROC) (GLuint sampler, GLenum pname, const GLuint *param); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIIVOESPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVOESPROC) (GLuint sampler, GLenum pname, GLuint *params); +#ifdef GL_GLEXT_PROTOTYPES 
+GL_APICALL void GL_APIENTRY glTexParameterIivOES (GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexParameterIuivOES (GLenum target, GLenum pname, const GLuint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIivOES (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIuivOES (GLenum target, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glSamplerParameterIivOES (GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterIuivOES (GLuint sampler, GLenum pname, const GLuint *param); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIivOES (GLuint sampler, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIuivOES (GLuint sampler, GLenum pname, GLuint *params); +#endif +#endif /* GL_OES_texture_border_clamp */ + +#ifndef GL_OES_texture_buffer +#define GL_OES_texture_buffer 1 +#define GL_TEXTURE_BUFFER_OES 0x8C2A +#define GL_TEXTURE_BUFFER_BINDING_OES 0x8C2A +#define GL_MAX_TEXTURE_BUFFER_SIZE_OES 0x8C2B +#define GL_TEXTURE_BINDING_BUFFER_OES 0x8C2C +#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING_OES 0x8C2D +#define GL_TEXTURE_BUFFER_OFFSET_ALIGNMENT_OES 0x919F +#define GL_SAMPLER_BUFFER_OES 0x8DC2 +#define GL_INT_SAMPLER_BUFFER_OES 0x8DD0 +#define GL_UNSIGNED_INT_SAMPLER_BUFFER_OES 0x8DD8 +#define GL_IMAGE_BUFFER_OES 0x9051 +#define GL_INT_IMAGE_BUFFER_OES 0x905C +#define GL_UNSIGNED_INT_IMAGE_BUFFER_OES 0x9067 +#define GL_TEXTURE_BUFFER_OFFSET_OES 0x919D +#define GL_TEXTURE_BUFFER_SIZE_OES 0x919E +typedef void (GL_APIENTRYP PFNGLTEXBUFFEROESPROC) (GLenum target, GLenum internalformat, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTEXBUFFERRANGEOESPROC) (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexBufferOES (GLenum target, GLenum internalformat, GLuint buffer); +GL_APICALL void GL_APIENTRY glTexBufferRangeOES (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +#endif +#endif /* GL_OES_texture_buffer */ + +#ifndef GL_OES_texture_compression_astc +#define GL_OES_texture_compression_astc 1 +#define GL_COMPRESSED_RGBA_ASTC_3x3x3_OES 0x93C0 +#define GL_COMPRESSED_RGBA_ASTC_4x3x3_OES 0x93C1 +#define GL_COMPRESSED_RGBA_ASTC_4x4x3_OES 0x93C2 +#define GL_COMPRESSED_RGBA_ASTC_4x4x4_OES 0x93C3 +#define GL_COMPRESSED_RGBA_ASTC_5x4x4_OES 0x93C4 +#define GL_COMPRESSED_RGBA_ASTC_5x5x4_OES 0x93C5 +#define GL_COMPRESSED_RGBA_ASTC_5x5x5_OES 0x93C6 +#define GL_COMPRESSED_RGBA_ASTC_6x5x5_OES 0x93C7 +#define GL_COMPRESSED_RGBA_ASTC_6x6x5_OES 0x93C8 +#define GL_COMPRESSED_RGBA_ASTC_6x6x6_OES 0x93C9 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_3x3x3_OES 0x93E0 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x3x3_OES 0x93E1 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4x3_OES 0x93E2 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4x4_OES 0x93E3 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4x4_OES 0x93E4 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5x4_OES 0x93E5 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5x5_OES 0x93E6 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5x5_OES 0x93E7 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6x5_OES 0x93E8 +#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6x6_OES 0x93E9 +#endif /* GL_OES_texture_compression_astc */ + +#ifndef GL_OES_texture_cube_map_array +#define GL_OES_texture_cube_map_array 1 +#define GL_TEXTURE_CUBE_MAP_ARRAY_OES 0x9009 +#define GL_TEXTURE_BINDING_CUBE_MAP_ARRAY_OES 0x900A +#define 
GL_SAMPLER_CUBE_MAP_ARRAY_OES 0x900C +#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW_OES 0x900D +#define GL_INT_SAMPLER_CUBE_MAP_ARRAY_OES 0x900E +#define GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY_OES 0x900F +#define GL_IMAGE_CUBE_MAP_ARRAY_OES 0x9054 +#define GL_INT_IMAGE_CUBE_MAP_ARRAY_OES 0x905F +#define GL_UNSIGNED_INT_IMAGE_CUBE_MAP_ARRAY_OES 0x906A +#endif /* GL_OES_texture_cube_map_array */ + +#ifndef GL_OES_texture_float +#define GL_OES_texture_float 1 +#endif /* GL_OES_texture_float */ + +#ifndef GL_OES_texture_float_linear +#define GL_OES_texture_float_linear 1 +#endif /* GL_OES_texture_float_linear */ + +#ifndef GL_OES_texture_half_float +#define GL_OES_texture_half_float 1 +#define GL_HALF_FLOAT_OES 0x8D61 +#endif /* GL_OES_texture_half_float */ + +#ifndef GL_OES_texture_half_float_linear +#define GL_OES_texture_half_float_linear 1 +#endif /* GL_OES_texture_half_float_linear */ + +#ifndef GL_OES_texture_npot +#define GL_OES_texture_npot 1 +#endif /* GL_OES_texture_npot */ + +#ifndef GL_OES_texture_stencil8 +#define GL_OES_texture_stencil8 1 +#define GL_STENCIL_INDEX_OES 0x1901 +#define GL_STENCIL_INDEX8_OES 0x8D48 +#endif /* GL_OES_texture_stencil8 */ + +#ifndef GL_OES_texture_storage_multisample_2d_array +#define GL_OES_texture_storage_multisample_2d_array 1 +#define GL_TEXTURE_2D_MULTISAMPLE_ARRAY_OES 0x9102 +#define GL_TEXTURE_BINDING_2D_MULTISAMPLE_ARRAY_OES 0x9105 +#define GL_SAMPLER_2D_MULTISAMPLE_ARRAY_OES 0x910B +#define GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY_OES 0x910C +#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY_OES 0x910D +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DMULTISAMPLEOESPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexStorage3DMultisampleOES (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations); +#endif +#endif /* GL_OES_texture_storage_multisample_2d_array */ + +#ifndef GL_OES_texture_view +#define GL_OES_texture_view 1 +#define GL_TEXTURE_VIEW_MIN_LEVEL_OES 0x82DB +#define GL_TEXTURE_VIEW_NUM_LEVELS_OES 0x82DC +#define GL_TEXTURE_VIEW_MIN_LAYER_OES 0x82DD +#define GL_TEXTURE_VIEW_NUM_LAYERS_OES 0x82DE +#define GL_TEXTURE_IMMUTABLE_LEVELS 0x82DF +typedef void (GL_APIENTRYP PFNGLTEXTUREVIEWOESPROC) (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTextureViewOES (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +#endif +#endif /* GL_OES_texture_view */ + +#ifndef GL_OES_vertex_array_object +#define GL_OES_vertex_array_object 1 +#define GL_VERTEX_ARRAY_BINDING_OES 0x85B5 +typedef void (GL_APIENTRYP PFNGLBINDVERTEXARRAYOESPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLDELETEVERTEXARRAYSOESPROC) (GLsizei n, const GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLGENVERTEXARRAYSOESPROC) (GLsizei n, GLuint *arrays); +typedef GLboolean (GL_APIENTRYP PFNGLISVERTEXARRAYOESPROC) (GLuint array); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBindVertexArrayOES (GLuint array); +GL_APICALL void GL_APIENTRY glDeleteVertexArraysOES (GLsizei n, const GLuint *arrays); +GL_APICALL void GL_APIENTRY glGenVertexArraysOES (GLsizei n, GLuint *arrays); +GL_APICALL GLboolean GL_APIENTRY 
glIsVertexArrayOES (GLuint array); +#endif +#endif /* GL_OES_vertex_array_object */ + +#ifndef GL_OES_vertex_half_float +#define GL_OES_vertex_half_float 1 +#endif /* GL_OES_vertex_half_float */ + +#ifndef GL_OES_vertex_type_10_10_10_2 +#define GL_OES_vertex_type_10_10_10_2 1 +#define GL_UNSIGNED_INT_10_10_10_2_OES 0x8DF6 +#define GL_INT_10_10_10_2_OES 0x8DF7 +#endif /* GL_OES_vertex_type_10_10_10_2 */ + +#ifndef GL_OES_viewport_array +#define GL_OES_viewport_array 1 +#define GL_MAX_VIEWPORTS_OES 0x825B +#define GL_VIEWPORT_SUBPIXEL_BITS_OES 0x825C +#define GL_VIEWPORT_BOUNDS_RANGE_OES 0x825D +#define GL_VIEWPORT_INDEX_PROVOKING_VERTEX_OES 0x825F +typedef void (GL_APIENTRYP PFNGLVIEWPORTARRAYVOESPROC) (GLuint first, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVIEWPORTINDEXEDFOESPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +typedef void (GL_APIENTRYP PFNGLVIEWPORTINDEXEDFVOESPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLSCISSORARRAYVOESPROC) (GLuint first, GLsizei count, const GLint *v); +typedef void (GL_APIENTRYP PFNGLSCISSORINDEXEDOESPROC) (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSCISSORINDEXEDVOESPROC) (GLuint index, const GLint *v); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEARRAYFVOESPROC) (GLuint first, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEINDEXEDFOESPROC) (GLuint index, GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLGETFLOATI_VOESPROC) (GLenum target, GLuint index, GLfloat *data); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glViewportArrayvOES (GLuint first, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glViewportIndexedfOES (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +GL_APICALL void GL_APIENTRY glViewportIndexedfvOES (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glScissorArrayvOES (GLuint first, GLsizei count, const GLint *v); +GL_APICALL void GL_APIENTRY glScissorIndexedOES (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glScissorIndexedvOES (GLuint index, const GLint *v); +GL_APICALL void GL_APIENTRY glDepthRangeArrayfvOES (GLuint first, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glDepthRangeIndexedfOES (GLuint index, GLfloat n, GLfloat f); +GL_APICALL void GL_APIENTRY glGetFloati_vOES (GLenum target, GLuint index, GLfloat *data); +#endif +#endif /* GL_OES_viewport_array */ + +#ifndef GL_AMD_compressed_3DC_texture +#define GL_AMD_compressed_3DC_texture 1 +#define GL_3DC_X_AMD 0x87F9 +#define GL_3DC_XY_AMD 0x87FA +#endif /* GL_AMD_compressed_3DC_texture */ + +#ifndef GL_AMD_compressed_ATC_texture +#define GL_AMD_compressed_ATC_texture 1 +#define GL_ATC_RGB_AMD 0x8C92 +#define GL_ATC_RGBA_EXPLICIT_ALPHA_AMD 0x8C93 +#define GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD 0x87EE +#endif /* GL_AMD_compressed_ATC_texture */ + +#ifndef GL_AMD_performance_monitor +#define GL_AMD_performance_monitor 1 +#define GL_COUNTER_TYPE_AMD 0x8BC0 +#define GL_COUNTER_RANGE_AMD 0x8BC1 +#define GL_UNSIGNED_INT64_AMD 0x8BC2 +#define GL_PERCENTAGE_AMD 0x8BC3 +#define GL_PERFMON_RESULT_AVAILABLE_AMD 0x8BC4 +#define GL_PERFMON_RESULT_SIZE_AMD 0x8BC5 +#define GL_PERFMON_RESULT_AMD 0x8BC6 +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORGROUPSAMDPROC) (GLint *numGroups, GLsizei groupsSize, GLuint *groups); +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORCOUNTERSAMDPROC) (GLuint group, GLint *numCounters, 
GLint *maxActiveCounters, GLsizei counterSize, GLuint *counters); +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORGROUPSTRINGAMDPROC) (GLuint group, GLsizei bufSize, GLsizei *length, GLchar *groupString); +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORCOUNTERSTRINGAMDPROC) (GLuint group, GLuint counter, GLsizei bufSize, GLsizei *length, GLchar *counterString); +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORCOUNTERINFOAMDPROC) (GLuint group, GLuint counter, GLenum pname, void *data); +typedef void (GL_APIENTRYP PFNGLGENPERFMONITORSAMDPROC) (GLsizei n, GLuint *monitors); +typedef void (GL_APIENTRYP PFNGLDELETEPERFMONITORSAMDPROC) (GLsizei n, GLuint *monitors); +typedef void (GL_APIENTRYP PFNGLSELECTPERFMONITORCOUNTERSAMDPROC) (GLuint monitor, GLboolean enable, GLuint group, GLint numCounters, GLuint *counterList); +typedef void (GL_APIENTRYP PFNGLBEGINPERFMONITORAMDPROC) (GLuint monitor); +typedef void (GL_APIENTRYP PFNGLENDPERFMONITORAMDPROC) (GLuint monitor); +typedef void (GL_APIENTRYP PFNGLGETPERFMONITORCOUNTERDATAAMDPROC) (GLuint monitor, GLenum pname, GLsizei dataSize, GLuint *data, GLint *bytesWritten); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetPerfMonitorGroupsAMD (GLint *numGroups, GLsizei groupsSize, GLuint *groups); +GL_APICALL void GL_APIENTRY glGetPerfMonitorCountersAMD (GLuint group, GLint *numCounters, GLint *maxActiveCounters, GLsizei counterSize, GLuint *counters); +GL_APICALL void GL_APIENTRY glGetPerfMonitorGroupStringAMD (GLuint group, GLsizei bufSize, GLsizei *length, GLchar *groupString); +GL_APICALL void GL_APIENTRY glGetPerfMonitorCounterStringAMD (GLuint group, GLuint counter, GLsizei bufSize, GLsizei *length, GLchar *counterString); +GL_APICALL void GL_APIENTRY glGetPerfMonitorCounterInfoAMD (GLuint group, GLuint counter, GLenum pname, void *data); +GL_APICALL void GL_APIENTRY glGenPerfMonitorsAMD (GLsizei n, GLuint *monitors); +GL_APICALL void GL_APIENTRY glDeletePerfMonitorsAMD (GLsizei n, GLuint *monitors); +GL_APICALL void GL_APIENTRY glSelectPerfMonitorCountersAMD (GLuint monitor, GLboolean enable, GLuint group, GLint numCounters, GLuint *counterList); +GL_APICALL void GL_APIENTRY glBeginPerfMonitorAMD (GLuint monitor); +GL_APICALL void GL_APIENTRY glEndPerfMonitorAMD (GLuint monitor); +GL_APICALL void GL_APIENTRY glGetPerfMonitorCounterDataAMD (GLuint monitor, GLenum pname, GLsizei dataSize, GLuint *data, GLint *bytesWritten); +#endif +#endif /* GL_AMD_performance_monitor */ + +#ifndef GL_AMD_program_binary_Z400 +#define GL_AMD_program_binary_Z400 1 +#define GL_Z400_BINARY_AMD 0x8740 +#endif /* GL_AMD_program_binary_Z400 */ + +#ifndef GL_ANDROID_extension_pack_es31a +#define GL_ANDROID_extension_pack_es31a 1 +#endif /* GL_ANDROID_extension_pack_es31a */ + +#ifndef GL_ANGLE_depth_texture +#define GL_ANGLE_depth_texture 1 +#endif /* GL_ANGLE_depth_texture */ + +#ifndef GL_ANGLE_framebuffer_blit +#define GL_ANGLE_framebuffer_blit 1 +#define GL_READ_FRAMEBUFFER_ANGLE 0x8CA8 +#define GL_DRAW_FRAMEBUFFER_ANGLE 0x8CA9 +#define GL_DRAW_FRAMEBUFFER_BINDING_ANGLE 0x8CA6 +#define GL_READ_FRAMEBUFFER_BINDING_ANGLE 0x8CAA +typedef void (GL_APIENTRYP PFNGLBLITFRAMEBUFFERANGLEPROC) (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBlitFramebufferANGLE (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#endif +#endif /* 
GL_ANGLE_framebuffer_blit */ + +#ifndef GL_ANGLE_framebuffer_multisample +#define GL_ANGLE_framebuffer_multisample 1 +#define GL_RENDERBUFFER_SAMPLES_ANGLE 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_ANGLE 0x8D56 +#define GL_MAX_SAMPLES_ANGLE 0x8D57 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEANGLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleANGLE (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +#endif +#endif /* GL_ANGLE_framebuffer_multisample */ + +#ifndef GL_ANGLE_instanced_arrays +#define GL_ANGLE_instanced_arrays 1 +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR_ANGLE 0x88FE +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDANGLEPROC) (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDANGLEPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISORANGLEPROC) (GLuint index, GLuint divisor); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawArraysInstancedANGLE (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedANGLE (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +GL_APICALL void GL_APIENTRY glVertexAttribDivisorANGLE (GLuint index, GLuint divisor); +#endif +#endif /* GL_ANGLE_instanced_arrays */ + +#ifndef GL_ANGLE_pack_reverse_row_order +#define GL_ANGLE_pack_reverse_row_order 1 +#define GL_PACK_REVERSE_ROW_ORDER_ANGLE 0x93A4 +#endif /* GL_ANGLE_pack_reverse_row_order */ + +#ifndef GL_ANGLE_program_binary +#define GL_ANGLE_program_binary 1 +#define GL_PROGRAM_BINARY_ANGLE 0x93A6 +#endif /* GL_ANGLE_program_binary */ + +#ifndef GL_ANGLE_texture_compression_dxt3 +#define GL_ANGLE_texture_compression_dxt3 1 +#define GL_COMPRESSED_RGBA_S3TC_DXT3_ANGLE 0x83F2 +#endif /* GL_ANGLE_texture_compression_dxt3 */ + +#ifndef GL_ANGLE_texture_compression_dxt5 +#define GL_ANGLE_texture_compression_dxt5 1 +#define GL_COMPRESSED_RGBA_S3TC_DXT5_ANGLE 0x83F3 +#endif /* GL_ANGLE_texture_compression_dxt5 */ + +#ifndef GL_ANGLE_texture_usage +#define GL_ANGLE_texture_usage 1 +#define GL_TEXTURE_USAGE_ANGLE 0x93A2 +#define GL_FRAMEBUFFER_ATTACHMENT_ANGLE 0x93A3 +#endif /* GL_ANGLE_texture_usage */ + +#ifndef GL_ANGLE_translated_shader_source +#define GL_ANGLE_translated_shader_source 1 +#define GL_TRANSLATED_SHADER_SOURCE_LENGTH_ANGLE 0x93A0 +typedef void (GL_APIENTRYP PFNGLGETTRANSLATEDSHADERSOURCEANGLEPROC) (GLuint shader, GLsizei bufsize, GLsizei *length, GLchar *source); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetTranslatedShaderSourceANGLE (GLuint shader, GLsizei bufsize, GLsizei *length, GLchar *source); +#endif +#endif /* GL_ANGLE_translated_shader_source */ + +#ifndef GL_APPLE_clip_distance +#define GL_APPLE_clip_distance 1 +#define GL_MAX_CLIP_DISTANCES_APPLE 0x0D32 +#define GL_CLIP_DISTANCE0_APPLE 0x3000 +#define GL_CLIP_DISTANCE1_APPLE 0x3001 +#define GL_CLIP_DISTANCE2_APPLE 0x3002 +#define GL_CLIP_DISTANCE3_APPLE 0x3003 +#define GL_CLIP_DISTANCE4_APPLE 0x3004 +#define GL_CLIP_DISTANCE5_APPLE 0x3005 +#define GL_CLIP_DISTANCE6_APPLE 0x3006 +#define GL_CLIP_DISTANCE7_APPLE 0x3007 +#endif /* GL_APPLE_clip_distance */ + +#ifndef GL_APPLE_color_buffer_packed_float +#define GL_APPLE_color_buffer_packed_float 1 +#endif /* 
GL_APPLE_color_buffer_packed_float */ + +#ifndef GL_APPLE_copy_texture_levels +#define GL_APPLE_copy_texture_levels 1 +typedef void (GL_APIENTRYP PFNGLCOPYTEXTURELEVELSAPPLEPROC) (GLuint destinationTexture, GLuint sourceTexture, GLint sourceBaseLevel, GLsizei sourceLevelCount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCopyTextureLevelsAPPLE (GLuint destinationTexture, GLuint sourceTexture, GLint sourceBaseLevel, GLsizei sourceLevelCount); +#endif +#endif /* GL_APPLE_copy_texture_levels */ + +#ifndef GL_APPLE_framebuffer_multisample +#define GL_APPLE_framebuffer_multisample 1 +#define GL_RENDERBUFFER_SAMPLES_APPLE 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_APPLE 0x8D56 +#define GL_MAX_SAMPLES_APPLE 0x8D57 +#define GL_READ_FRAMEBUFFER_APPLE 0x8CA8 +#define GL_DRAW_FRAMEBUFFER_APPLE 0x8CA9 +#define GL_DRAW_FRAMEBUFFER_BINDING_APPLE 0x8CA6 +#define GL_READ_FRAMEBUFFER_BINDING_APPLE 0x8CAA +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEAPPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLRESOLVEMULTISAMPLEFRAMEBUFFERAPPLEPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleAPPLE (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glResolveMultisampleFramebufferAPPLE (void); +#endif +#endif /* GL_APPLE_framebuffer_multisample */ + +#ifndef GL_APPLE_rgb_422 +#define GL_APPLE_rgb_422 1 +#define GL_RGB_422_APPLE 0x8A1F +#define GL_UNSIGNED_SHORT_8_8_APPLE 0x85BA +#define GL_UNSIGNED_SHORT_8_8_REV_APPLE 0x85BB +#define GL_RGB_RAW_422_APPLE 0x8A51 +#endif /* GL_APPLE_rgb_422 */ + +#ifndef GL_APPLE_sync +#define GL_APPLE_sync 1 +#define GL_SYNC_OBJECT_APPLE 0x8A53 +#define GL_MAX_SERVER_WAIT_TIMEOUT_APPLE 0x9111 +#define GL_OBJECT_TYPE_APPLE 0x9112 +#define GL_SYNC_CONDITION_APPLE 0x9113 +#define GL_SYNC_STATUS_APPLE 0x9114 +#define GL_SYNC_FLAGS_APPLE 0x9115 +#define GL_SYNC_FENCE_APPLE 0x9116 +#define GL_SYNC_GPU_COMMANDS_COMPLETE_APPLE 0x9117 +#define GL_UNSIGNALED_APPLE 0x9118 +#define GL_SIGNALED_APPLE 0x9119 +#define GL_ALREADY_SIGNALED_APPLE 0x911A +#define GL_TIMEOUT_EXPIRED_APPLE 0x911B +#define GL_CONDITION_SATISFIED_APPLE 0x911C +#define GL_WAIT_FAILED_APPLE 0x911D +#define GL_SYNC_FLUSH_COMMANDS_BIT_APPLE 0x00000001 +#define GL_TIMEOUT_IGNORED_APPLE 0xFFFFFFFFFFFFFFFFull +typedef GLsync (GL_APIENTRYP PFNGLFENCESYNCAPPLEPROC) (GLenum condition, GLbitfield flags); +typedef GLboolean (GL_APIENTRYP PFNGLISSYNCAPPLEPROC) (GLsync sync); +typedef void (GL_APIENTRYP PFNGLDELETESYNCAPPLEPROC) (GLsync sync); +typedef GLenum (GL_APIENTRYP PFNGLCLIENTWAITSYNCAPPLEPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLWAITSYNCAPPLEPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VAPPLEPROC) (GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGETSYNCIVAPPLEPROC) (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLsync GL_APIENTRY glFenceSyncAPPLE (GLenum condition, GLbitfield flags); +GL_APICALL GLboolean GL_APIENTRY glIsSyncAPPLE (GLsync sync); +GL_APICALL void GL_APIENTRY glDeleteSyncAPPLE (GLsync sync); +GL_APICALL GLenum GL_APIENTRY glClientWaitSyncAPPLE (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glWaitSyncAPPLE (GLsync sync, GLbitfield flags, GLuint64 timeout); 
+GL_APICALL void GL_APIENTRY glGetInteger64vAPPLE (GLenum pname, GLint64 *params); +GL_APICALL void GL_APIENTRY glGetSyncivAPPLE (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +#endif +#endif /* GL_APPLE_sync */ + +#ifndef GL_APPLE_texture_format_BGRA8888 +#define GL_APPLE_texture_format_BGRA8888 1 +#define GL_BGRA_EXT 0x80E1 +#define GL_BGRA8_EXT 0x93A1 +#endif /* GL_APPLE_texture_format_BGRA8888 */ + +#ifndef GL_APPLE_texture_max_level +#define GL_APPLE_texture_max_level 1 +#define GL_TEXTURE_MAX_LEVEL_APPLE 0x813D +#endif /* GL_APPLE_texture_max_level */ + +#ifndef GL_APPLE_texture_packed_float +#define GL_APPLE_texture_packed_float 1 +#define GL_UNSIGNED_INT_10F_11F_11F_REV_APPLE 0x8C3B +#define GL_UNSIGNED_INT_5_9_9_9_REV_APPLE 0x8C3E +#define GL_R11F_G11F_B10F_APPLE 0x8C3A +#define GL_RGB9_E5_APPLE 0x8C3D +#endif /* GL_APPLE_texture_packed_float */ + +#ifndef GL_ARM_mali_program_binary +#define GL_ARM_mali_program_binary 1 +#define GL_MALI_PROGRAM_BINARY_ARM 0x8F61 +#endif /* GL_ARM_mali_program_binary */ + +#ifndef GL_ARM_mali_shader_binary +#define GL_ARM_mali_shader_binary 1 +#define GL_MALI_SHADER_BINARY_ARM 0x8F60 +#endif /* GL_ARM_mali_shader_binary */ + +#ifndef GL_ARM_rgba8 +#define GL_ARM_rgba8 1 +#endif /* GL_ARM_rgba8 */ + +#ifndef GL_ARM_shader_framebuffer_fetch +#define GL_ARM_shader_framebuffer_fetch 1 +#define GL_FETCH_PER_SAMPLE_ARM 0x8F65 +#define GL_FRAGMENT_SHADER_FRAMEBUFFER_FETCH_MRT_ARM 0x8F66 +#endif /* GL_ARM_shader_framebuffer_fetch */ + +#ifndef GL_ARM_shader_framebuffer_fetch_depth_stencil +#define GL_ARM_shader_framebuffer_fetch_depth_stencil 1 +#endif /* GL_ARM_shader_framebuffer_fetch_depth_stencil */ + +#ifndef GL_DMP_program_binary +#define GL_DMP_program_binary 1 +#define GL_SMAPHS30_PROGRAM_BINARY_DMP 0x9251 +#define GL_SMAPHS_PROGRAM_BINARY_DMP 0x9252 +#define GL_DMP_PROGRAM_BINARY_DMP 0x9253 +#endif /* GL_DMP_program_binary */ + +#ifndef GL_DMP_shader_binary +#define GL_DMP_shader_binary 1 +#define GL_SHADER_BINARY_DMP 0x9250 +#endif /* GL_DMP_shader_binary */ + +#ifndef GL_EXT_EGL_image_array +#define GL_EXT_EGL_image_array 1 +#endif /* GL_EXT_EGL_image_array */ + +#ifndef GL_EXT_EGL_image_storage +#define GL_EXT_EGL_image_storage 1 +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETTEXSTORAGEEXTPROC) (GLenum target, GLeglImageOES image, const GLint* attrib_list); +typedef void (GL_APIENTRYP PFNGLEGLIMAGETARGETTEXTURESTORAGEEXTPROC) (GLuint texture, GLeglImageOES image, const GLint* attrib_list); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glEGLImageTargetTexStorageEXT (GLenum target, GLeglImageOES image, const GLint* attrib_list); +GL_APICALL void GL_APIENTRY glEGLImageTargetTextureStorageEXT (GLuint texture, GLeglImageOES image, const GLint* attrib_list); +#endif +#endif /* GL_EXT_EGL_image_storage */ + +#ifndef GL_EXT_YUV_target +#define GL_EXT_YUV_target 1 +#define GL_SAMPLER_EXTERNAL_2D_Y2Y_EXT 0x8BE7 +#endif /* GL_EXT_YUV_target */ + +#ifndef GL_EXT_base_instance +#define GL_EXT_base_instance 1 +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDBASEINSTANCEEXTPROC) (GLenum mode, GLint first, GLsizei count, GLsizei instancecount, GLuint baseinstance); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEINSTANCEEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLuint baseinstance); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXBASEINSTANCEEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, 
GLint basevertex, GLuint baseinstance); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawArraysInstancedBaseInstanceEXT (GLenum mode, GLint first, GLsizei count, GLsizei instancecount, GLuint baseinstance); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseInstanceEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLuint baseinstance); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseVertexBaseInstanceEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex, GLuint baseinstance); +#endif +#endif /* GL_EXT_base_instance */ + +#ifndef GL_EXT_blend_func_extended +#define GL_EXT_blend_func_extended 1 +#define GL_SRC1_COLOR_EXT 0x88F9 +#define GL_SRC1_ALPHA_EXT 0x8589 +#define GL_ONE_MINUS_SRC1_COLOR_EXT 0x88FA +#define GL_ONE_MINUS_SRC1_ALPHA_EXT 0x88FB +#define GL_SRC_ALPHA_SATURATE_EXT 0x0308 +#define GL_LOCATION_INDEX_EXT 0x930F +#define GL_MAX_DUAL_SOURCE_DRAW_BUFFERS_EXT 0x88FC +typedef void (GL_APIENTRYP PFNGLBINDFRAGDATALOCATIONINDEXEDEXTPROC) (GLuint program, GLuint colorNumber, GLuint index, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLBINDFRAGDATALOCATIONEXTPROC) (GLuint program, GLuint color, const GLchar *name); +typedef GLint (GL_APIENTRYP PFNGLGETPROGRAMRESOURCELOCATIONINDEXEXTPROC) (GLuint program, GLenum programInterface, const GLchar *name); +typedef GLint (GL_APIENTRYP PFNGLGETFRAGDATAINDEXEXTPROC) (GLuint program, const GLchar *name); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBindFragDataLocationIndexedEXT (GLuint program, GLuint colorNumber, GLuint index, const GLchar *name); +GL_APICALL void GL_APIENTRY glBindFragDataLocationEXT (GLuint program, GLuint color, const GLchar *name); +GL_APICALL GLint GL_APIENTRY glGetProgramResourceLocationIndexEXT (GLuint program, GLenum programInterface, const GLchar *name); +GL_APICALL GLint GL_APIENTRY glGetFragDataIndexEXT (GLuint program, const GLchar *name); +#endif +#endif /* GL_EXT_blend_func_extended */ + +#ifndef GL_EXT_blend_minmax +#define GL_EXT_blend_minmax 1 +#define GL_MIN_EXT 0x8007 +#define GL_MAX_EXT 0x8008 +#endif /* GL_EXT_blend_minmax */ + +#ifndef GL_EXT_buffer_storage +#define GL_EXT_buffer_storage 1 +#define GL_MAP_READ_BIT 0x0001 +#define GL_MAP_WRITE_BIT 0x0002 +#define GL_MAP_PERSISTENT_BIT_EXT 0x0040 +#define GL_MAP_COHERENT_BIT_EXT 0x0080 +#define GL_DYNAMIC_STORAGE_BIT_EXT 0x0100 +#define GL_CLIENT_STORAGE_BIT_EXT 0x0200 +#define GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT_EXT 0x00004000 +#define GL_BUFFER_IMMUTABLE_STORAGE_EXT 0x821F +#define GL_BUFFER_STORAGE_FLAGS_EXT 0x8220 +typedef void (GL_APIENTRYP PFNGLBUFFERSTORAGEEXTPROC) (GLenum target, GLsizeiptr size, const void *data, GLbitfield flags); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBufferStorageEXT (GLenum target, GLsizeiptr size, const void *data, GLbitfield flags); +#endif +#endif /* GL_EXT_buffer_storage */ + +#ifndef GL_EXT_clear_texture +#define GL_EXT_clear_texture 1 +typedef void (GL_APIENTRYP PFNGLCLEARTEXIMAGEEXTPROC) (GLuint texture, GLint level, GLenum format, GLenum type, const void *data); +typedef void (GL_APIENTRYP PFNGLCLEARTEXSUBIMAGEEXTPROC) (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *data); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glClearTexImageEXT (GLuint texture, GLint level, GLenum format, GLenum type, const void *data); +GL_APICALL void GL_APIENTRY 
glClearTexSubImageEXT (GLuint texture, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *data); +#endif +#endif /* GL_EXT_clear_texture */ + +#ifndef GL_EXT_clip_control +#define GL_EXT_clip_control 1 +#define GL_LOWER_LEFT_EXT 0x8CA1 +#define GL_UPPER_LEFT_EXT 0x8CA2 +#define GL_NEGATIVE_ONE_TO_ONE_EXT 0x935E +#define GL_ZERO_TO_ONE_EXT 0x935F +#define GL_CLIP_ORIGIN_EXT 0x935C +#define GL_CLIP_DEPTH_MODE_EXT 0x935D +typedef void (GL_APIENTRYP PFNGLCLIPCONTROLEXTPROC) (GLenum origin, GLenum depth); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glClipControlEXT (GLenum origin, GLenum depth); +#endif +#endif /* GL_EXT_clip_control */ + +#ifndef GL_EXT_clip_cull_distance +#define GL_EXT_clip_cull_distance 1 +#define GL_MAX_CLIP_DISTANCES_EXT 0x0D32 +#define GL_MAX_CULL_DISTANCES_EXT 0x82F9 +#define GL_MAX_COMBINED_CLIP_AND_CULL_DISTANCES_EXT 0x82FA +#define GL_CLIP_DISTANCE0_EXT 0x3000 +#define GL_CLIP_DISTANCE1_EXT 0x3001 +#define GL_CLIP_DISTANCE2_EXT 0x3002 +#define GL_CLIP_DISTANCE3_EXT 0x3003 +#define GL_CLIP_DISTANCE4_EXT 0x3004 +#define GL_CLIP_DISTANCE5_EXT 0x3005 +#define GL_CLIP_DISTANCE6_EXT 0x3006 +#define GL_CLIP_DISTANCE7_EXT 0x3007 +#endif /* GL_EXT_clip_cull_distance */ + +#ifndef GL_EXT_color_buffer_float +#define GL_EXT_color_buffer_float 1 +#endif /* GL_EXT_color_buffer_float */ + +#ifndef GL_EXT_color_buffer_half_float +#define GL_EXT_color_buffer_half_float 1 +#define GL_RGBA16F_EXT 0x881A +#define GL_RGB16F_EXT 0x881B +#define GL_RG16F_EXT 0x822F +#define GL_R16F_EXT 0x822D +#define GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE_EXT 0x8211 +#define GL_UNSIGNED_NORMALIZED_EXT 0x8C17 +#endif /* GL_EXT_color_buffer_half_float */ + +#ifndef GL_EXT_conservative_depth +#define GL_EXT_conservative_depth 1 +#endif /* GL_EXT_conservative_depth */ + +#ifndef GL_EXT_copy_image +#define GL_EXT_copy_image 1 +typedef void (GL_APIENTRYP PFNGLCOPYIMAGESUBDATAEXTPROC) (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCopyImageSubDataEXT (GLuint srcName, GLenum srcTarget, GLint srcLevel, GLint srcX, GLint srcY, GLint srcZ, GLuint dstName, GLenum dstTarget, GLint dstLevel, GLint dstX, GLint dstY, GLint dstZ, GLsizei srcWidth, GLsizei srcHeight, GLsizei srcDepth); +#endif +#endif /* GL_EXT_copy_image */ + +#ifndef GL_EXT_debug_label +#define GL_EXT_debug_label 1 +#define GL_PROGRAM_PIPELINE_OBJECT_EXT 0x8A4F +#define GL_PROGRAM_OBJECT_EXT 0x8B40 +#define GL_SHADER_OBJECT_EXT 0x8B48 +#define GL_BUFFER_OBJECT_EXT 0x9151 +#define GL_QUERY_OBJECT_EXT 0x9153 +#define GL_VERTEX_ARRAY_OBJECT_EXT 0x9154 +#define GL_TRANSFORM_FEEDBACK 0x8E22 +typedef void (GL_APIENTRYP PFNGLLABELOBJECTEXTPROC) (GLenum type, GLuint object, GLsizei length, const GLchar *label); +typedef void (GL_APIENTRYP PFNGLGETOBJECTLABELEXTPROC) (GLenum type, GLuint object, GLsizei bufSize, GLsizei *length, GLchar *label); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glLabelObjectEXT (GLenum type, GLuint object, GLsizei length, const GLchar *label); +GL_APICALL void GL_APIENTRY glGetObjectLabelEXT (GLenum type, GLuint object, GLsizei bufSize, GLsizei *length, GLchar *label); +#endif +#endif /* GL_EXT_debug_label */ + +#ifndef GL_EXT_debug_marker +#define GL_EXT_debug_marker 1 +typedef 
void (GL_APIENTRYP PFNGLINSERTEVENTMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (GL_APIENTRYP PFNGLPUSHGROUPMARKEREXTPROC) (GLsizei length, const GLchar *marker); +typedef void (GL_APIENTRYP PFNGLPOPGROUPMARKEREXTPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glInsertEventMarkerEXT (GLsizei length, const GLchar *marker); +GL_APICALL void GL_APIENTRY glPushGroupMarkerEXT (GLsizei length, const GLchar *marker); +GL_APICALL void GL_APIENTRY glPopGroupMarkerEXT (void); +#endif +#endif /* GL_EXT_debug_marker */ + +#ifndef GL_EXT_discard_framebuffer +#define GL_EXT_discard_framebuffer 1 +#define GL_COLOR_EXT 0x1800 +#define GL_DEPTH_EXT 0x1801 +#define GL_STENCIL_EXT 0x1802 +typedef void (GL_APIENTRYP PFNGLDISCARDFRAMEBUFFEREXTPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDiscardFramebufferEXT (GLenum target, GLsizei numAttachments, const GLenum *attachments); +#endif +#endif /* GL_EXT_discard_framebuffer */ + +#ifndef GL_EXT_disjoint_timer_query +#define GL_EXT_disjoint_timer_query 1 +#define GL_QUERY_COUNTER_BITS_EXT 0x8864 +#define GL_CURRENT_QUERY_EXT 0x8865 +#define GL_QUERY_RESULT_EXT 0x8866 +#define GL_QUERY_RESULT_AVAILABLE_EXT 0x8867 +#define GL_TIME_ELAPSED_EXT 0x88BF +#define GL_TIMESTAMP_EXT 0x8E28 +#define GL_GPU_DISJOINT_EXT 0x8FBB +typedef void (GL_APIENTRYP PFNGLGENQUERIESEXTPROC) (GLsizei n, GLuint *ids); +typedef void (GL_APIENTRYP PFNGLDELETEQUERIESEXTPROC) (GLsizei n, const GLuint *ids); +typedef GLboolean (GL_APIENTRYP PFNGLISQUERYEXTPROC) (GLuint id); +typedef void (GL_APIENTRYP PFNGLBEGINQUERYEXTPROC) (GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLENDQUERYEXTPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLQUERYCOUNTEREXTPROC) (GLuint id, GLenum target); +typedef void (GL_APIENTRYP PFNGLGETQUERYIVEXTPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTIVEXTPROC) (GLuint id, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUIVEXTPROC) (GLuint id, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTI64VEXTPROC) (GLuint id, GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUI64VEXTPROC) (GLuint id, GLenum pname, GLuint64 *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGenQueriesEXT (GLsizei n, GLuint *ids); +GL_APICALL void GL_APIENTRY glDeleteQueriesEXT (GLsizei n, const GLuint *ids); +GL_APICALL GLboolean GL_APIENTRY glIsQueryEXT (GLuint id); +GL_APICALL void GL_APIENTRY glBeginQueryEXT (GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glEndQueryEXT (GLenum target); +GL_APICALL void GL_APIENTRY glQueryCounterEXT (GLuint id, GLenum target); +GL_APICALL void GL_APIENTRY glGetQueryivEXT (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectivEXT (GLuint id, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectuivEXT (GLuint id, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjecti64vEXT (GLuint id, GLenum pname, GLint64 *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectui64vEXT (GLuint id, GLenum pname, GLuint64 *params); +#endif +#endif /* GL_EXT_disjoint_timer_query */ + +#ifndef GL_EXT_draw_buffers +#define GL_EXT_draw_buffers 1 +#define GL_MAX_COLOR_ATTACHMENTS_EXT 0x8CDF +#define GL_MAX_DRAW_BUFFERS_EXT 0x8824 +#define GL_DRAW_BUFFER0_EXT 0x8825 +#define GL_DRAW_BUFFER1_EXT 
0x8826 +#define GL_DRAW_BUFFER2_EXT 0x8827 +#define GL_DRAW_BUFFER3_EXT 0x8828 +#define GL_DRAW_BUFFER4_EXT 0x8829 +#define GL_DRAW_BUFFER5_EXT 0x882A +#define GL_DRAW_BUFFER6_EXT 0x882B +#define GL_DRAW_BUFFER7_EXT 0x882C +#define GL_DRAW_BUFFER8_EXT 0x882D +#define GL_DRAW_BUFFER9_EXT 0x882E +#define GL_DRAW_BUFFER10_EXT 0x882F +#define GL_DRAW_BUFFER11_EXT 0x8830 +#define GL_DRAW_BUFFER12_EXT 0x8831 +#define GL_DRAW_BUFFER13_EXT 0x8832 +#define GL_DRAW_BUFFER14_EXT 0x8833 +#define GL_DRAW_BUFFER15_EXT 0x8834 +#define GL_COLOR_ATTACHMENT0_EXT 0x8CE0 +#define GL_COLOR_ATTACHMENT1_EXT 0x8CE1 +#define GL_COLOR_ATTACHMENT2_EXT 0x8CE2 +#define GL_COLOR_ATTACHMENT3_EXT 0x8CE3 +#define GL_COLOR_ATTACHMENT4_EXT 0x8CE4 +#define GL_COLOR_ATTACHMENT5_EXT 0x8CE5 +#define GL_COLOR_ATTACHMENT6_EXT 0x8CE6 +#define GL_COLOR_ATTACHMENT7_EXT 0x8CE7 +#define GL_COLOR_ATTACHMENT8_EXT 0x8CE8 +#define GL_COLOR_ATTACHMENT9_EXT 0x8CE9 +#define GL_COLOR_ATTACHMENT10_EXT 0x8CEA +#define GL_COLOR_ATTACHMENT11_EXT 0x8CEB +#define GL_COLOR_ATTACHMENT12_EXT 0x8CEC +#define GL_COLOR_ATTACHMENT13_EXT 0x8CED +#define GL_COLOR_ATTACHMENT14_EXT 0x8CEE +#define GL_COLOR_ATTACHMENT15_EXT 0x8CEF +typedef void (GL_APIENTRYP PFNGLDRAWBUFFERSEXTPROC) (GLsizei n, const GLenum *bufs); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawBuffersEXT (GLsizei n, const GLenum *bufs); +#endif +#endif /* GL_EXT_draw_buffers */ + +#ifndef GL_EXT_draw_buffers_indexed +#define GL_EXT_draw_buffers_indexed 1 +typedef void (GL_APIENTRYP PFNGLENABLEIEXTPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLDISABLEIEXTPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONIEXTPROC) (GLuint buf, GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEIEXTPROC) (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCIEXTPROC) (GLuint buf, GLenum src, GLenum dst); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEIEXTPROC) (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +typedef void (GL_APIENTRYP PFNGLCOLORMASKIEXTPROC) (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDIEXTPROC) (GLenum target, GLuint index); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glEnableiEXT (GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glDisableiEXT (GLenum target, GLuint index); +GL_APICALL void GL_APIENTRY glBlendEquationiEXT (GLuint buf, GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparateiEXT (GLuint buf, GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFunciEXT (GLuint buf, GLenum src, GLenum dst); +GL_APICALL void GL_APIENTRY glBlendFuncSeparateiEXT (GLuint buf, GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha); +GL_APICALL void GL_APIENTRY glColorMaskiEXT (GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a); +GL_APICALL GLboolean GL_APIENTRY glIsEnablediEXT (GLenum target, GLuint index); +#endif +#endif /* GL_EXT_draw_buffers_indexed */ + +#ifndef GL_EXT_draw_elements_base_vertex +#define GL_EXT_draw_elements_base_vertex 1 +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSBASEVERTEXEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSBASEVERTEXEXTPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +typedef void (GL_APIENTRYP 
PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawElementsBaseVertexEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawRangeElementsBaseVertexEXT (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedBaseVertexEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex); +#endif +#endif /* GL_EXT_draw_elements_base_vertex */ + +#ifndef GL_EXT_draw_instanced +#define GL_EXT_draw_instanced 1 +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDEXTPROC) (GLenum mode, GLint start, GLsizei count, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDEXTPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawArraysInstancedEXT (GLenum mode, GLint start, GLsizei count, GLsizei primcount); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedEXT (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#endif +#endif /* GL_EXT_draw_instanced */ + +#ifndef GL_EXT_draw_transform_feedback +#define GL_EXT_draw_transform_feedback 1 +typedef void (GL_APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKEXTPROC) (GLenum mode, GLuint id); +typedef void (GL_APIENTRYP PFNGLDRAWTRANSFORMFEEDBACKINSTANCEDEXTPROC) (GLenum mode, GLuint id, GLsizei instancecount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawTransformFeedbackEXT (GLenum mode, GLuint id); +GL_APICALL void GL_APIENTRY glDrawTransformFeedbackInstancedEXT (GLenum mode, GLuint id, GLsizei instancecount); +#endif +#endif /* GL_EXT_draw_transform_feedback */ + +#ifndef GL_EXT_external_buffer +#define GL_EXT_external_buffer 1 +typedef void *GLeglClientBufferEXT; +typedef void (GL_APIENTRYP PFNGLBUFFERSTORAGEEXTERNALEXTPROC) (GLenum target, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +typedef void (GL_APIENTRYP PFNGLNAMEDBUFFERSTORAGEEXTERNALEXTPROC) (GLuint buffer, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBufferStorageExternalEXT (GLenum target, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +GL_APICALL void GL_APIENTRY glNamedBufferStorageExternalEXT (GLuint buffer, GLintptr offset, GLsizeiptr size, GLeglClientBufferEXT clientBuffer, GLbitfield flags); +#endif +#endif /* GL_EXT_external_buffer */ + +#ifndef GL_EXT_float_blend +#define GL_EXT_float_blend 1 +#endif /* GL_EXT_float_blend */ + +#ifndef GL_EXT_geometry_point_size +#define GL_EXT_geometry_point_size 1 +#endif /* GL_EXT_geometry_point_size */ + +#ifndef GL_EXT_geometry_shader +#define GL_EXT_geometry_shader 1 +#define GL_GEOMETRY_SHADER_EXT 0x8DD9 +#define GL_GEOMETRY_SHADER_BIT_EXT 0x00000004 +#define GL_GEOMETRY_LINKED_VERTICES_OUT_EXT 0x8916 +#define GL_GEOMETRY_LINKED_INPUT_TYPE_EXT 0x8917 +#define GL_GEOMETRY_LINKED_OUTPUT_TYPE_EXT 0x8918 +#define GL_GEOMETRY_SHADER_INVOCATIONS_EXT 0x887F +#define GL_LAYER_PROVOKING_VERTEX_EXT 0x825E +#define GL_LINES_ADJACENCY_EXT 0x000A +#define GL_LINE_STRIP_ADJACENCY_EXT 0x000B +#define GL_TRIANGLES_ADJACENCY_EXT 0x000C +#define 
GL_TRIANGLE_STRIP_ADJACENCY_EXT 0x000D +#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS_EXT 0x8DDF +#define GL_MAX_GEOMETRY_UNIFORM_BLOCKS_EXT 0x8A2C +#define GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS_EXT 0x8A32 +#define GL_MAX_GEOMETRY_INPUT_COMPONENTS_EXT 0x9123 +#define GL_MAX_GEOMETRY_OUTPUT_COMPONENTS_EXT 0x9124 +#define GL_MAX_GEOMETRY_OUTPUT_VERTICES_EXT 0x8DE0 +#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS_EXT 0x8DE1 +#define GL_MAX_GEOMETRY_SHADER_INVOCATIONS_EXT 0x8E5A +#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_EXT 0x8C29 +#define GL_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS_EXT 0x92CF +#define GL_MAX_GEOMETRY_ATOMIC_COUNTERS_EXT 0x92D5 +#define GL_MAX_GEOMETRY_IMAGE_UNIFORMS_EXT 0x90CD +#define GL_MAX_GEOMETRY_SHADER_STORAGE_BLOCKS_EXT 0x90D7 +#define GL_FIRST_VERTEX_CONVENTION_EXT 0x8E4D +#define GL_LAST_VERTEX_CONVENTION_EXT 0x8E4E +#define GL_UNDEFINED_VERTEX_EXT 0x8260 +#define GL_PRIMITIVES_GENERATED_EXT 0x8C87 +#define GL_FRAMEBUFFER_DEFAULT_LAYERS_EXT 0x9312 +#define GL_MAX_FRAMEBUFFER_LAYERS_EXT 0x9317 +#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_EXT 0x8DA8 +#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED_EXT 0x8DA7 +#define GL_REFERENCED_BY_GEOMETRY_SHADER_EXT 0x9309 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTUREEXTPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferTextureEXT (GLenum target, GLenum attachment, GLuint texture, GLint level); +#endif +#endif /* GL_EXT_geometry_shader */ + +#ifndef GL_EXT_gpu_shader5 +#define GL_EXT_gpu_shader5 1 +#endif /* GL_EXT_gpu_shader5 */ + +#ifndef GL_EXT_instanced_arrays +#define GL_EXT_instanced_arrays 1 +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR_EXT 0x88FE +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISOREXTPROC) (GLuint index, GLuint divisor); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glVertexAttribDivisorEXT (GLuint index, GLuint divisor); +#endif +#endif /* GL_EXT_instanced_arrays */ + +#ifndef GL_EXT_map_buffer_range +#define GL_EXT_map_buffer_range 1 +#define GL_MAP_READ_BIT_EXT 0x0001 +#define GL_MAP_WRITE_BIT_EXT 0x0002 +#define GL_MAP_INVALIDATE_RANGE_BIT_EXT 0x0004 +#define GL_MAP_INVALIDATE_BUFFER_BIT_EXT 0x0008 +#define GL_MAP_FLUSH_EXPLICIT_BIT_EXT 0x0010 +#define GL_MAP_UNSYNCHRONIZED_BIT_EXT 0x0020 +typedef void *(GL_APIENTRYP PFNGLMAPBUFFERRANGEEXTPROC) (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (GL_APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEEXTPROC) (GLenum target, GLintptr offset, GLsizeiptr length); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void *GL_APIENTRY glMapBufferRangeEXT (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GL_APICALL void GL_APIENTRY glFlushMappedBufferRangeEXT (GLenum target, GLintptr offset, GLsizeiptr length); +#endif +#endif /* GL_EXT_map_buffer_range */ + +#ifndef GL_EXT_memory_object +#define GL_EXT_memory_object 1 +#define GL_TEXTURE_TILING_EXT 0x9580 +#define GL_DEDICATED_MEMORY_OBJECT_EXT 0x9581 +#define GL_PROTECTED_MEMORY_OBJECT_EXT 0x959B +#define GL_NUM_TILING_TYPES_EXT 0x9582 +#define GL_TILING_TYPES_EXT 0x9583 +#define GL_OPTIMAL_TILING_EXT 0x9584 +#define GL_LINEAR_TILING_EXT 0x9585 +#define GL_NUM_DEVICE_UUIDS_EXT 0x9596 +#define GL_DEVICE_UUID_EXT 0x9597 +#define GL_DRIVER_UUID_EXT 0x9598 +#define GL_UUID_SIZE_EXT 16 +typedef void (GL_APIENTRYP PFNGLGETUNSIGNEDBYTEVEXTPROC) (GLenum pname, GLubyte *data); +typedef void (GL_APIENTRYP PFNGLGETUNSIGNEDBYTEI_VEXTPROC) (GLenum target, GLuint index, GLubyte *data); 
+typedef void (GL_APIENTRYP PFNGLDELETEMEMORYOBJECTSEXTPROC) (GLsizei n, const GLuint *memoryObjects); +typedef GLboolean (GL_APIENTRYP PFNGLISMEMORYOBJECTEXTPROC) (GLuint memoryObject); +typedef void (GL_APIENTRYP PFNGLCREATEMEMORYOBJECTSEXTPROC) (GLsizei n, GLuint *memoryObjects); +typedef void (GL_APIENTRYP PFNGLMEMORYOBJECTPARAMETERIVEXTPROC) (GLuint memoryObject, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLGETMEMORYOBJECTPARAMETERIVEXTPROC) (GLuint memoryObject, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM2DEXTPROC) (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM2DMULTISAMPLEEXTPROC) (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM3DEXTPROC) (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGEMEM3DMULTISAMPLEEXTPROC) (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLBUFFERSTORAGEMEMEXTPROC) (GLenum target, GLsizeiptr size, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGEMEM2DEXTPROC) (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGEMEM2DMULTISAMPLEEXTPROC) (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGEMEM3DEXTPROC) (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGEMEM3DMULTISAMPLEEXTPROC) (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +typedef void (GL_APIENTRYP PFNGLNAMEDBUFFERSTORAGEMEMEXTPROC) (GLuint buffer, GLsizeiptr size, GLuint memory, GLuint64 offset); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetUnsignedBytevEXT (GLenum pname, GLubyte *data); +GL_APICALL void GL_APIENTRY glGetUnsignedBytei_vEXT (GLenum target, GLuint index, GLubyte *data); +GL_APICALL void GL_APIENTRY glDeleteMemoryObjectsEXT (GLsizei n, const GLuint *memoryObjects); +GL_APICALL GLboolean GL_APIENTRY glIsMemoryObjectEXT (GLuint memoryObject); +GL_APICALL void GL_APIENTRY glCreateMemoryObjectsEXT (GLsizei n, GLuint *memoryObjects); +GL_APICALL void GL_APIENTRY glMemoryObjectParameterivEXT (GLuint memoryObject, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glGetMemoryObjectParameterivEXT (GLuint memoryObject, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glTexStorageMem2DEXT (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTexStorageMem2DMultisampleEXT (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GL_APICALL void 
GL_APIENTRY glTexStorageMem3DEXT (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTexStorageMem3DMultisampleEXT (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glBufferStorageMemEXT (GLenum target, GLsizeiptr size, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTextureStorageMem2DEXT (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTextureStorageMem2DMultisampleEXT (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTextureStorageMem3DEXT (GLuint texture, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glTextureStorageMem3DMultisampleEXT (GLuint texture, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset); +GL_APICALL void GL_APIENTRY glNamedBufferStorageMemEXT (GLuint buffer, GLsizeiptr size, GLuint memory, GLuint64 offset); +#endif +#endif /* GL_EXT_memory_object */ + +#ifndef GL_EXT_memory_object_fd +#define GL_EXT_memory_object_fd 1 +#define GL_HANDLE_TYPE_OPAQUE_FD_EXT 0x9586 +typedef void (GL_APIENTRYP PFNGLIMPORTMEMORYFDEXTPROC) (GLuint memory, GLuint64 size, GLenum handleType, GLint fd); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glImportMemoryFdEXT (GLuint memory, GLuint64 size, GLenum handleType, GLint fd); +#endif +#endif /* GL_EXT_memory_object_fd */ + +#ifndef GL_EXT_memory_object_win32 +#define GL_EXT_memory_object_win32 1 +#define GL_HANDLE_TYPE_OPAQUE_WIN32_EXT 0x9587 +#define GL_HANDLE_TYPE_OPAQUE_WIN32_KMT_EXT 0x9588 +#define GL_DEVICE_LUID_EXT 0x9599 +#define GL_DEVICE_NODE_MASK_EXT 0x959A +#define GL_LUID_SIZE_EXT 8 +#define GL_HANDLE_TYPE_D3D12_TILEPOOL_EXT 0x9589 +#define GL_HANDLE_TYPE_D3D12_RESOURCE_EXT 0x958A +#define GL_HANDLE_TYPE_D3D11_IMAGE_EXT 0x958B +#define GL_HANDLE_TYPE_D3D11_IMAGE_KMT_EXT 0x958C +typedef void (GL_APIENTRYP PFNGLIMPORTMEMORYWIN32HANDLEEXTPROC) (GLuint memory, GLuint64 size, GLenum handleType, void *handle); +typedef void (GL_APIENTRYP PFNGLIMPORTMEMORYWIN32NAMEEXTPROC) (GLuint memory, GLuint64 size, GLenum handleType, const void *name); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glImportMemoryWin32HandleEXT (GLuint memory, GLuint64 size, GLenum handleType, void *handle); +GL_APICALL void GL_APIENTRY glImportMemoryWin32NameEXT (GLuint memory, GLuint64 size, GLenum handleType, const void *name); +#endif +#endif /* GL_EXT_memory_object_win32 */ + +#ifndef GL_EXT_multi_draw_arrays +#define GL_EXT_multi_draw_arrays 1 +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSEXTPROC) (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSEXTPROC) (GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glMultiDrawArraysEXT (GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount); +GL_APICALL void GL_APIENTRY glMultiDrawElementsEXT (GLenum mode, 
const GLsizei *count, GLenum type, const void *const*indices, GLsizei primcount); +#endif +#endif /* GL_EXT_multi_draw_arrays */ + +#ifndef GL_EXT_multi_draw_indirect +#define GL_EXT_multi_draw_indirect 1 +typedef void (GL_APIENTRYP PFNGLMULTIDRAWARRAYSINDIRECTEXTPROC) (GLenum mode, const void *indirect, GLsizei drawcount, GLsizei stride); +typedef void (GL_APIENTRYP PFNGLMULTIDRAWELEMENTSINDIRECTEXTPROC) (GLenum mode, GLenum type, const void *indirect, GLsizei drawcount, GLsizei stride); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glMultiDrawArraysIndirectEXT (GLenum mode, const void *indirect, GLsizei drawcount, GLsizei stride); +GL_APICALL void GL_APIENTRY glMultiDrawElementsIndirectEXT (GLenum mode, GLenum type, const void *indirect, GLsizei drawcount, GLsizei stride); +#endif +#endif /* GL_EXT_multi_draw_indirect */ + +#ifndef GL_EXT_multisampled_compatibility +#define GL_EXT_multisampled_compatibility 1 +#define GL_MULTISAMPLE_EXT 0x809D +#define GL_SAMPLE_ALPHA_TO_ONE_EXT 0x809F +#endif /* GL_EXT_multisampled_compatibility */ + +#ifndef GL_EXT_multisampled_render_to_texture +#define GL_EXT_multisampled_render_to_texture 1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_SAMPLES_EXT 0x8D6C +#define GL_RENDERBUFFER_SAMPLES_EXT 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_EXT 0x8D56 +#define GL_MAX_SAMPLES_EXT 0x8D57 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEEXTPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleEXT (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glFramebufferTexture2DMultisampleEXT (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#endif +#endif /* GL_EXT_multisampled_render_to_texture */ + +#ifndef GL_EXT_multiview_draw_buffers +#define GL_EXT_multiview_draw_buffers 1 +#define GL_COLOR_ATTACHMENT_EXT 0x90F0 +#define GL_MULTIVIEW_EXT 0x90F1 +#define GL_DRAW_BUFFER_EXT 0x0C01 +#define GL_READ_BUFFER_EXT 0x0C02 +#define GL_MAX_MULTIVIEW_BUFFERS_EXT 0x90F2 +typedef void (GL_APIENTRYP PFNGLREADBUFFERINDEXEDEXTPROC) (GLenum src, GLint index); +typedef void (GL_APIENTRYP PFNGLDRAWBUFFERSINDEXEDEXTPROC) (GLint n, const GLenum *location, const GLint *indices); +typedef void (GL_APIENTRYP PFNGLGETINTEGERI_VEXTPROC) (GLenum target, GLuint index, GLint *data); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glReadBufferIndexedEXT (GLenum src, GLint index); +GL_APICALL void GL_APIENTRY glDrawBuffersIndexedEXT (GLint n, const GLenum *location, const GLint *indices); +GL_APICALL void GL_APIENTRY glGetIntegeri_vEXT (GLenum target, GLuint index, GLint *data); +#endif +#endif /* GL_EXT_multiview_draw_buffers */ + +#ifndef GL_EXT_occlusion_query_boolean +#define GL_EXT_occlusion_query_boolean 1 +#define GL_ANY_SAMPLES_PASSED_EXT 0x8C2F +#define GL_ANY_SAMPLES_PASSED_CONSERVATIVE_EXT 0x8D6A +#endif /* GL_EXT_occlusion_query_boolean */ + +#ifndef GL_EXT_polygon_offset_clamp +#define GL_EXT_polygon_offset_clamp 1 +#define GL_POLYGON_OFFSET_CLAMP_EXT 0x8E1B +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETCLAMPEXTPROC) (GLfloat factor, GLfloat units, GLfloat clamp); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY 
glPolygonOffsetClampEXT (GLfloat factor, GLfloat units, GLfloat clamp); +#endif +#endif /* GL_EXT_polygon_offset_clamp */ + +#ifndef GL_EXT_post_depth_coverage +#define GL_EXT_post_depth_coverage 1 +#endif /* GL_EXT_post_depth_coverage */ + +#ifndef GL_EXT_primitive_bounding_box +#define GL_EXT_primitive_bounding_box 1 +#define GL_PRIMITIVE_BOUNDING_BOX_EXT 0x92BE +typedef void (GL_APIENTRYP PFNGLPRIMITIVEBOUNDINGBOXEXTPROC) (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glPrimitiveBoundingBoxEXT (GLfloat minX, GLfloat minY, GLfloat minZ, GLfloat minW, GLfloat maxX, GLfloat maxY, GLfloat maxZ, GLfloat maxW); +#endif +#endif /* GL_EXT_primitive_bounding_box */ + +#ifndef GL_EXT_protected_textures +#define GL_EXT_protected_textures 1 +#define GL_CONTEXT_FLAG_PROTECTED_CONTENT_BIT_EXT 0x00000010 +#define GL_TEXTURE_PROTECTED_EXT 0x8BFA +#endif /* GL_EXT_protected_textures */ + +#ifndef GL_EXT_pvrtc_sRGB +#define GL_EXT_pvrtc_sRGB 1 +#define GL_COMPRESSED_SRGB_PVRTC_2BPPV1_EXT 0x8A54 +#define GL_COMPRESSED_SRGB_PVRTC_4BPPV1_EXT 0x8A55 +#define GL_COMPRESSED_SRGB_ALPHA_PVRTC_2BPPV1_EXT 0x8A56 +#define GL_COMPRESSED_SRGB_ALPHA_PVRTC_4BPPV1_EXT 0x8A57 +#define GL_COMPRESSED_SRGB_ALPHA_PVRTC_2BPPV2_IMG 0x93F0 +#define GL_COMPRESSED_SRGB_ALPHA_PVRTC_4BPPV2_IMG 0x93F1 +#endif /* GL_EXT_pvrtc_sRGB */ + +#ifndef GL_EXT_raster_multisample +#define GL_EXT_raster_multisample 1 +#define GL_RASTER_MULTISAMPLE_EXT 0x9327 +#define GL_RASTER_SAMPLES_EXT 0x9328 +#define GL_MAX_RASTER_SAMPLES_EXT 0x9329 +#define GL_RASTER_FIXED_SAMPLE_LOCATIONS_EXT 0x932A +#define GL_MULTISAMPLE_RASTERIZATION_ALLOWED_EXT 0x932B +#define GL_EFFECTIVE_RASTER_SAMPLES_EXT 0x932C +typedef void (GL_APIENTRYP PFNGLRASTERSAMPLESEXTPROC) (GLuint samples, GLboolean fixedsamplelocations); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRasterSamplesEXT (GLuint samples, GLboolean fixedsamplelocations); +#endif +#endif /* GL_EXT_raster_multisample */ + +#ifndef GL_EXT_read_format_bgra +#define GL_EXT_read_format_bgra 1 +#define GL_UNSIGNED_SHORT_4_4_4_4_REV_EXT 0x8365 +#define GL_UNSIGNED_SHORT_1_5_5_5_REV_EXT 0x8366 +#endif /* GL_EXT_read_format_bgra */ + +#ifndef GL_EXT_render_snorm +#define GL_EXT_render_snorm 1 +#define GL_R8_SNORM 0x8F94 +#define GL_RG8_SNORM 0x8F95 +#define GL_RGBA8_SNORM 0x8F97 +#define GL_R16_SNORM_EXT 0x8F98 +#define GL_RG16_SNORM_EXT 0x8F99 +#define GL_RGBA16_SNORM_EXT 0x8F9B +#endif /* GL_EXT_render_snorm */ + +#ifndef GL_EXT_robustness +#define GL_EXT_robustness 1 +#define GL_GUILTY_CONTEXT_RESET_EXT 0x8253 +#define GL_INNOCENT_CONTEXT_RESET_EXT 0x8254 +#define GL_UNKNOWN_CONTEXT_RESET_EXT 0x8255 +#define GL_CONTEXT_ROBUST_ACCESS_EXT 0x90F3 +#define GL_RESET_NOTIFICATION_STRATEGY_EXT 0x8256 +#define GL_LOSE_CONTEXT_ON_RESET_EXT 0x8252 +#define GL_NO_RESET_NOTIFICATION_EXT 0x8261 +typedef GLenum (GL_APIENTRYP PFNGLGETGRAPHICSRESETSTATUSEXTPROC) (void); +typedef void (GL_APIENTRYP PFNGLREADNPIXELSEXTPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMFVEXTPROC) (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETNUNIFORMIVEXTPROC) (GLuint program, GLint location, GLsizei bufSize, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLenum GL_APIENTRY glGetGraphicsResetStatusEXT (void); +GL_APICALL void GL_APIENTRY 
glReadnPixelsEXT (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLsizei bufSize, void *data); +GL_APICALL void GL_APIENTRY glGetnUniformfvEXT (GLuint program, GLint location, GLsizei bufSize, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetnUniformivEXT (GLuint program, GLint location, GLsizei bufSize, GLint *params); +#endif +#endif /* GL_EXT_robustness */ + +#ifndef GL_EXT_sRGB +#define GL_EXT_sRGB 1 +#define GL_SRGB_EXT 0x8C40 +#define GL_SRGB_ALPHA_EXT 0x8C42 +#define GL_SRGB8_ALPHA8_EXT 0x8C43 +#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING_EXT 0x8210 +#endif /* GL_EXT_sRGB */ + +#ifndef GL_EXT_sRGB_write_control +#define GL_EXT_sRGB_write_control 1 +#define GL_FRAMEBUFFER_SRGB_EXT 0x8DB9 +#endif /* GL_EXT_sRGB_write_control */ + +#ifndef GL_EXT_semaphore +#define GL_EXT_semaphore 1 +#define GL_LAYOUT_GENERAL_EXT 0x958D +#define GL_LAYOUT_COLOR_ATTACHMENT_EXT 0x958E +#define GL_LAYOUT_DEPTH_STENCIL_ATTACHMENT_EXT 0x958F +#define GL_LAYOUT_DEPTH_STENCIL_READ_ONLY_EXT 0x9590 +#define GL_LAYOUT_SHADER_READ_ONLY_EXT 0x9591 +#define GL_LAYOUT_TRANSFER_SRC_EXT 0x9592 +#define GL_LAYOUT_TRANSFER_DST_EXT 0x9593 +#define GL_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_EXT 0x9530 +#define GL_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_EXT 0x9531 +typedef void (GL_APIENTRYP PFNGLGENSEMAPHORESEXTPROC) (GLsizei n, GLuint *semaphores); +typedef void (GL_APIENTRYP PFNGLDELETESEMAPHORESEXTPROC) (GLsizei n, const GLuint *semaphores); +typedef GLboolean (GL_APIENTRYP PFNGLISSEMAPHOREEXTPROC) (GLuint semaphore); +typedef void (GL_APIENTRYP PFNGLSEMAPHOREPARAMETERUI64VEXTPROC) (GLuint semaphore, GLenum pname, const GLuint64 *params); +typedef void (GL_APIENTRYP PFNGLGETSEMAPHOREPARAMETERUI64VEXTPROC) (GLuint semaphore, GLenum pname, GLuint64 *params); +typedef void (GL_APIENTRYP PFNGLWAITSEMAPHOREEXTPROC) (GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *srcLayouts); +typedef void (GL_APIENTRYP PFNGLSIGNALSEMAPHOREEXTPROC) (GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *dstLayouts); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGenSemaphoresEXT (GLsizei n, GLuint *semaphores); +GL_APICALL void GL_APIENTRY glDeleteSemaphoresEXT (GLsizei n, const GLuint *semaphores); +GL_APICALL GLboolean GL_APIENTRY glIsSemaphoreEXT (GLuint semaphore); +GL_APICALL void GL_APIENTRY glSemaphoreParameterui64vEXT (GLuint semaphore, GLenum pname, const GLuint64 *params); +GL_APICALL void GL_APIENTRY glGetSemaphoreParameterui64vEXT (GLuint semaphore, GLenum pname, GLuint64 *params); +GL_APICALL void GL_APIENTRY glWaitSemaphoreEXT (GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *srcLayouts); +GL_APICALL void GL_APIENTRY glSignalSemaphoreEXT (GLuint semaphore, GLuint numBufferBarriers, const GLuint *buffers, GLuint numTextureBarriers, const GLuint *textures, const GLenum *dstLayouts); +#endif +#endif /* GL_EXT_semaphore */ + +#ifndef GL_EXT_semaphore_fd +#define GL_EXT_semaphore_fd 1 +typedef void (GL_APIENTRYP PFNGLIMPORTSEMAPHOREFDEXTPROC) (GLuint semaphore, GLenum handleType, GLint fd); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glImportSemaphoreFdEXT (GLuint semaphore, GLenum handleType, GLint fd); +#endif +#endif /* GL_EXT_semaphore_fd */ + +#ifndef GL_EXT_semaphore_win32 +#define GL_EXT_semaphore_win32 1 +#define 
GL_HANDLE_TYPE_D3D12_FENCE_EXT 0x9594 +#define GL_D3D12_FENCE_VALUE_EXT 0x9595 +typedef void (GL_APIENTRYP PFNGLIMPORTSEMAPHOREWIN32HANDLEEXTPROC) (GLuint semaphore, GLenum handleType, void *handle); +typedef void (GL_APIENTRYP PFNGLIMPORTSEMAPHOREWIN32NAMEEXTPROC) (GLuint semaphore, GLenum handleType, const void *name); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glImportSemaphoreWin32HandleEXT (GLuint semaphore, GLenum handleType, void *handle); +GL_APICALL void GL_APIENTRY glImportSemaphoreWin32NameEXT (GLuint semaphore, GLenum handleType, const void *name); +#endif +#endif /* GL_EXT_semaphore_win32 */ + +#ifndef GL_EXT_separate_shader_objects +#define GL_EXT_separate_shader_objects 1 +#define GL_ACTIVE_PROGRAM_EXT 0x8259 +#define GL_VERTEX_SHADER_BIT_EXT 0x00000001 +#define GL_FRAGMENT_SHADER_BIT_EXT 0x00000002 +#define GL_ALL_SHADER_BITS_EXT 0xFFFFFFFF +#define GL_PROGRAM_SEPARABLE_EXT 0x8258 +#define GL_PROGRAM_PIPELINE_BINDING_EXT 0x825A +typedef void (GL_APIENTRYP PFNGLACTIVESHADERPROGRAMEXTPROC) (GLuint pipeline, GLuint program); +typedef void (GL_APIENTRYP PFNGLBINDPROGRAMPIPELINEEXTPROC) (GLuint pipeline); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROGRAMVEXTPROC) (GLenum type, GLsizei count, const GLchar **strings); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPIPELINESEXTPROC) (GLsizei n, const GLuint *pipelines); +typedef void (GL_APIENTRYP PFNGLGENPROGRAMPIPELINESEXTPROC) (GLsizei n, GLuint *pipelines); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMPIPELINEINFOLOGEXTPROC) (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMPIPELINEIVEXTPROC) (GLuint pipeline, GLenum pname, GLint *params); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPIPELINEEXTPROC) (GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLPROGRAMPARAMETERIEXTPROC) (GLuint program, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FEXTPROC) (GLuint program, GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1IEXTPROC) (GLuint program, GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3FVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FEXTPROC) (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4FVEXTPROC) 
(GLuint program, GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4IEXTPROC) (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4IVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMSTAGESEXTPROC) (GLuint pipeline, GLbitfield stages, GLuint program); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPIPELINEEXTPROC) (GLuint pipeline); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UIEXTPROC) (GLuint program, GLint location, GLuint v0); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UIEXTPROC) (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UIVEXTPROC) (GLuint program, GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX2X4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X2FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX3X4FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMMATRIX4X3FVEXTPROC) (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glActiveShaderProgramEXT (GLuint pipeline, GLuint program); +GL_APICALL void GL_APIENTRY glBindProgramPipelineEXT (GLuint pipeline); +GL_APICALL GLuint GL_APIENTRY glCreateShaderProgramvEXT (GLenum type, GLsizei count, const GLchar **strings); +GL_APICALL void GL_APIENTRY glDeleteProgramPipelinesEXT (GLsizei n, const GLuint *pipelines); +GL_APICALL void GL_APIENTRY glGenProgramPipelinesEXT (GLsizei n, GLuint *pipelines); +GL_APICALL void GL_APIENTRY glGetProgramPipelineInfoLogEXT (GLuint pipeline, GLsizei bufSize, GLsizei *length, GLchar 
*infoLog); +GL_APICALL void GL_APIENTRY glGetProgramPipelineivEXT (GLuint pipeline, GLenum pname, GLint *params); +GL_APICALL GLboolean GL_APIENTRY glIsProgramPipelineEXT (GLuint pipeline); +GL_APICALL void GL_APIENTRY glProgramParameteriEXT (GLuint program, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glProgramUniform1fEXT (GLuint program, GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glProgramUniform1fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform1iEXT (GLuint program, GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glProgramUniform1ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glProgramUniform2fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform2iEXT (GLuint program, GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glProgramUniform2ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform3fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glProgramUniform3fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform3iEXT (GLuint program, GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glProgramUniform3ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4fEXT (GLuint program, GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glProgramUniform4fvEXT (GLuint program, GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniform4iEXT (GLuint program, GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glProgramUniform4ivEXT (GLuint program, GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUseProgramStagesEXT (GLuint pipeline, GLbitfield stages, GLuint program); +GL_APICALL void GL_APIENTRY glValidateProgramPipelineEXT (GLuint pipeline); +GL_APICALL void GL_APIENTRY glProgramUniform1uiEXT (GLuint program, GLint location, GLuint v0); +GL_APICALL void GL_APIENTRY glProgramUniform2uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1); +GL_APICALL void GL_APIENTRY glProgramUniform3uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2); +GL_APICALL void GL_APIENTRY glProgramUniform4uiEXT (GLuint program, GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GL_APICALL void GL_APIENTRY glProgramUniform1uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform2uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform3uivEXT 
(GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniform4uivEXT (GLuint program, GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2x3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix2x4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x2fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix3x4fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glProgramUniformMatrix4x3fvEXT (GLuint program, GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#endif +#endif /* GL_EXT_separate_shader_objects */ + +#ifndef GL_EXT_shader_framebuffer_fetch +#define GL_EXT_shader_framebuffer_fetch 1 +#define GL_FRAGMENT_SHADER_DISCARDS_SAMPLES_EXT 0x8A52 +#endif /* GL_EXT_shader_framebuffer_fetch */ + +#ifndef GL_EXT_shader_framebuffer_fetch_non_coherent +#define GL_EXT_shader_framebuffer_fetch_non_coherent 1 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERFETCHBARRIEREXTPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferFetchBarrierEXT (void); +#endif +#endif /* GL_EXT_shader_framebuffer_fetch_non_coherent */ + +#ifndef GL_EXT_shader_group_vote +#define GL_EXT_shader_group_vote 1 +#endif /* GL_EXT_shader_group_vote */ + +#ifndef GL_EXT_shader_implicit_conversions +#define GL_EXT_shader_implicit_conversions 1 +#endif /* GL_EXT_shader_implicit_conversions */ + +#ifndef GL_EXT_shader_integer_mix +#define GL_EXT_shader_integer_mix 1 +#endif /* GL_EXT_shader_integer_mix */ + +#ifndef GL_EXT_shader_io_blocks +#define GL_EXT_shader_io_blocks 1 +#endif /* GL_EXT_shader_io_blocks */ + +#ifndef GL_EXT_shader_non_constant_global_initializers +#define GL_EXT_shader_non_constant_global_initializers 1 +#endif /* GL_EXT_shader_non_constant_global_initializers */ + +#ifndef GL_EXT_shader_pixel_local_storage +#define GL_EXT_shader_pixel_local_storage 1 +#define GL_MAX_SHADER_PIXEL_LOCAL_STORAGE_FAST_SIZE_EXT 0x8F63 +#define GL_MAX_SHADER_PIXEL_LOCAL_STORAGE_SIZE_EXT 0x8F67 +#define GL_SHADER_PIXEL_LOCAL_STORAGE_EXT 0x8F64 +#endif /* GL_EXT_shader_pixel_local_storage */ + +#ifndef GL_EXT_shader_pixel_local_storage2 +#define GL_EXT_shader_pixel_local_storage2 1 +#define GL_MAX_SHADER_COMBINED_LOCAL_STORAGE_FAST_SIZE_EXT 0x9650 +#define GL_MAX_SHADER_COMBINED_LOCAL_STORAGE_SIZE_EXT 0x9651 +#define GL_FRAMEBUFFER_INCOMPLETE_INSUFFICIENT_SHADER_COMBINED_LOCAL_STORAGE_EXT 0x9652 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERPIXELLOCALSTORAGESIZEEXTPROC) (GLuint target, GLsizei size); +typedef GLsizei (GL_APIENTRYP PFNGLGETFRAMEBUFFERPIXELLOCALSTORAGESIZEEXTPROC) (GLuint target); +typedef void (GL_APIENTRYP PFNGLCLEARPIXELLOCALSTORAGEUIEXTPROC) (GLsizei offset, GLsizei n, const GLuint *values); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferPixelLocalStorageSizeEXT (GLuint target, GLsizei size); +GL_APICALL GLsizei GL_APIENTRY glGetFramebufferPixelLocalStorageSizeEXT (GLuint target); +GL_APICALL void GL_APIENTRY 
glClearPixelLocalStorageuiEXT (GLsizei offset, GLsizei n, const GLuint *values); +#endif +#endif /* GL_EXT_shader_pixel_local_storage2 */ + +#ifndef GL_EXT_shader_texture_lod +#define GL_EXT_shader_texture_lod 1 +#endif /* GL_EXT_shader_texture_lod */ + +#ifndef GL_EXT_shadow_samplers +#define GL_EXT_shadow_samplers 1 +#define GL_TEXTURE_COMPARE_MODE_EXT 0x884C +#define GL_TEXTURE_COMPARE_FUNC_EXT 0x884D +#define GL_COMPARE_REF_TO_TEXTURE_EXT 0x884E +#define GL_SAMPLER_2D_SHADOW_EXT 0x8B62 +#endif /* GL_EXT_shadow_samplers */ + +#ifndef GL_EXT_sparse_texture +#define GL_EXT_sparse_texture 1 +#define GL_TEXTURE_SPARSE_EXT 0x91A6 +#define GL_VIRTUAL_PAGE_SIZE_INDEX_EXT 0x91A7 +#define GL_NUM_SPARSE_LEVELS_EXT 0x91AA +#define GL_NUM_VIRTUAL_PAGE_SIZES_EXT 0x91A8 +#define GL_VIRTUAL_PAGE_SIZE_X_EXT 0x9195 +#define GL_VIRTUAL_PAGE_SIZE_Y_EXT 0x9196 +#define GL_VIRTUAL_PAGE_SIZE_Z_EXT 0x9197 +#define GL_TEXTURE_2D_ARRAY 0x8C1A +#define GL_TEXTURE_3D 0x806F +#define GL_MAX_SPARSE_TEXTURE_SIZE_EXT 0x9198 +#define GL_MAX_SPARSE_3D_TEXTURE_SIZE_EXT 0x9199 +#define GL_MAX_SPARSE_ARRAY_TEXTURE_LAYERS_EXT 0x919A +#define GL_SPARSE_TEXTURE_FULL_ARRAY_CUBE_MIPMAPS_EXT 0x91A9 +typedef void (GL_APIENTRYP PFNGLTEXPAGECOMMITMENTEXTPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean commit); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexPageCommitmentEXT (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLboolean commit); +#endif +#endif /* GL_EXT_sparse_texture */ + +#ifndef GL_EXT_sparse_texture2 +#define GL_EXT_sparse_texture2 1 +#endif /* GL_EXT_sparse_texture2 */ + +#ifndef GL_EXT_tessellation_point_size +#define GL_EXT_tessellation_point_size 1 +#endif /* GL_EXT_tessellation_point_size */ + +#ifndef GL_EXT_tessellation_shader +#define GL_EXT_tessellation_shader 1 +#define GL_PATCHES_EXT 0x000E +#define GL_PATCH_VERTICES_EXT 0x8E72 +#define GL_TESS_CONTROL_OUTPUT_VERTICES_EXT 0x8E75 +#define GL_TESS_GEN_MODE_EXT 0x8E76 +#define GL_TESS_GEN_SPACING_EXT 0x8E77 +#define GL_TESS_GEN_VERTEX_ORDER_EXT 0x8E78 +#define GL_TESS_GEN_POINT_MODE_EXT 0x8E79 +#define GL_ISOLINES_EXT 0x8E7A +#define GL_QUADS_EXT 0x0007 +#define GL_FRACTIONAL_ODD_EXT 0x8E7B +#define GL_FRACTIONAL_EVEN_EXT 0x8E7C +#define GL_MAX_PATCH_VERTICES_EXT 0x8E7D +#define GL_MAX_TESS_GEN_LEVEL_EXT 0x8E7E +#define GL_MAX_TESS_CONTROL_UNIFORM_COMPONENTS_EXT 0x8E7F +#define GL_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS_EXT 0x8E80 +#define GL_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS_EXT 0x8E81 +#define GL_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS_EXT 0x8E82 +#define GL_MAX_TESS_CONTROL_OUTPUT_COMPONENTS_EXT 0x8E83 +#define GL_MAX_TESS_PATCH_COMPONENTS_EXT 0x8E84 +#define GL_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS_EXT 0x8E85 +#define GL_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS_EXT 0x8E86 +#define GL_MAX_TESS_CONTROL_UNIFORM_BLOCKS_EXT 0x8E89 +#define GL_MAX_TESS_EVALUATION_UNIFORM_BLOCKS_EXT 0x8E8A +#define GL_MAX_TESS_CONTROL_INPUT_COMPONENTS_EXT 0x886C +#define GL_MAX_TESS_EVALUATION_INPUT_COMPONENTS_EXT 0x886D +#define GL_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS_EXT 0x8E1E +#define GL_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS_EXT 0x8E1F +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS_EXT 0x92CD +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS_EXT 0x92CE +#define GL_MAX_TESS_CONTROL_ATOMIC_COUNTERS_EXT 0x92D3 +#define GL_MAX_TESS_EVALUATION_ATOMIC_COUNTERS_EXT 0x92D4 
+#define GL_MAX_TESS_CONTROL_IMAGE_UNIFORMS_EXT 0x90CB +#define GL_MAX_TESS_EVALUATION_IMAGE_UNIFORMS_EXT 0x90CC +#define GL_MAX_TESS_CONTROL_SHADER_STORAGE_BLOCKS_EXT 0x90D8 +#define GL_MAX_TESS_EVALUATION_SHADER_STORAGE_BLOCKS_EXT 0x90D9 +#define GL_PRIMITIVE_RESTART_FOR_PATCHES_SUPPORTED 0x8221 +#define GL_IS_PER_PATCH_EXT 0x92E7 +#define GL_REFERENCED_BY_TESS_CONTROL_SHADER_EXT 0x9307 +#define GL_REFERENCED_BY_TESS_EVALUATION_SHADER_EXT 0x9308 +#define GL_TESS_CONTROL_SHADER_EXT 0x8E88 +#define GL_TESS_EVALUATION_SHADER_EXT 0x8E87 +#define GL_TESS_CONTROL_SHADER_BIT_EXT 0x00000008 +#define GL_TESS_EVALUATION_SHADER_BIT_EXT 0x00000010 +typedef void (GL_APIENTRYP PFNGLPATCHPARAMETERIEXTPROC) (GLenum pname, GLint value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glPatchParameteriEXT (GLenum pname, GLint value); +#endif +#endif /* GL_EXT_tessellation_shader */ + +#ifndef GL_EXT_texture_border_clamp +#define GL_EXT_texture_border_clamp 1 +#define GL_TEXTURE_BORDER_COLOR_EXT 0x1004 +#define GL_CLAMP_TO_BORDER_EXT 0x812D +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIIVEXTPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIUIVEXTPROC) (GLenum target, GLenum pname, const GLuint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIIVEXTPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIUIVEXTPROC) (GLenum target, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIIVEXTPROC) (GLuint sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIUIVEXTPROC) (GLuint sampler, GLenum pname, const GLuint *param); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIIVEXTPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVEXTPROC) (GLuint sampler, GLenum pname, GLuint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexParameterIivEXT (GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexParameterIuivEXT (GLenum target, GLenum pname, const GLuint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIivEXT (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetTexParameterIuivEXT (GLenum target, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glSamplerParameterIivEXT (GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterIuivEXT (GLuint sampler, GLenum pname, const GLuint *param); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIivEXT (GLuint sampler, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterIuivEXT (GLuint sampler, GLenum pname, GLuint *params); +#endif +#endif /* GL_EXT_texture_border_clamp */ + +#ifndef GL_EXT_texture_buffer +#define GL_EXT_texture_buffer 1 +#define GL_TEXTURE_BUFFER_EXT 0x8C2A +#define GL_TEXTURE_BUFFER_BINDING_EXT 0x8C2A +#define GL_MAX_TEXTURE_BUFFER_SIZE_EXT 0x8C2B +#define GL_TEXTURE_BINDING_BUFFER_EXT 0x8C2C +#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING_EXT 0x8C2D +#define GL_TEXTURE_BUFFER_OFFSET_ALIGNMENT_EXT 0x919F +#define GL_SAMPLER_BUFFER_EXT 0x8DC2 +#define GL_INT_SAMPLER_BUFFER_EXT 0x8DD0 +#define GL_UNSIGNED_INT_SAMPLER_BUFFER_EXT 0x8DD8 +#define GL_IMAGE_BUFFER_EXT 0x9051 +#define GL_INT_IMAGE_BUFFER_EXT 0x905C +#define GL_UNSIGNED_INT_IMAGE_BUFFER_EXT 0x9067 +#define GL_TEXTURE_BUFFER_OFFSET_EXT 0x919D +#define GL_TEXTURE_BUFFER_SIZE_EXT 0x919E +typedef 
void (GL_APIENTRYP PFNGLTEXBUFFEREXTPROC) (GLenum target, GLenum internalformat, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTEXBUFFERRANGEEXTPROC) (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexBufferEXT (GLenum target, GLenum internalformat, GLuint buffer); +GL_APICALL void GL_APIENTRY glTexBufferRangeEXT (GLenum target, GLenum internalformat, GLuint buffer, GLintptr offset, GLsizeiptr size); +#endif +#endif /* GL_EXT_texture_buffer */ + +#ifndef GL_EXT_texture_compression_astc_decode_mode +#define GL_EXT_texture_compression_astc_decode_mode 1 +#define GL_TEXTURE_ASTC_DECODE_PRECISION_EXT 0x8F69 +#endif /* GL_EXT_texture_compression_astc_decode_mode */ + +#ifndef GL_EXT_texture_compression_bptc +#define GL_EXT_texture_compression_bptc 1 +#define GL_COMPRESSED_RGBA_BPTC_UNORM_EXT 0x8E8C +#define GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM_EXT 0x8E8D +#define GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT_EXT 0x8E8E +#define GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_EXT 0x8E8F +#endif /* GL_EXT_texture_compression_bptc */ + +#ifndef GL_EXT_texture_compression_dxt1 +#define GL_EXT_texture_compression_dxt1 1 +#define GL_COMPRESSED_RGB_S3TC_DXT1_EXT 0x83F0 +#define GL_COMPRESSED_RGBA_S3TC_DXT1_EXT 0x83F1 +#endif /* GL_EXT_texture_compression_dxt1 */ + +#ifndef GL_EXT_texture_compression_rgtc +#define GL_EXT_texture_compression_rgtc 1 +#define GL_COMPRESSED_RED_RGTC1_EXT 0x8DBB +#define GL_COMPRESSED_SIGNED_RED_RGTC1_EXT 0x8DBC +#define GL_COMPRESSED_RED_GREEN_RGTC2_EXT 0x8DBD +#define GL_COMPRESSED_SIGNED_RED_GREEN_RGTC2_EXT 0x8DBE +#endif /* GL_EXT_texture_compression_rgtc */ + +#ifndef GL_EXT_texture_compression_s3tc +#define GL_EXT_texture_compression_s3tc 1 +#define GL_COMPRESSED_RGBA_S3TC_DXT3_EXT 0x83F2 +#define GL_COMPRESSED_RGBA_S3TC_DXT5_EXT 0x83F3 +#endif /* GL_EXT_texture_compression_s3tc */ + +#ifndef GL_EXT_texture_compression_s3tc_srgb +#define GL_EXT_texture_compression_s3tc_srgb 1 +#define GL_COMPRESSED_SRGB_S3TC_DXT1_EXT 0x8C4C +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT 0x8C4D +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT 0x8C4E +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT 0x8C4F +#endif /* GL_EXT_texture_compression_s3tc_srgb */ + +#ifndef GL_EXT_texture_cube_map_array +#define GL_EXT_texture_cube_map_array 1 +#define GL_TEXTURE_CUBE_MAP_ARRAY_EXT 0x9009 +#define GL_TEXTURE_BINDING_CUBE_MAP_ARRAY_EXT 0x900A +#define GL_SAMPLER_CUBE_MAP_ARRAY_EXT 0x900C +#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW_EXT 0x900D +#define GL_INT_SAMPLER_CUBE_MAP_ARRAY_EXT 0x900E +#define GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY_EXT 0x900F +#define GL_IMAGE_CUBE_MAP_ARRAY_EXT 0x9054 +#define GL_INT_IMAGE_CUBE_MAP_ARRAY_EXT 0x905F +#define GL_UNSIGNED_INT_IMAGE_CUBE_MAP_ARRAY_EXT 0x906A +#endif /* GL_EXT_texture_cube_map_array */ + +#ifndef GL_EXT_texture_filter_anisotropic +#define GL_EXT_texture_filter_anisotropic 1 +#define GL_TEXTURE_MAX_ANISOTROPY_EXT 0x84FE +#define GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT 0x84FF +#endif /* GL_EXT_texture_filter_anisotropic */ + +#ifndef GL_EXT_texture_filter_minmax +#define GL_EXT_texture_filter_minmax 1 +#define GL_TEXTURE_REDUCTION_MODE_EXT 0x9366 +#define GL_WEIGHTED_AVERAGE_EXT 0x9367 +#endif /* GL_EXT_texture_filter_minmax */ + +#ifndef GL_EXT_texture_format_BGRA8888 +#define GL_EXT_texture_format_BGRA8888 1 +#endif /* GL_EXT_texture_format_BGRA8888 */ + +#ifndef GL_EXT_texture_format_sRGB_override +#define GL_EXT_texture_format_sRGB_override 1 +#define 
GL_TEXTURE_FORMAT_SRGB_OVERRIDE_EXT 0x8FBF +#endif /* GL_EXT_texture_format_sRGB_override */ + +#ifndef GL_EXT_texture_mirror_clamp_to_edge +#define GL_EXT_texture_mirror_clamp_to_edge 1 +#define GL_MIRROR_CLAMP_TO_EDGE_EXT 0x8743 +#endif /* GL_EXT_texture_mirror_clamp_to_edge */ + +#ifndef GL_EXT_texture_norm16 +#define GL_EXT_texture_norm16 1 +#define GL_R16_EXT 0x822A +#define GL_RG16_EXT 0x822C +#define GL_RGBA16_EXT 0x805B +#define GL_RGB16_EXT 0x8054 +#define GL_RGB16_SNORM_EXT 0x8F9A +#endif /* GL_EXT_texture_norm16 */ + +#ifndef GL_EXT_texture_rg +#define GL_EXT_texture_rg 1 +#define GL_RED_EXT 0x1903 +#define GL_RG_EXT 0x8227 +#define GL_R8_EXT 0x8229 +#define GL_RG8_EXT 0x822B +#endif /* GL_EXT_texture_rg */ + +#ifndef GL_EXT_texture_sRGB_R8 +#define GL_EXT_texture_sRGB_R8 1 +#define GL_SR8_EXT 0x8FBD +#endif /* GL_EXT_texture_sRGB_R8 */ + +#ifndef GL_EXT_texture_sRGB_RG8 +#define GL_EXT_texture_sRGB_RG8 1 +#define GL_SRG8_EXT 0x8FBE +#endif /* GL_EXT_texture_sRGB_RG8 */ + +#ifndef GL_EXT_texture_sRGB_decode +#define GL_EXT_texture_sRGB_decode 1 +#define GL_TEXTURE_SRGB_DECODE_EXT 0x8A48 +#define GL_DECODE_EXT 0x8A49 +#define GL_SKIP_DECODE_EXT 0x8A4A +#endif /* GL_EXT_texture_sRGB_decode */ + +#ifndef GL_EXT_texture_storage +#define GL_EXT_texture_storage 1 +#define GL_TEXTURE_IMMUTABLE_FORMAT_EXT 0x912F +#define GL_ALPHA8_EXT 0x803C +#define GL_LUMINANCE8_EXT 0x8040 +#define GL_LUMINANCE8_ALPHA8_EXT 0x8045 +#define GL_RGBA32F_EXT 0x8814 +#define GL_RGB32F_EXT 0x8815 +#define GL_ALPHA32F_EXT 0x8816 +#define GL_LUMINANCE32F_EXT 0x8818 +#define GL_LUMINANCE_ALPHA32F_EXT 0x8819 +#define GL_ALPHA16F_EXT 0x881C +#define GL_LUMINANCE16F_EXT 0x881E +#define GL_LUMINANCE_ALPHA16F_EXT 0x881F +#define GL_R32F_EXT 0x822E +#define GL_RG32F_EXT 0x8230 +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE1DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DEXTPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE1DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE2DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXTURESTORAGE3DEXTPROC) (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTexStorage1DEXT (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GL_APICALL void GL_APIENTRY glTexStorage2DEXT (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage3DEXT (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GL_APICALL void GL_APIENTRY glTextureStorage1DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width); +GL_APICALL void GL_APIENTRY glTextureStorage2DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTextureStorage3DEXT (GLuint texture, GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei 
height, GLsizei depth); +#endif +#endif /* GL_EXT_texture_storage */ + +#ifndef GL_EXT_texture_type_2_10_10_10_REV +#define GL_EXT_texture_type_2_10_10_10_REV 1 +#define GL_UNSIGNED_INT_2_10_10_10_REV_EXT 0x8368 +#endif /* GL_EXT_texture_type_2_10_10_10_REV */ + +#ifndef GL_EXT_texture_view +#define GL_EXT_texture_view 1 +#define GL_TEXTURE_VIEW_MIN_LEVEL_EXT 0x82DB +#define GL_TEXTURE_VIEW_NUM_LEVELS_EXT 0x82DC +#define GL_TEXTURE_VIEW_MIN_LAYER_EXT 0x82DD +#define GL_TEXTURE_VIEW_NUM_LAYERS_EXT 0x82DE +typedef void (GL_APIENTRYP PFNGLTEXTUREVIEWEXTPROC) (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTextureViewEXT (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers); +#endif +#endif /* GL_EXT_texture_view */ + +#ifndef GL_EXT_unpack_subimage +#define GL_EXT_unpack_subimage 1 +#define GL_UNPACK_ROW_LENGTH_EXT 0x0CF2 +#define GL_UNPACK_SKIP_ROWS_EXT 0x0CF3 +#define GL_UNPACK_SKIP_PIXELS_EXT 0x0CF4 +#endif /* GL_EXT_unpack_subimage */ + +#ifndef GL_EXT_win32_keyed_mutex +#define GL_EXT_win32_keyed_mutex 1 +typedef GLboolean (GL_APIENTRYP PFNGLACQUIREKEYEDMUTEXWIN32EXTPROC) (GLuint memory, GLuint64 key, GLuint timeout); +typedef GLboolean (GL_APIENTRYP PFNGLRELEASEKEYEDMUTEXWIN32EXTPROC) (GLuint memory, GLuint64 key); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLboolean GL_APIENTRY glAcquireKeyedMutexWin32EXT (GLuint memory, GLuint64 key, GLuint timeout); +GL_APICALL GLboolean GL_APIENTRY glReleaseKeyedMutexWin32EXT (GLuint memory, GLuint64 key); +#endif +#endif /* GL_EXT_win32_keyed_mutex */ + +#ifndef GL_EXT_window_rectangles +#define GL_EXT_window_rectangles 1 +#define GL_INCLUSIVE_EXT 0x8F10 +#define GL_EXCLUSIVE_EXT 0x8F11 +#define GL_WINDOW_RECTANGLE_EXT 0x8F12 +#define GL_WINDOW_RECTANGLE_MODE_EXT 0x8F13 +#define GL_MAX_WINDOW_RECTANGLES_EXT 0x8F14 +#define GL_NUM_WINDOW_RECTANGLES_EXT 0x8F15 +typedef void (GL_APIENTRYP PFNGLWINDOWRECTANGLESEXTPROC) (GLenum mode, GLsizei count, const GLint *box); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glWindowRectanglesEXT (GLenum mode, GLsizei count, const GLint *box); +#endif +#endif /* GL_EXT_window_rectangles */ + +#ifndef GL_FJ_shader_binary_GCCSO +#define GL_FJ_shader_binary_GCCSO 1 +#define GL_GCCSO_SHADER_BINARY_FJ 0x9260 +#endif /* GL_FJ_shader_binary_GCCSO */ + +#ifndef GL_IMG_bindless_texture +#define GL_IMG_bindless_texture 1 +typedef GLuint64 (GL_APIENTRYP PFNGLGETTEXTUREHANDLEIMGPROC) (GLuint texture); +typedef GLuint64 (GL_APIENTRYP PFNGLGETTEXTURESAMPLERHANDLEIMGPROC) (GLuint texture, GLuint sampler); +typedef void (GL_APIENTRYP PFNGLUNIFORMHANDLEUI64IMGPROC) (GLint location, GLuint64 value); +typedef void (GL_APIENTRYP PFNGLUNIFORMHANDLEUI64VIMGPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64IMGPROC) (GLuint program, GLint location, GLuint64 value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64VIMGPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLuint64 GL_APIENTRY glGetTextureHandleIMG (GLuint texture); +GL_APICALL GLuint64 GL_APIENTRY glGetTextureSamplerHandleIMG (GLuint texture, GLuint sampler); +GL_APICALL void GL_APIENTRY glUniformHandleui64IMG (GLint location, GLuint64 value); +GL_APICALL void GL_APIENTRY 
glUniformHandleui64vIMG (GLint location, GLsizei count, const GLuint64 *value); +GL_APICALL void GL_APIENTRY glProgramUniformHandleui64IMG (GLuint program, GLint location, GLuint64 value); +GL_APICALL void GL_APIENTRY glProgramUniformHandleui64vIMG (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +#endif +#endif /* GL_IMG_bindless_texture */ + +#ifndef GL_IMG_framebuffer_downsample +#define GL_IMG_framebuffer_downsample 1 +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_AND_DOWNSAMPLE_IMG 0x913C +#define GL_NUM_DOWNSAMPLE_SCALES_IMG 0x913D +#define GL_DOWNSAMPLE_SCALES_IMG 0x913E +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_SCALE_IMG 0x913F +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DDOWNSAMPLEIMGPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint xscale, GLint yscale); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERDOWNSAMPLEIMGPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer, GLint xscale, GLint yscale); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferTexture2DDownsampleIMG (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint xscale, GLint yscale); +GL_APICALL void GL_APIENTRY glFramebufferTextureLayerDownsampleIMG (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer, GLint xscale, GLint yscale); +#endif +#endif /* GL_IMG_framebuffer_downsample */ + +#ifndef GL_IMG_multisampled_render_to_texture +#define GL_IMG_multisampled_render_to_texture 1 +#define GL_RENDERBUFFER_SAMPLES_IMG 0x9133 +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_IMG 0x9134 +#define GL_MAX_SAMPLES_IMG 0x9135 +#define GL_TEXTURE_SAMPLES_IMG 0x9136 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEIMGPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEIMGPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleIMG (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glFramebufferTexture2DMultisampleIMG (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLsizei samples); +#endif +#endif /* GL_IMG_multisampled_render_to_texture */ + +#ifndef GL_IMG_program_binary +#define GL_IMG_program_binary 1 +#define GL_SGX_PROGRAM_BINARY_IMG 0x9130 +#endif /* GL_IMG_program_binary */ + +#ifndef GL_IMG_read_format +#define GL_IMG_read_format 1 +#define GL_BGRA_IMG 0x80E1 +#define GL_UNSIGNED_SHORT_4_4_4_4_REV_IMG 0x8365 +#endif /* GL_IMG_read_format */ + +#ifndef GL_IMG_shader_binary +#define GL_IMG_shader_binary 1 +#define GL_SGX_BINARY_IMG 0x8C0A +#endif /* GL_IMG_shader_binary */ + +#ifndef GL_IMG_texture_compression_pvrtc +#define GL_IMG_texture_compression_pvrtc 1 +#define GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG 0x8C00 +#define GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG 0x8C01 +#define GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG 0x8C02 +#define GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG 0x8C03 +#endif /* GL_IMG_texture_compression_pvrtc */ + +#ifndef GL_IMG_texture_compression_pvrtc2 +#define GL_IMG_texture_compression_pvrtc2 1 +#define GL_COMPRESSED_RGBA_PVRTC_2BPPV2_IMG 0x9137 +#define GL_COMPRESSED_RGBA_PVRTC_4BPPV2_IMG 0x9138 +#endif /* GL_IMG_texture_compression_pvrtc2 */ + +#ifndef GL_IMG_texture_filter_cubic 
+#define GL_IMG_texture_filter_cubic 1 +#define GL_CUBIC_IMG 0x9139 +#define GL_CUBIC_MIPMAP_NEAREST_IMG 0x913A +#define GL_CUBIC_MIPMAP_LINEAR_IMG 0x913B +#endif /* GL_IMG_texture_filter_cubic */ + +#ifndef GL_INTEL_blackhole_render +#define GL_INTEL_blackhole_render 1 +#define GL_BLACKHOLE_RENDER_INTEL 0x83FC +#endif /* GL_INTEL_blackhole_render */ + +#ifndef GL_INTEL_conservative_rasterization +#define GL_INTEL_conservative_rasterization 1 +#define GL_CONSERVATIVE_RASTERIZATION_INTEL 0x83FE +#endif /* GL_INTEL_conservative_rasterization */ + +#ifndef GL_INTEL_framebuffer_CMAA +#define GL_INTEL_framebuffer_CMAA 1 +typedef void (GL_APIENTRYP PFNGLAPPLYFRAMEBUFFERATTACHMENTCMAAINTELPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glApplyFramebufferAttachmentCMAAINTEL (void); +#endif +#endif /* GL_INTEL_framebuffer_CMAA */ + +#ifndef GL_INTEL_performance_query +#define GL_INTEL_performance_query 1 +#define GL_PERFQUERY_SINGLE_CONTEXT_INTEL 0x00000000 +#define GL_PERFQUERY_GLOBAL_CONTEXT_INTEL 0x00000001 +#define GL_PERFQUERY_WAIT_INTEL 0x83FB +#define GL_PERFQUERY_FLUSH_INTEL 0x83FA +#define GL_PERFQUERY_DONOT_FLUSH_INTEL 0x83F9 +#define GL_PERFQUERY_COUNTER_EVENT_INTEL 0x94F0 +#define GL_PERFQUERY_COUNTER_DURATION_NORM_INTEL 0x94F1 +#define GL_PERFQUERY_COUNTER_DURATION_RAW_INTEL 0x94F2 +#define GL_PERFQUERY_COUNTER_THROUGHPUT_INTEL 0x94F3 +#define GL_PERFQUERY_COUNTER_RAW_INTEL 0x94F4 +#define GL_PERFQUERY_COUNTER_TIMESTAMP_INTEL 0x94F5 +#define GL_PERFQUERY_COUNTER_DATA_UINT32_INTEL 0x94F8 +#define GL_PERFQUERY_COUNTER_DATA_UINT64_INTEL 0x94F9 +#define GL_PERFQUERY_COUNTER_DATA_FLOAT_INTEL 0x94FA +#define GL_PERFQUERY_COUNTER_DATA_DOUBLE_INTEL 0x94FB +#define GL_PERFQUERY_COUNTER_DATA_BOOL32_INTEL 0x94FC +#define GL_PERFQUERY_QUERY_NAME_LENGTH_MAX_INTEL 0x94FD +#define GL_PERFQUERY_COUNTER_NAME_LENGTH_MAX_INTEL 0x94FE +#define GL_PERFQUERY_COUNTER_DESC_LENGTH_MAX_INTEL 0x94FF +#define GL_PERFQUERY_GPA_EXTENDED_COUNTERS_INTEL 0x9500 +typedef void (GL_APIENTRYP PFNGLBEGINPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (GL_APIENTRYP PFNGLCREATEPERFQUERYINTELPROC) (GLuint queryId, GLuint *queryHandle); +typedef void (GL_APIENTRYP PFNGLDELETEPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (GL_APIENTRYP PFNGLENDPERFQUERYINTELPROC) (GLuint queryHandle); +typedef void (GL_APIENTRYP PFNGLGETFIRSTPERFQUERYIDINTELPROC) (GLuint *queryId); +typedef void (GL_APIENTRYP PFNGLGETNEXTPERFQUERYIDINTELPROC) (GLuint queryId, GLuint *nextQueryId); +typedef void (GL_APIENTRYP PFNGLGETPERFCOUNTERINFOINTELPROC) (GLuint queryId, GLuint counterId, GLuint counterNameLength, GLchar *counterName, GLuint counterDescLength, GLchar *counterDesc, GLuint *counterOffset, GLuint *counterDataSize, GLuint *counterTypeEnum, GLuint *counterDataTypeEnum, GLuint64 *rawCounterMaxValue); +typedef void (GL_APIENTRYP PFNGLGETPERFQUERYDATAINTELPROC) (GLuint queryHandle, GLuint flags, GLsizei dataSize, void *data, GLuint *bytesWritten); +typedef void (GL_APIENTRYP PFNGLGETPERFQUERYIDBYNAMEINTELPROC) (GLchar *queryName, GLuint *queryId); +typedef void (GL_APIENTRYP PFNGLGETPERFQUERYINFOINTELPROC) (GLuint queryId, GLuint queryNameLength, GLchar *queryName, GLuint *dataSize, GLuint *noCounters, GLuint *noInstances, GLuint *capsMask); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBeginPerfQueryINTEL (GLuint queryHandle); +GL_APICALL void GL_APIENTRY glCreatePerfQueryINTEL (GLuint queryId, GLuint *queryHandle); +GL_APICALL void GL_APIENTRY glDeletePerfQueryINTEL (GLuint queryHandle); 
+GL_APICALL void GL_APIENTRY glEndPerfQueryINTEL (GLuint queryHandle); +GL_APICALL void GL_APIENTRY glGetFirstPerfQueryIdINTEL (GLuint *queryId); +GL_APICALL void GL_APIENTRY glGetNextPerfQueryIdINTEL (GLuint queryId, GLuint *nextQueryId); +GL_APICALL void GL_APIENTRY glGetPerfCounterInfoINTEL (GLuint queryId, GLuint counterId, GLuint counterNameLength, GLchar *counterName, GLuint counterDescLength, GLchar *counterDesc, GLuint *counterOffset, GLuint *counterDataSize, GLuint *counterTypeEnum, GLuint *counterDataTypeEnum, GLuint64 *rawCounterMaxValue); +GL_APICALL void GL_APIENTRY glGetPerfQueryDataINTEL (GLuint queryHandle, GLuint flags, GLsizei dataSize, void *data, GLuint *bytesWritten); +GL_APICALL void GL_APIENTRY glGetPerfQueryIdByNameINTEL (GLchar *queryName, GLuint *queryId); +GL_APICALL void GL_APIENTRY glGetPerfQueryInfoINTEL (GLuint queryId, GLuint queryNameLength, GLchar *queryName, GLuint *dataSize, GLuint *noCounters, GLuint *noInstances, GLuint *capsMask); +#endif +#endif /* GL_INTEL_performance_query */ + +#ifndef GL_MESA_program_binary_formats +#define GL_MESA_program_binary_formats 1 +#define GL_PROGRAM_BINARY_FORMAT_MESA 0x875F +#endif /* GL_MESA_program_binary_formats */ + +#ifndef GL_MESA_shader_integer_functions +#define GL_MESA_shader_integer_functions 1 +#endif /* GL_MESA_shader_integer_functions */ + +#ifndef GL_NVX_blend_equation_advanced_multi_draw_buffers +#define GL_NVX_blend_equation_advanced_multi_draw_buffers 1 +#endif /* GL_NVX_blend_equation_advanced_multi_draw_buffers */ + +#ifndef GL_NV_bindless_texture +#define GL_NV_bindless_texture 1 +typedef GLuint64 (GL_APIENTRYP PFNGLGETTEXTUREHANDLENVPROC) (GLuint texture); +typedef GLuint64 (GL_APIENTRYP PFNGLGETTEXTURESAMPLERHANDLENVPROC) (GLuint texture, GLuint sampler); +typedef void (GL_APIENTRYP PFNGLMAKETEXTUREHANDLERESIDENTNVPROC) (GLuint64 handle); +typedef void (GL_APIENTRYP PFNGLMAKETEXTUREHANDLENONRESIDENTNVPROC) (GLuint64 handle); +typedef GLuint64 (GL_APIENTRYP PFNGLGETIMAGEHANDLENVPROC) (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +typedef void (GL_APIENTRYP PFNGLMAKEIMAGEHANDLERESIDENTNVPROC) (GLuint64 handle, GLenum access); +typedef void (GL_APIENTRYP PFNGLMAKEIMAGEHANDLENONRESIDENTNVPROC) (GLuint64 handle); +typedef void (GL_APIENTRYP PFNGLUNIFORMHANDLEUI64NVPROC) (GLint location, GLuint64 value); +typedef void (GL_APIENTRYP PFNGLUNIFORMHANDLEUI64VNVPROC) (GLint location, GLsizei count, const GLuint64 *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64NVPROC) (GLuint program, GLint location, GLuint64 value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORMHANDLEUI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +typedef GLboolean (GL_APIENTRYP PFNGLISTEXTUREHANDLERESIDENTNVPROC) (GLuint64 handle); +typedef GLboolean (GL_APIENTRYP PFNGLISIMAGEHANDLERESIDENTNVPROC) (GLuint64 handle); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLuint64 GL_APIENTRY glGetTextureHandleNV (GLuint texture); +GL_APICALL GLuint64 GL_APIENTRY glGetTextureSamplerHandleNV (GLuint texture, GLuint sampler); +GL_APICALL void GL_APIENTRY glMakeTextureHandleResidentNV (GLuint64 handle); +GL_APICALL void GL_APIENTRY glMakeTextureHandleNonResidentNV (GLuint64 handle); +GL_APICALL GLuint64 GL_APIENTRY glGetImageHandleNV (GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum format); +GL_APICALL void GL_APIENTRY glMakeImageHandleResidentNV (GLuint64 handle, GLenum access); +GL_APICALL void GL_APIENTRY glMakeImageHandleNonResidentNV (GLuint64 
handle); +GL_APICALL void GL_APIENTRY glUniformHandleui64NV (GLint location, GLuint64 value); +GL_APICALL void GL_APIENTRY glUniformHandleui64vNV (GLint location, GLsizei count, const GLuint64 *value); +GL_APICALL void GL_APIENTRY glProgramUniformHandleui64NV (GLuint program, GLint location, GLuint64 value); +GL_APICALL void GL_APIENTRY glProgramUniformHandleui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64 *values); +GL_APICALL GLboolean GL_APIENTRY glIsTextureHandleResidentNV (GLuint64 handle); +GL_APICALL GLboolean GL_APIENTRY glIsImageHandleResidentNV (GLuint64 handle); +#endif +#endif /* GL_NV_bindless_texture */ + +#ifndef GL_NV_blend_equation_advanced +#define GL_NV_blend_equation_advanced 1 +#define GL_BLEND_OVERLAP_NV 0x9281 +#define GL_BLEND_PREMULTIPLIED_SRC_NV 0x9280 +#define GL_BLUE_NV 0x1905 +#define GL_COLORBURN_NV 0x929A +#define GL_COLORDODGE_NV 0x9299 +#define GL_CONJOINT_NV 0x9284 +#define GL_CONTRAST_NV 0x92A1 +#define GL_DARKEN_NV 0x9297 +#define GL_DIFFERENCE_NV 0x929E +#define GL_DISJOINT_NV 0x9283 +#define GL_DST_ATOP_NV 0x928F +#define GL_DST_IN_NV 0x928B +#define GL_DST_NV 0x9287 +#define GL_DST_OUT_NV 0x928D +#define GL_DST_OVER_NV 0x9289 +#define GL_EXCLUSION_NV 0x92A0 +#define GL_GREEN_NV 0x1904 +#define GL_HARDLIGHT_NV 0x929B +#define GL_HARDMIX_NV 0x92A9 +#define GL_HSL_COLOR_NV 0x92AF +#define GL_HSL_HUE_NV 0x92AD +#define GL_HSL_LUMINOSITY_NV 0x92B0 +#define GL_HSL_SATURATION_NV 0x92AE +#define GL_INVERT_OVG_NV 0x92B4 +#define GL_INVERT_RGB_NV 0x92A3 +#define GL_LIGHTEN_NV 0x9298 +#define GL_LINEARBURN_NV 0x92A5 +#define GL_LINEARDODGE_NV 0x92A4 +#define GL_LINEARLIGHT_NV 0x92A7 +#define GL_MINUS_CLAMPED_NV 0x92B3 +#define GL_MINUS_NV 0x929F +#define GL_MULTIPLY_NV 0x9294 +#define GL_OVERLAY_NV 0x9296 +#define GL_PINLIGHT_NV 0x92A8 +#define GL_PLUS_CLAMPED_ALPHA_NV 0x92B2 +#define GL_PLUS_CLAMPED_NV 0x92B1 +#define GL_PLUS_DARKER_NV 0x9292 +#define GL_PLUS_NV 0x9291 +#define GL_RED_NV 0x1903 +#define GL_SCREEN_NV 0x9295 +#define GL_SOFTLIGHT_NV 0x929C +#define GL_SRC_ATOP_NV 0x928E +#define GL_SRC_IN_NV 0x928A +#define GL_SRC_NV 0x9286 +#define GL_SRC_OUT_NV 0x928C +#define GL_SRC_OVER_NV 0x9288 +#define GL_UNCORRELATED_NV 0x9282 +#define GL_VIVIDLIGHT_NV 0x92A6 +#define GL_XOR_NV 0x1506 +typedef void (GL_APIENTRYP PFNGLBLENDPARAMETERINVPROC) (GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLBLENDBARRIERNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBlendParameteriNV (GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glBlendBarrierNV (void); +#endif +#endif /* GL_NV_blend_equation_advanced */ + +#ifndef GL_NV_blend_equation_advanced_coherent +#define GL_NV_blend_equation_advanced_coherent 1 +#define GL_BLEND_ADVANCED_COHERENT_NV 0x9285 +#endif /* GL_NV_blend_equation_advanced_coherent */ + +#ifndef GL_NV_blend_minmax_factor +#define GL_NV_blend_minmax_factor 1 +#define GL_FACTOR_MIN_AMD 0x901C +#define GL_FACTOR_MAX_AMD 0x901D +#endif /* GL_NV_blend_minmax_factor */ + +#ifndef GL_NV_clip_space_w_scaling +#define GL_NV_clip_space_w_scaling 1 +#define GL_VIEWPORT_POSITION_W_SCALE_NV 0x937C +#define GL_VIEWPORT_POSITION_W_SCALE_X_COEFF_NV 0x937D +#define GL_VIEWPORT_POSITION_W_SCALE_Y_COEFF_NV 0x937E +typedef void (GL_APIENTRYP PFNGLVIEWPORTPOSITIONWSCALENVPROC) (GLuint index, GLfloat xcoeff, GLfloat ycoeff); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glViewportPositionWScaleNV (GLuint index, GLfloat xcoeff, GLfloat ycoeff); +#endif +#endif /* GL_NV_clip_space_w_scaling */ + 
+#ifndef GL_NV_conditional_render +#define GL_NV_conditional_render 1 +#define GL_QUERY_WAIT_NV 0x8E13 +#define GL_QUERY_NO_WAIT_NV 0x8E14 +#define GL_QUERY_BY_REGION_WAIT_NV 0x8E15 +#define GL_QUERY_BY_REGION_NO_WAIT_NV 0x8E16 +typedef void (GL_APIENTRYP PFNGLBEGINCONDITIONALRENDERNVPROC) (GLuint id, GLenum mode); +typedef void (GL_APIENTRYP PFNGLENDCONDITIONALRENDERNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBeginConditionalRenderNV (GLuint id, GLenum mode); +GL_APICALL void GL_APIENTRY glEndConditionalRenderNV (void); +#endif +#endif /* GL_NV_conditional_render */ + +#ifndef GL_NV_conservative_raster +#define GL_NV_conservative_raster 1 +#define GL_CONSERVATIVE_RASTERIZATION_NV 0x9346 +#define GL_SUBPIXEL_PRECISION_BIAS_X_BITS_NV 0x9347 +#define GL_SUBPIXEL_PRECISION_BIAS_Y_BITS_NV 0x9348 +#define GL_MAX_SUBPIXEL_PRECISION_BIAS_BITS_NV 0x9349 +typedef void (GL_APIENTRYP PFNGLSUBPIXELPRECISIONBIASNVPROC) (GLuint xbits, GLuint ybits); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glSubpixelPrecisionBiasNV (GLuint xbits, GLuint ybits); +#endif +#endif /* GL_NV_conservative_raster */ + +#ifndef GL_NV_conservative_raster_pre_snap +#define GL_NV_conservative_raster_pre_snap 1 +#define GL_CONSERVATIVE_RASTER_MODE_PRE_SNAP_NV 0x9550 +#endif /* GL_NV_conservative_raster_pre_snap */ + +#ifndef GL_NV_conservative_raster_pre_snap_triangles +#define GL_NV_conservative_raster_pre_snap_triangles 1 +#define GL_CONSERVATIVE_RASTER_MODE_NV 0x954D +#define GL_CONSERVATIVE_RASTER_MODE_POST_SNAP_NV 0x954E +#define GL_CONSERVATIVE_RASTER_MODE_PRE_SNAP_TRIANGLES_NV 0x954F +typedef void (GL_APIENTRYP PFNGLCONSERVATIVERASTERPARAMETERINVPROC) (GLenum pname, GLint param); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glConservativeRasterParameteriNV (GLenum pname, GLint param); +#endif +#endif /* GL_NV_conservative_raster_pre_snap_triangles */ + +#ifndef GL_NV_copy_buffer +#define GL_NV_copy_buffer 1 +#define GL_COPY_READ_BUFFER_NV 0x8F36 +#define GL_COPY_WRITE_BUFFER_NV 0x8F37 +typedef void (GL_APIENTRYP PFNGLCOPYBUFFERSUBDATANVPROC) (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCopyBufferSubDataNV (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +#endif +#endif /* GL_NV_copy_buffer */ + +#ifndef GL_NV_coverage_sample +#define GL_NV_coverage_sample 1 +#define GL_COVERAGE_COMPONENT_NV 0x8ED0 +#define GL_COVERAGE_COMPONENT4_NV 0x8ED1 +#define GL_COVERAGE_ATTACHMENT_NV 0x8ED2 +#define GL_COVERAGE_BUFFERS_NV 0x8ED3 +#define GL_COVERAGE_SAMPLES_NV 0x8ED4 +#define GL_COVERAGE_ALL_FRAGMENTS_NV 0x8ED5 +#define GL_COVERAGE_EDGE_FRAGMENTS_NV 0x8ED6 +#define GL_COVERAGE_AUTOMATIC_NV 0x8ED7 +#define GL_COVERAGE_BUFFER_BIT_NV 0x00008000 +typedef void (GL_APIENTRYP PFNGLCOVERAGEMASKNVPROC) (GLboolean mask); +typedef void (GL_APIENTRYP PFNGLCOVERAGEOPERATIONNVPROC) (GLenum operation); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCoverageMaskNV (GLboolean mask); +GL_APICALL void GL_APIENTRY glCoverageOperationNV (GLenum operation); +#endif +#endif /* GL_NV_coverage_sample */ + +#ifndef GL_NV_depth_nonlinear +#define GL_NV_depth_nonlinear 1 +#define GL_DEPTH_COMPONENT16_NONLINEAR_NV 0x8E2C +#endif /* GL_NV_depth_nonlinear */ + +#ifndef GL_NV_draw_buffers +#define GL_NV_draw_buffers 1 +#define GL_MAX_DRAW_BUFFERS_NV 0x8824 +#define GL_DRAW_BUFFER0_NV 0x8825 +#define GL_DRAW_BUFFER1_NV 0x8826 +#define 
GL_DRAW_BUFFER2_NV 0x8827 +#define GL_DRAW_BUFFER3_NV 0x8828 +#define GL_DRAW_BUFFER4_NV 0x8829 +#define GL_DRAW_BUFFER5_NV 0x882A +#define GL_DRAW_BUFFER6_NV 0x882B +#define GL_DRAW_BUFFER7_NV 0x882C +#define GL_DRAW_BUFFER8_NV 0x882D +#define GL_DRAW_BUFFER9_NV 0x882E +#define GL_DRAW_BUFFER10_NV 0x882F +#define GL_DRAW_BUFFER11_NV 0x8830 +#define GL_DRAW_BUFFER12_NV 0x8831 +#define GL_DRAW_BUFFER13_NV 0x8832 +#define GL_DRAW_BUFFER14_NV 0x8833 +#define GL_DRAW_BUFFER15_NV 0x8834 +#define GL_COLOR_ATTACHMENT0_NV 0x8CE0 +#define GL_COLOR_ATTACHMENT1_NV 0x8CE1 +#define GL_COLOR_ATTACHMENT2_NV 0x8CE2 +#define GL_COLOR_ATTACHMENT3_NV 0x8CE3 +#define GL_COLOR_ATTACHMENT4_NV 0x8CE4 +#define GL_COLOR_ATTACHMENT5_NV 0x8CE5 +#define GL_COLOR_ATTACHMENT6_NV 0x8CE6 +#define GL_COLOR_ATTACHMENT7_NV 0x8CE7 +#define GL_COLOR_ATTACHMENT8_NV 0x8CE8 +#define GL_COLOR_ATTACHMENT9_NV 0x8CE9 +#define GL_COLOR_ATTACHMENT10_NV 0x8CEA +#define GL_COLOR_ATTACHMENT11_NV 0x8CEB +#define GL_COLOR_ATTACHMENT12_NV 0x8CEC +#define GL_COLOR_ATTACHMENT13_NV 0x8CED +#define GL_COLOR_ATTACHMENT14_NV 0x8CEE +#define GL_COLOR_ATTACHMENT15_NV 0x8CEF +typedef void (GL_APIENTRYP PFNGLDRAWBUFFERSNVPROC) (GLsizei n, const GLenum *bufs); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawBuffersNV (GLsizei n, const GLenum *bufs); +#endif +#endif /* GL_NV_draw_buffers */ + +#ifndef GL_NV_draw_instanced +#define GL_NV_draw_instanced 1 +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDNVPROC) (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDNVPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawArraysInstancedNV (GLenum mode, GLint first, GLsizei count, GLsizei primcount); +GL_APICALL void GL_APIENTRY glDrawElementsInstancedNV (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount); +#endif +#endif /* GL_NV_draw_instanced */ + +#ifndef GL_NV_draw_vulkan_image +#define GL_NV_draw_vulkan_image 1 +typedef void (GL_APIENTRY *GLVULKANPROCNV)(void); +typedef void (GL_APIENTRYP PFNGLDRAWVKIMAGENVPROC) (GLuint64 vkImage, GLuint sampler, GLfloat x0, GLfloat y0, GLfloat x1, GLfloat y1, GLfloat z, GLfloat s0, GLfloat t0, GLfloat s1, GLfloat t1); +typedef GLVULKANPROCNV (GL_APIENTRYP PFNGLGETVKPROCADDRNVPROC) (const GLchar *name); +typedef void (GL_APIENTRYP PFNGLWAITVKSEMAPHORENVPROC) (GLuint64 vkSemaphore); +typedef void (GL_APIENTRYP PFNGLSIGNALVKSEMAPHORENVPROC) (GLuint64 vkSemaphore); +typedef void (GL_APIENTRYP PFNGLSIGNALVKFENCENVPROC) (GLuint64 vkFence); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDrawVkImageNV (GLuint64 vkImage, GLuint sampler, GLfloat x0, GLfloat y0, GLfloat x1, GLfloat y1, GLfloat z, GLfloat s0, GLfloat t0, GLfloat s1, GLfloat t1); +GL_APICALL GLVULKANPROCNV GL_APIENTRY glGetVkProcAddrNV (const GLchar *name); +GL_APICALL void GL_APIENTRY glWaitVkSemaphoreNV (GLuint64 vkSemaphore); +GL_APICALL void GL_APIENTRY glSignalVkSemaphoreNV (GLuint64 vkSemaphore); +GL_APICALL void GL_APIENTRY glSignalVkFenceNV (GLuint64 vkFence); +#endif +#endif /* GL_NV_draw_vulkan_image */ + +#ifndef GL_NV_explicit_attrib_location +#define GL_NV_explicit_attrib_location 1 +#endif /* GL_NV_explicit_attrib_location */ + +#ifndef GL_NV_fbo_color_attachments +#define GL_NV_fbo_color_attachments 1 +#define GL_MAX_COLOR_ATTACHMENTS_NV 0x8CDF +#endif /* GL_NV_fbo_color_attachments */ + +#ifndef GL_NV_fence +#define GL_NV_fence 1 
+#define GL_ALL_COMPLETED_NV 0x84F2 +#define GL_FENCE_STATUS_NV 0x84F3 +#define GL_FENCE_CONDITION_NV 0x84F4 +typedef void (GL_APIENTRYP PFNGLDELETEFENCESNVPROC) (GLsizei n, const GLuint *fences); +typedef void (GL_APIENTRYP PFNGLGENFENCESNVPROC) (GLsizei n, GLuint *fences); +typedef GLboolean (GL_APIENTRYP PFNGLISFENCENVPROC) (GLuint fence); +typedef GLboolean (GL_APIENTRYP PFNGLTESTFENCENVPROC) (GLuint fence); +typedef void (GL_APIENTRYP PFNGLGETFENCEIVNVPROC) (GLuint fence, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLFINISHFENCENVPROC) (GLuint fence); +typedef void (GL_APIENTRYP PFNGLSETFENCENVPROC) (GLuint fence, GLenum condition); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glDeleteFencesNV (GLsizei n, const GLuint *fences); +GL_APICALL void GL_APIENTRY glGenFencesNV (GLsizei n, GLuint *fences); +GL_APICALL GLboolean GL_APIENTRY glIsFenceNV (GLuint fence); +GL_APICALL GLboolean GL_APIENTRY glTestFenceNV (GLuint fence); +GL_APICALL void GL_APIENTRY glGetFenceivNV (GLuint fence, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glFinishFenceNV (GLuint fence); +GL_APICALL void GL_APIENTRY glSetFenceNV (GLuint fence, GLenum condition); +#endif +#endif /* GL_NV_fence */ + +#ifndef GL_NV_fill_rectangle +#define GL_NV_fill_rectangle 1 +#define GL_FILL_RECTANGLE_NV 0x933C +#endif /* GL_NV_fill_rectangle */ + +#ifndef GL_NV_fragment_coverage_to_color +#define GL_NV_fragment_coverage_to_color 1 +#define GL_FRAGMENT_COVERAGE_TO_COLOR_NV 0x92DD +#define GL_FRAGMENT_COVERAGE_COLOR_NV 0x92DE +typedef void (GL_APIENTRYP PFNGLFRAGMENTCOVERAGECOLORNVPROC) (GLuint color); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFragmentCoverageColorNV (GLuint color); +#endif +#endif /* GL_NV_fragment_coverage_to_color */ + +#ifndef GL_NV_fragment_shader_interlock +#define GL_NV_fragment_shader_interlock 1 +#endif /* GL_NV_fragment_shader_interlock */ + +#ifndef GL_NV_framebuffer_blit +#define GL_NV_framebuffer_blit 1 +#define GL_READ_FRAMEBUFFER_NV 0x8CA8 +#define GL_DRAW_FRAMEBUFFER_NV 0x8CA9 +#define GL_DRAW_FRAMEBUFFER_BINDING_NV 0x8CA6 +#define GL_READ_FRAMEBUFFER_BINDING_NV 0x8CAA +typedef void (GL_APIENTRYP PFNGLBLITFRAMEBUFFERNVPROC) (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glBlitFramebufferNV (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +#endif +#endif /* GL_NV_framebuffer_blit */ + +#ifndef GL_NV_framebuffer_mixed_samples +#define GL_NV_framebuffer_mixed_samples 1 +#define GL_COVERAGE_MODULATION_TABLE_NV 0x9331 +#define GL_COLOR_SAMPLES_NV 0x8E20 +#define GL_DEPTH_SAMPLES_NV 0x932D +#define GL_STENCIL_SAMPLES_NV 0x932E +#define GL_MIXED_DEPTH_SAMPLES_SUPPORTED_NV 0x932F +#define GL_MIXED_STENCIL_SAMPLES_SUPPORTED_NV 0x9330 +#define GL_COVERAGE_MODULATION_NV 0x9332 +#define GL_COVERAGE_MODULATION_TABLE_SIZE_NV 0x9333 +typedef void (GL_APIENTRYP PFNGLCOVERAGEMODULATIONTABLENVPROC) (GLsizei n, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLGETCOVERAGEMODULATIONTABLENVPROC) (GLsizei bufsize, GLfloat *v); +typedef void (GL_APIENTRYP PFNGLCOVERAGEMODULATIONNVPROC) (GLenum components); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glCoverageModulationTableNV (GLsizei n, const GLfloat *v); +GL_APICALL void GL_APIENTRY glGetCoverageModulationTableNV (GLsizei bufsize, GLfloat *v); +GL_APICALL void 
GL_APIENTRY glCoverageModulationNV (GLenum components); +#endif +#endif /* GL_NV_framebuffer_mixed_samples */ + +#ifndef GL_NV_framebuffer_multisample +#define GL_NV_framebuffer_multisample 1 +#define GL_RENDERBUFFER_SAMPLES_NV 0x8CAB +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_NV 0x8D56 +#define GL_MAX_SAMPLES_NV 0x8D57 +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLENVPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisampleNV (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +#endif +#endif /* GL_NV_framebuffer_multisample */ + +#ifndef GL_NV_generate_mipmap_sRGB +#define GL_NV_generate_mipmap_sRGB 1 +#endif /* GL_NV_generate_mipmap_sRGB */ + +#ifndef GL_NV_geometry_shader_passthrough +#define GL_NV_geometry_shader_passthrough 1 +#endif /* GL_NV_geometry_shader_passthrough */ + +#ifndef GL_NV_gpu_shader5 +#define GL_NV_gpu_shader5 1 +typedef khronos_int64_t GLint64EXT; +typedef khronos_uint64_t GLuint64EXT; +#define GL_INT64_NV 0x140E +#define GL_UNSIGNED_INT64_NV 0x140F +#define GL_INT8_NV 0x8FE0 +#define GL_INT8_VEC2_NV 0x8FE1 +#define GL_INT8_VEC3_NV 0x8FE2 +#define GL_INT8_VEC4_NV 0x8FE3 +#define GL_INT16_NV 0x8FE4 +#define GL_INT16_VEC2_NV 0x8FE5 +#define GL_INT16_VEC3_NV 0x8FE6 +#define GL_INT16_VEC4_NV 0x8FE7 +#define GL_INT64_VEC2_NV 0x8FE9 +#define GL_INT64_VEC3_NV 0x8FEA +#define GL_INT64_VEC4_NV 0x8FEB +#define GL_UNSIGNED_INT8_NV 0x8FEC +#define GL_UNSIGNED_INT8_VEC2_NV 0x8FED +#define GL_UNSIGNED_INT8_VEC3_NV 0x8FEE +#define GL_UNSIGNED_INT8_VEC4_NV 0x8FEF +#define GL_UNSIGNED_INT16_NV 0x8FF0 +#define GL_UNSIGNED_INT16_VEC2_NV 0x8FF1 +#define GL_UNSIGNED_INT16_VEC3_NV 0x8FF2 +#define GL_UNSIGNED_INT16_VEC4_NV 0x8FF3 +#define GL_UNSIGNED_INT64_VEC2_NV 0x8FF5 +#define GL_UNSIGNED_INT64_VEC3_NV 0x8FF6 +#define GL_UNSIGNED_INT64_VEC4_NV 0x8FF7 +#define GL_FLOAT16_NV 0x8FF8 +#define GL_FLOAT16_VEC2_NV 0x8FF9 +#define GL_FLOAT16_VEC3_NV 0x8FFA +#define GL_FLOAT16_VEC4_NV 0x8FFB +#define GL_PATCHES 0x000E +typedef void (GL_APIENTRYP PFNGLUNIFORM1I64NVPROC) (GLint location, GLint64EXT x); +typedef void (GL_APIENTRYP PFNGLUNIFORM2I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y); +typedef void (GL_APIENTRYP PFNGLUNIFORM3I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +typedef void (GL_APIENTRYP PFNGLUNIFORM4I64NVPROC) (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +typedef void (GL_APIENTRYP PFNGLUNIFORM1I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4I64VNVPROC) (GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UI64NVPROC) (GLint location, GLuint64EXT x); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UI64NVPROC) (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void 
(GL_APIENTRYP PFNGLUNIFORM2UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UI64VNVPROC) (GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMI64VNVPROC) (GLuint program, GLint location, GLint64EXT *params); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1I64NVPROC) (GLuint program, GLint location, GLint64EXT x); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4I64NVPROC) (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4I64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UI64NVPROC) (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM1UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM2UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM3UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +typedef void (GL_APIENTRYP PFNGLPROGRAMUNIFORM4UI64VNVPROC) (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glUniform1i64NV (GLint location, GLint64EXT x); +GL_APICALL void GL_APIENTRY glUniform2i64NV (GLint location, GLint64EXT x, GLint64EXT y); +GL_APICALL void GL_APIENTRY glUniform3i64NV (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +GL_APICALL void GL_APIENTRY glUniform4i64NV (GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +GL_APICALL void GL_APIENTRY glUniform1i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform2i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform3i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform4i64vNV (GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform1ui64NV (GLint location, GLuint64EXT x); +GL_APICALL void GL_APIENTRY glUniform2ui64NV (GLint location, GLuint64EXT x, 
GLuint64EXT y); +GL_APICALL void GL_APIENTRY glUniform3ui64NV (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +GL_APICALL void GL_APIENTRY glUniform4ui64NV (GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +GL_APICALL void GL_APIENTRY glUniform1ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform2ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform3ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glUniform4ui64vNV (GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glGetUniformi64vNV (GLuint program, GLint location, GLint64EXT *params); +GL_APICALL void GL_APIENTRY glProgramUniform1i64NV (GLuint program, GLint location, GLint64EXT x); +GL_APICALL void GL_APIENTRY glProgramUniform2i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y); +GL_APICALL void GL_APIENTRY glProgramUniform3i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z); +GL_APICALL void GL_APIENTRY glProgramUniform4i64NV (GLuint program, GLint location, GLint64EXT x, GLint64EXT y, GLint64EXT z, GLint64EXT w); +GL_APICALL void GL_APIENTRY glProgramUniform1i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform2i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform3i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform4i64vNV (GLuint program, GLint location, GLsizei count, const GLint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform1ui64NV (GLuint program, GLint location, GLuint64EXT x); +GL_APICALL void GL_APIENTRY glProgramUniform2ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y); +GL_APICALL void GL_APIENTRY glProgramUniform3ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z); +GL_APICALL void GL_APIENTRY glProgramUniform4ui64NV (GLuint program, GLint location, GLuint64EXT x, GLuint64EXT y, GLuint64EXT z, GLuint64EXT w); +GL_APICALL void GL_APIENTRY glProgramUniform1ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform2ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform3ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +GL_APICALL void GL_APIENTRY glProgramUniform4ui64vNV (GLuint program, GLint location, GLsizei count, const GLuint64EXT *value); +#endif +#endif /* GL_NV_gpu_shader5 */ + +#ifndef GL_NV_image_formats +#define GL_NV_image_formats 1 +#endif /* GL_NV_image_formats */ + +#ifndef GL_NV_instanced_arrays +#define GL_NV_instanced_arrays 1 +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR_NV 0x88FE +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISORNVPROC) (GLuint index, GLuint divisor); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glVertexAttribDivisorNV (GLuint index, GLuint divisor); +#endif +#endif /* GL_NV_instanced_arrays */ + +#ifndef GL_NV_internalformat_sample_query +#define GL_NV_internalformat_sample_query 1 +#define GL_TEXTURE_2D_MULTISAMPLE 0x9100 +#define GL_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9102 +#define GL_MULTISAMPLES_NV 0x9371 +#define GL_SUPERSAMPLE_SCALE_X_NV 0x9372 +#define 
GL_SUPERSAMPLE_SCALE_Y_NV 0x9373 +#define GL_CONFORMANT_NV 0x9374 +typedef void (GL_APIENTRYP PFNGLGETINTERNALFORMATSAMPLEIVNVPROC) (GLenum target, GLenum internalformat, GLsizei samples, GLenum pname, GLsizei bufSize, GLint *params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetInternalformatSampleivNV (GLenum target, GLenum internalformat, GLsizei samples, GLenum pname, GLsizei bufSize, GLint *params); +#endif +#endif /* GL_NV_internalformat_sample_query */ + +#ifndef GL_NV_non_square_matrices +#define GL_NV_non_square_matrices 1 +#define GL_FLOAT_MAT2x3_NV 0x8B65 +#define GL_FLOAT_MAT2x4_NV 0x8B66 +#define GL_FLOAT_MAT3x2_NV 0x8B67 +#define GL_FLOAT_MAT3x4_NV 0x8B68 +#define GL_FLOAT_MAT4x2_NV 0x8B69 +#define GL_FLOAT_MAT4x3_NV 0x8B6A +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X3FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X2FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X4FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X2FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X4FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X3FVNVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glUniformMatrix2x3fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x2fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2x4fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x2fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x4fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x3fvNV (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +#endif +#endif /* GL_NV_non_square_matrices */ + +#ifndef GL_NV_path_rendering +#define GL_NV_path_rendering 1 +typedef double GLdouble; +#define GL_PATH_FORMAT_SVG_NV 0x9070 +#define GL_PATH_FORMAT_PS_NV 0x9071 +#define GL_STANDARD_FONT_NAME_NV 0x9072 +#define GL_SYSTEM_FONT_NAME_NV 0x9073 +#define GL_FILE_NAME_NV 0x9074 +#define GL_PATH_STROKE_WIDTH_NV 0x9075 +#define GL_PATH_END_CAPS_NV 0x9076 +#define GL_PATH_INITIAL_END_CAP_NV 0x9077 +#define GL_PATH_TERMINAL_END_CAP_NV 0x9078 +#define GL_PATH_JOIN_STYLE_NV 0x9079 +#define GL_PATH_MITER_LIMIT_NV 0x907A +#define GL_PATH_DASH_CAPS_NV 0x907B +#define GL_PATH_INITIAL_DASH_CAP_NV 0x907C +#define GL_PATH_TERMINAL_DASH_CAP_NV 0x907D +#define GL_PATH_DASH_OFFSET_NV 0x907E +#define GL_PATH_CLIENT_LENGTH_NV 0x907F +#define GL_PATH_FILL_MODE_NV 0x9080 +#define GL_PATH_FILL_MASK_NV 0x9081 +#define GL_PATH_FILL_COVER_MODE_NV 0x9082 +#define GL_PATH_STROKE_COVER_MODE_NV 0x9083 +#define GL_PATH_STROKE_MASK_NV 0x9084 +#define GL_COUNT_UP_NV 0x9088 +#define GL_COUNT_DOWN_NV 0x9089 +#define GL_PATH_OBJECT_BOUNDING_BOX_NV 0x908A +#define GL_CONVEX_HULL_NV 0x908B +#define GL_BOUNDING_BOX_NV 0x908D +#define GL_TRANSLATE_X_NV 
0x908E +#define GL_TRANSLATE_Y_NV 0x908F +#define GL_TRANSLATE_2D_NV 0x9090 +#define GL_TRANSLATE_3D_NV 0x9091 +#define GL_AFFINE_2D_NV 0x9092 +#define GL_AFFINE_3D_NV 0x9094 +#define GL_TRANSPOSE_AFFINE_2D_NV 0x9096 +#define GL_TRANSPOSE_AFFINE_3D_NV 0x9098 +#define GL_UTF8_NV 0x909A +#define GL_UTF16_NV 0x909B +#define GL_BOUNDING_BOX_OF_BOUNDING_BOXES_NV 0x909C +#define GL_PATH_COMMAND_COUNT_NV 0x909D +#define GL_PATH_COORD_COUNT_NV 0x909E +#define GL_PATH_DASH_ARRAY_COUNT_NV 0x909F +#define GL_PATH_COMPUTED_LENGTH_NV 0x90A0 +#define GL_PATH_FILL_BOUNDING_BOX_NV 0x90A1 +#define GL_PATH_STROKE_BOUNDING_BOX_NV 0x90A2 +#define GL_SQUARE_NV 0x90A3 +#define GL_ROUND_NV 0x90A4 +#define GL_TRIANGULAR_NV 0x90A5 +#define GL_BEVEL_NV 0x90A6 +#define GL_MITER_REVERT_NV 0x90A7 +#define GL_MITER_TRUNCATE_NV 0x90A8 +#define GL_SKIP_MISSING_GLYPH_NV 0x90A9 +#define GL_USE_MISSING_GLYPH_NV 0x90AA +#define GL_PATH_ERROR_POSITION_NV 0x90AB +#define GL_ACCUM_ADJACENT_PAIRS_NV 0x90AD +#define GL_ADJACENT_PAIRS_NV 0x90AE +#define GL_FIRST_TO_REST_NV 0x90AF +#define GL_PATH_GEN_MODE_NV 0x90B0 +#define GL_PATH_GEN_COEFF_NV 0x90B1 +#define GL_PATH_GEN_COMPONENTS_NV 0x90B3 +#define GL_PATH_STENCIL_FUNC_NV 0x90B7 +#define GL_PATH_STENCIL_REF_NV 0x90B8 +#define GL_PATH_STENCIL_VALUE_MASK_NV 0x90B9 +#define GL_PATH_STENCIL_DEPTH_OFFSET_FACTOR_NV 0x90BD +#define GL_PATH_STENCIL_DEPTH_OFFSET_UNITS_NV 0x90BE +#define GL_PATH_COVER_DEPTH_FUNC_NV 0x90BF +#define GL_PATH_DASH_OFFSET_RESET_NV 0x90B4 +#define GL_MOVE_TO_RESETS_NV 0x90B5 +#define GL_MOVE_TO_CONTINUES_NV 0x90B6 +#define GL_CLOSE_PATH_NV 0x00 +#define GL_MOVE_TO_NV 0x02 +#define GL_RELATIVE_MOVE_TO_NV 0x03 +#define GL_LINE_TO_NV 0x04 +#define GL_RELATIVE_LINE_TO_NV 0x05 +#define GL_HORIZONTAL_LINE_TO_NV 0x06 +#define GL_RELATIVE_HORIZONTAL_LINE_TO_NV 0x07 +#define GL_VERTICAL_LINE_TO_NV 0x08 +#define GL_RELATIVE_VERTICAL_LINE_TO_NV 0x09 +#define GL_QUADRATIC_CURVE_TO_NV 0x0A +#define GL_RELATIVE_QUADRATIC_CURVE_TO_NV 0x0B +#define GL_CUBIC_CURVE_TO_NV 0x0C +#define GL_RELATIVE_CUBIC_CURVE_TO_NV 0x0D +#define GL_SMOOTH_QUADRATIC_CURVE_TO_NV 0x0E +#define GL_RELATIVE_SMOOTH_QUADRATIC_CURVE_TO_NV 0x0F +#define GL_SMOOTH_CUBIC_CURVE_TO_NV 0x10 +#define GL_RELATIVE_SMOOTH_CUBIC_CURVE_TO_NV 0x11 +#define GL_SMALL_CCW_ARC_TO_NV 0x12 +#define GL_RELATIVE_SMALL_CCW_ARC_TO_NV 0x13 +#define GL_SMALL_CW_ARC_TO_NV 0x14 +#define GL_RELATIVE_SMALL_CW_ARC_TO_NV 0x15 +#define GL_LARGE_CCW_ARC_TO_NV 0x16 +#define GL_RELATIVE_LARGE_CCW_ARC_TO_NV 0x17 +#define GL_LARGE_CW_ARC_TO_NV 0x18 +#define GL_RELATIVE_LARGE_CW_ARC_TO_NV 0x19 +#define GL_RESTART_PATH_NV 0xF0 +#define GL_DUP_FIRST_CUBIC_CURVE_TO_NV 0xF2 +#define GL_DUP_LAST_CUBIC_CURVE_TO_NV 0xF4 +#define GL_RECT_NV 0xF6 +#define GL_CIRCULAR_CCW_ARC_TO_NV 0xF8 +#define GL_CIRCULAR_CW_ARC_TO_NV 0xFA +#define GL_CIRCULAR_TANGENT_ARC_TO_NV 0xFC +#define GL_ARC_TO_NV 0xFE +#define GL_RELATIVE_ARC_TO_NV 0xFF +#define GL_BOLD_BIT_NV 0x01 +#define GL_ITALIC_BIT_NV 0x02 +#define GL_GLYPH_WIDTH_BIT_NV 0x01 +#define GL_GLYPH_HEIGHT_BIT_NV 0x02 +#define GL_GLYPH_HORIZONTAL_BEARING_X_BIT_NV 0x04 +#define GL_GLYPH_HORIZONTAL_BEARING_Y_BIT_NV 0x08 +#define GL_GLYPH_HORIZONTAL_BEARING_ADVANCE_BIT_NV 0x10 +#define GL_GLYPH_VERTICAL_BEARING_X_BIT_NV 0x20 +#define GL_GLYPH_VERTICAL_BEARING_Y_BIT_NV 0x40 +#define GL_GLYPH_VERTICAL_BEARING_ADVANCE_BIT_NV 0x80 +#define GL_GLYPH_HAS_KERNING_BIT_NV 0x100 +#define GL_FONT_X_MIN_BOUNDS_BIT_NV 0x00010000 +#define GL_FONT_Y_MIN_BOUNDS_BIT_NV 0x00020000 +#define GL_FONT_X_MAX_BOUNDS_BIT_NV 0x00040000 
+#define GL_FONT_Y_MAX_BOUNDS_BIT_NV 0x00080000 +#define GL_FONT_UNITS_PER_EM_BIT_NV 0x00100000 +#define GL_FONT_ASCENDER_BIT_NV 0x00200000 +#define GL_FONT_DESCENDER_BIT_NV 0x00400000 +#define GL_FONT_HEIGHT_BIT_NV 0x00800000 +#define GL_FONT_MAX_ADVANCE_WIDTH_BIT_NV 0x01000000 +#define GL_FONT_MAX_ADVANCE_HEIGHT_BIT_NV 0x02000000 +#define GL_FONT_UNDERLINE_POSITION_BIT_NV 0x04000000 +#define GL_FONT_UNDERLINE_THICKNESS_BIT_NV 0x08000000 +#define GL_FONT_HAS_KERNING_BIT_NV 0x10000000 +#define GL_ROUNDED_RECT_NV 0xE8 +#define GL_RELATIVE_ROUNDED_RECT_NV 0xE9 +#define GL_ROUNDED_RECT2_NV 0xEA +#define GL_RELATIVE_ROUNDED_RECT2_NV 0xEB +#define GL_ROUNDED_RECT4_NV 0xEC +#define GL_RELATIVE_ROUNDED_RECT4_NV 0xED +#define GL_ROUNDED_RECT8_NV 0xEE +#define GL_RELATIVE_ROUNDED_RECT8_NV 0xEF +#define GL_RELATIVE_RECT_NV 0xF7 +#define GL_FONT_GLYPHS_AVAILABLE_NV 0x9368 +#define GL_FONT_TARGET_UNAVAILABLE_NV 0x9369 +#define GL_FONT_UNAVAILABLE_NV 0x936A +#define GL_FONT_UNINTELLIGIBLE_NV 0x936B +#define GL_CONIC_CURVE_TO_NV 0x1A +#define GL_RELATIVE_CONIC_CURVE_TO_NV 0x1B +#define GL_FONT_NUM_GLYPH_INDICES_BIT_NV 0x20000000 +#define GL_STANDARD_FONT_FORMAT_NV 0x936C +#define GL_PATH_PROJECTION_NV 0x1701 +#define GL_PATH_MODELVIEW_NV 0x1700 +#define GL_PATH_MODELVIEW_STACK_DEPTH_NV 0x0BA3 +#define GL_PATH_MODELVIEW_MATRIX_NV 0x0BA6 +#define GL_PATH_MAX_MODELVIEW_STACK_DEPTH_NV 0x0D36 +#define GL_PATH_TRANSPOSE_MODELVIEW_MATRIX_NV 0x84E3 +#define GL_PATH_PROJECTION_STACK_DEPTH_NV 0x0BA4 +#define GL_PATH_PROJECTION_MATRIX_NV 0x0BA7 +#define GL_PATH_MAX_PROJECTION_STACK_DEPTH_NV 0x0D38 +#define GL_PATH_TRANSPOSE_PROJECTION_MATRIX_NV 0x84E4 +#define GL_FRAGMENT_INPUT_NV 0x936D +typedef GLuint (GL_APIENTRYP PFNGLGENPATHSNVPROC) (GLsizei range); +typedef void (GL_APIENTRYP PFNGLDELETEPATHSNVPROC) (GLuint path, GLsizei range); +typedef GLboolean (GL_APIENTRYP PFNGLISPATHNVPROC) (GLuint path); +typedef void (GL_APIENTRYP PFNGLPATHCOMMANDSNVPROC) (GLuint path, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (GL_APIENTRYP PFNGLPATHCOORDSNVPROC) (GLuint path, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (GL_APIENTRYP PFNGLPATHSUBCOMMANDSNVPROC) (GLuint path, GLsizei commandStart, GLsizei commandsToDelete, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (GL_APIENTRYP PFNGLPATHSUBCOORDSNVPROC) (GLuint path, GLsizei coordStart, GLsizei numCoords, GLenum coordType, const void *coords); +typedef void (GL_APIENTRYP PFNGLPATHSTRINGNVPROC) (GLuint path, GLenum format, GLsizei length, const void *pathString); +typedef void (GL_APIENTRYP PFNGLPATHGLYPHSNVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLsizei numGlyphs, GLenum type, const void *charcodes, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (GL_APIENTRYP PFNGLPATHGLYPHRANGENVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyph, GLsizei numGlyphs, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (GL_APIENTRYP PFNGLWEIGHTPATHSNVPROC) (GLuint resultPath, GLsizei numPaths, const GLuint *paths, const GLfloat *weights); +typedef void (GL_APIENTRYP PFNGLCOPYPATHNVPROC) (GLuint resultPath, GLuint srcPath); +typedef void (GL_APIENTRYP PFNGLINTERPOLATEPATHSNVPROC) (GLuint resultPath, GLuint pathA, GLuint pathB, GLfloat 
weight); +typedef void (GL_APIENTRYP PFNGLTRANSFORMPATHNVPROC) (GLuint resultPath, GLuint srcPath, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLPATHPARAMETERIVNVPROC) (GLuint path, GLenum pname, const GLint *value); +typedef void (GL_APIENTRYP PFNGLPATHPARAMETERINVPROC) (GLuint path, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLPATHPARAMETERFVNVPROC) (GLuint path, GLenum pname, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLPATHPARAMETERFNVPROC) (GLuint path, GLenum pname, GLfloat value); +typedef void (GL_APIENTRYP PFNGLPATHDASHARRAYNVPROC) (GLuint path, GLsizei dashCount, const GLfloat *dashArray); +typedef void (GL_APIENTRYP PFNGLPATHSTENCILFUNCNVPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLPATHSTENCILDEPTHOFFSETNVPROC) (GLfloat factor, GLfloat units); +typedef void (GL_APIENTRYP PFNGLSTENCILFILLPATHNVPROC) (GLuint path, GLenum fillMode, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILSTROKEPATHNVPROC) (GLuint path, GLint reference, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLSTENCILSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLPATHCOVERDEPTHFUNCNVPROC) (GLenum func); +typedef void (GL_APIENTRYP PFNGLCOVERFILLPATHNVPROC) (GLuint path, GLenum coverMode); +typedef void (GL_APIENTRYP PFNGLCOVERSTROKEPATHNVPROC) (GLuint path, GLenum coverMode); +typedef void (GL_APIENTRYP PFNGLCOVERFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLCOVERSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLGETPATHPARAMETERIVNVPROC) (GLuint path, GLenum pname, GLint *value); +typedef void (GL_APIENTRYP PFNGLGETPATHPARAMETERFVNVPROC) (GLuint path, GLenum pname, GLfloat *value); +typedef void (GL_APIENTRYP PFNGLGETPATHCOMMANDSNVPROC) (GLuint path, GLubyte *commands); +typedef void (GL_APIENTRYP PFNGLGETPATHCOORDSNVPROC) (GLuint path, GLfloat *coords); +typedef void (GL_APIENTRYP PFNGLGETPATHDASHARRAYNVPROC) (GLuint path, GLfloat *dashArray); +typedef void (GL_APIENTRYP PFNGLGETPATHMETRICSNVPROC) (GLbitfield metricQueryMask, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLsizei stride, GLfloat *metrics); +typedef void (GL_APIENTRYP PFNGLGETPATHMETRICRANGENVPROC) (GLbitfield metricQueryMask, GLuint firstPathName, GLsizei numPaths, GLsizei stride, GLfloat *metrics); +typedef void (GL_APIENTRYP PFNGLGETPATHSPACINGNVPROC) (GLenum pathListMode, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLfloat advanceScale, GLfloat kerningScale, GLenum transformType, GLfloat *returnedSpacing); +typedef GLboolean (GL_APIENTRYP PFNGLISPOINTINFILLPATHNVPROC) (GLuint path, GLuint mask, GLfloat x, GLfloat y); +typedef GLboolean (GL_APIENTRYP PFNGLISPOINTINSTROKEPATHNVPROC) (GLuint path, GLfloat x, GLfloat y); +typedef GLfloat (GL_APIENTRYP PFNGLGETPATHLENGTHNVPROC) 
(GLuint path, GLsizei startSegment, GLsizei numSegments); +typedef GLboolean (GL_APIENTRYP PFNGLPOINTALONGPATHNVPROC) (GLuint path, GLsizei startSegment, GLsizei numSegments, GLfloat distance, GLfloat *x, GLfloat *y, GLfloat *tangentX, GLfloat *tangentY); +typedef void (GL_APIENTRYP PFNGLMATRIXLOAD3X2FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXLOAD3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADTRANSPOSE3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULT3X2FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULT3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULTTRANSPOSE3X3FNVPROC) (GLenum matrixMode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLSTENCILTHENCOVERFILLPATHNVPROC) (GLuint path, GLenum fillMode, GLuint mask, GLenum coverMode); +typedef void (GL_APIENTRYP PFNGLSTENCILTHENCOVERSTROKEPATHNVPROC) (GLuint path, GLint reference, GLuint mask, GLenum coverMode); +typedef void (GL_APIENTRYP PFNGLSTENCILTHENCOVERFILLPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef void (GL_APIENTRYP PFNGLSTENCILTHENCOVERSTROKEPATHINSTANCEDNVPROC) (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +typedef GLenum (GL_APIENTRYP PFNGLPATHGLYPHINDEXRANGENVPROC) (GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint pathParameterTemplate, GLfloat emScale, GLuint baseAndCount[2]); +typedef GLenum (GL_APIENTRYP PFNGLPATHGLYPHINDEXARRAYNVPROC) (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef GLenum (GL_APIENTRYP PFNGLPATHMEMORYGLYPHINDEXARRAYNVPROC) (GLuint firstPathName, GLenum fontTarget, GLsizeiptr fontSize, const void *fontData, GLsizei faceIndex, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +typedef void (GL_APIENTRYP PFNGLPROGRAMPATHFRAGMENTINPUTGENNVPROC) (GLuint program, GLint location, GLenum genMode, GLint components, const GLfloat *coeffs); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMRESOURCEFVNVPROC) (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLMATRIXFRUSTUMEXTPROC) (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADIDENTITYEXTPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADTRANSPOSEFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADTRANSPOSEDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXLOADDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULTTRANSPOSEFEXTPROC) (GLenum mode, const GLfloat *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULTTRANSPOSEDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (GL_APIENTRYP PFNGLMATRIXMULTFEXTPROC) (GLenum mode, const GLfloat *m); 
+typedef void (GL_APIENTRYP PFNGLMATRIXMULTDEXTPROC) (GLenum mode, const GLdouble *m); +typedef void (GL_APIENTRYP PFNGLMATRIXORTHOEXTPROC) (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +typedef void (GL_APIENTRYP PFNGLMATRIXPOPEXTPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLMATRIXPUSHEXTPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLMATRIXROTATEFEXTPROC) (GLenum mode, GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLMATRIXROTATEDEXTPROC) (GLenum mode, GLdouble angle, GLdouble x, GLdouble y, GLdouble z); +typedef void (GL_APIENTRYP PFNGLMATRIXSCALEFEXTPROC) (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLMATRIXSCALEDEXTPROC) (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +typedef void (GL_APIENTRYP PFNGLMATRIXTRANSLATEFEXTPROC) (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLMATRIXTRANSLATEDEXTPROC) (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL GLuint GL_APIENTRY glGenPathsNV (GLsizei range); +GL_APICALL void GL_APIENTRY glDeletePathsNV (GLuint path, GLsizei range); +GL_APICALL GLboolean GL_APIENTRY glIsPathNV (GLuint path); +GL_APICALL void GL_APIENTRY glPathCommandsNV (GLuint path, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +GL_APICALL void GL_APIENTRY glPathCoordsNV (GLuint path, GLsizei numCoords, GLenum coordType, const void *coords); +GL_APICALL void GL_APIENTRY glPathSubCommandsNV (GLuint path, GLsizei commandStart, GLsizei commandsToDelete, GLsizei numCommands, const GLubyte *commands, GLsizei numCoords, GLenum coordType, const void *coords); +GL_APICALL void GL_APIENTRY glPathSubCoordsNV (GLuint path, GLsizei coordStart, GLsizei numCoords, GLenum coordType, const void *coords); +GL_APICALL void GL_APIENTRY glPathStringNV (GLuint path, GLenum format, GLsizei length, const void *pathString); +GL_APICALL void GL_APIENTRY glPathGlyphsNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLsizei numGlyphs, GLenum type, const void *charcodes, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GL_APICALL void GL_APIENTRY glPathGlyphRangeNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyph, GLsizei numGlyphs, GLenum handleMissingGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GL_APICALL void GL_APIENTRY glWeightPathsNV (GLuint resultPath, GLsizei numPaths, const GLuint *paths, const GLfloat *weights); +GL_APICALL void GL_APIENTRY glCopyPathNV (GLuint resultPath, GLuint srcPath); +GL_APICALL void GL_APIENTRY glInterpolatePathsNV (GLuint resultPath, GLuint pathA, GLuint pathB, GLfloat weight); +GL_APICALL void GL_APIENTRY glTransformPathNV (GLuint resultPath, GLuint srcPath, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glPathParameterivNV (GLuint path, GLenum pname, const GLint *value); +GL_APICALL void GL_APIENTRY glPathParameteriNV (GLuint path, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glPathParameterfvNV (GLuint path, GLenum pname, const GLfloat *value); +GL_APICALL void GL_APIENTRY glPathParameterfNV (GLuint path, GLenum pname, GLfloat value); +GL_APICALL void GL_APIENTRY glPathDashArrayNV (GLuint path, GLsizei dashCount, const GLfloat *dashArray); +GL_APICALL void GL_APIENTRY glPathStencilFuncNV (GLenum func, GLint ref, GLuint 
mask); +GL_APICALL void GL_APIENTRY glPathStencilDepthOffsetNV (GLfloat factor, GLfloat units); +GL_APICALL void GL_APIENTRY glStencilFillPathNV (GLuint path, GLenum fillMode, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilStrokePathNV (GLuint path, GLint reference, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilFillPathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glStencilStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glPathCoverDepthFuncNV (GLenum func); +GL_APICALL void GL_APIENTRY glCoverFillPathNV (GLuint path, GLenum coverMode); +GL_APICALL void GL_APIENTRY glCoverStrokePathNV (GLuint path, GLenum coverMode); +GL_APICALL void GL_APIENTRY glCoverFillPathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glCoverStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glGetPathParameterivNV (GLuint path, GLenum pname, GLint *value); +GL_APICALL void GL_APIENTRY glGetPathParameterfvNV (GLuint path, GLenum pname, GLfloat *value); +GL_APICALL void GL_APIENTRY glGetPathCommandsNV (GLuint path, GLubyte *commands); +GL_APICALL void GL_APIENTRY glGetPathCoordsNV (GLuint path, GLfloat *coords); +GL_APICALL void GL_APIENTRY glGetPathDashArrayNV (GLuint path, GLfloat *dashArray); +GL_APICALL void GL_APIENTRY glGetPathMetricsNV (GLbitfield metricQueryMask, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLsizei stride, GLfloat *metrics); +GL_APICALL void GL_APIENTRY glGetPathMetricRangeNV (GLbitfield metricQueryMask, GLuint firstPathName, GLsizei numPaths, GLsizei stride, GLfloat *metrics); +GL_APICALL void GL_APIENTRY glGetPathSpacingNV (GLenum pathListMode, GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLfloat advanceScale, GLfloat kerningScale, GLenum transformType, GLfloat *returnedSpacing); +GL_APICALL GLboolean GL_APIENTRY glIsPointInFillPathNV (GLuint path, GLuint mask, GLfloat x, GLfloat y); +GL_APICALL GLboolean GL_APIENTRY glIsPointInStrokePathNV (GLuint path, GLfloat x, GLfloat y); +GL_APICALL GLfloat GL_APIENTRY glGetPathLengthNV (GLuint path, GLsizei startSegment, GLsizei numSegments); +GL_APICALL GLboolean GL_APIENTRY glPointAlongPathNV (GLuint path, GLsizei startSegment, GLsizei numSegments, GLfloat distance, GLfloat *x, GLfloat *y, GLfloat *tangentX, GLfloat *tangentY); +GL_APICALL void GL_APIENTRY glMatrixLoad3x2fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixLoad3x3fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixLoadTranspose3x3fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixMult3x2fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixMult3x3fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixMultTranspose3x3fNV (GLenum matrixMode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glStencilThenCoverFillPathNV (GLuint path, GLenum fillMode, GLuint mask, GLenum coverMode); 
+GL_APICALL void GL_APIENTRY glStencilThenCoverStrokePathNV (GLuint path, GLint reference, GLuint mask, GLenum coverMode); +GL_APICALL void GL_APIENTRY glStencilThenCoverFillPathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLenum fillMode, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GL_APICALL void GL_APIENTRY glStencilThenCoverStrokePathInstancedNV (GLsizei numPaths, GLenum pathNameType, const void *paths, GLuint pathBase, GLint reference, GLuint mask, GLenum coverMode, GLenum transformType, const GLfloat *transformValues); +GL_APICALL GLenum GL_APIENTRY glPathGlyphIndexRangeNV (GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint pathParameterTemplate, GLfloat emScale, GLuint baseAndCount[2]); +GL_APICALL GLenum GL_APIENTRY glPathGlyphIndexArrayNV (GLuint firstPathName, GLenum fontTarget, const void *fontName, GLbitfield fontStyle, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GL_APICALL GLenum GL_APIENTRY glPathMemoryGlyphIndexArrayNV (GLuint firstPathName, GLenum fontTarget, GLsizeiptr fontSize, const void *fontData, GLsizei faceIndex, GLuint firstGlyphIndex, GLsizei numGlyphs, GLuint pathParameterTemplate, GLfloat emScale); +GL_APICALL void GL_APIENTRY glProgramPathFragmentInputGenNV (GLuint program, GLint location, GLenum genMode, GLint components, const GLfloat *coeffs); +GL_APICALL void GL_APIENTRY glGetProgramResourcefvNV (GLuint program, GLenum programInterface, GLuint index, GLsizei propCount, const GLenum *props, GLsizei bufSize, GLsizei *length, GLfloat *params); +GL_APICALL void GL_APIENTRY glMatrixFrustumEXT (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +GL_APICALL void GL_APIENTRY glMatrixLoadIdentityEXT (GLenum mode); +GL_APICALL void GL_APIENTRY glMatrixLoadTransposefEXT (GLenum mode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixLoadTransposedEXT (GLenum mode, const GLdouble *m); +GL_APICALL void GL_APIENTRY glMatrixLoadfEXT (GLenum mode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixLoaddEXT (GLenum mode, const GLdouble *m); +GL_APICALL void GL_APIENTRY glMatrixMultTransposefEXT (GLenum mode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixMultTransposedEXT (GLenum mode, const GLdouble *m); +GL_APICALL void GL_APIENTRY glMatrixMultfEXT (GLenum mode, const GLfloat *m); +GL_APICALL void GL_APIENTRY glMatrixMultdEXT (GLenum mode, const GLdouble *m); +GL_APICALL void GL_APIENTRY glMatrixOrthoEXT (GLenum mode, GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar); +GL_APICALL void GL_APIENTRY glMatrixPopEXT (GLenum mode); +GL_APICALL void GL_APIENTRY glMatrixPushEXT (GLenum mode); +GL_APICALL void GL_APIENTRY glMatrixRotatefEXT (GLenum mode, GLfloat angle, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glMatrixRotatedEXT (GLenum mode, GLdouble angle, GLdouble x, GLdouble y, GLdouble z); +GL_APICALL void GL_APIENTRY glMatrixScalefEXT (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glMatrixScaledEXT (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +GL_APICALL void GL_APIENTRY glMatrixTranslatefEXT (GLenum mode, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glMatrixTranslatedEXT (GLenum mode, GLdouble x, GLdouble y, GLdouble z); +#endif +#endif /* GL_NV_path_rendering */ + +#ifndef GL_NV_path_rendering_shared_edge +#define 
GL_NV_path_rendering_shared_edge 1 +#define GL_SHARED_EDGE_NV 0xC0 +#endif /* GL_NV_path_rendering_shared_edge */ + +#ifndef GL_NV_pixel_buffer_object +#define GL_NV_pixel_buffer_object 1 +#define GL_PIXEL_PACK_BUFFER_NV 0x88EB +#define GL_PIXEL_UNPACK_BUFFER_NV 0x88EC +#define GL_PIXEL_PACK_BUFFER_BINDING_NV 0x88ED +#define GL_PIXEL_UNPACK_BUFFER_BINDING_NV 0x88EF +#endif /* GL_NV_pixel_buffer_object */ + +#ifndef GL_NV_polygon_mode +#define GL_NV_polygon_mode 1 +#define GL_POLYGON_MODE_NV 0x0B40 +#define GL_POLYGON_OFFSET_POINT_NV 0x2A01 +#define GL_POLYGON_OFFSET_LINE_NV 0x2A02 +#define GL_POINT_NV 0x1B00 +#define GL_LINE_NV 0x1B01 +#define GL_FILL_NV 0x1B02 +typedef void (GL_APIENTRYP PFNGLPOLYGONMODENVPROC) (GLenum face, GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glPolygonModeNV (GLenum face, GLenum mode); +#endif +#endif /* GL_NV_polygon_mode */ + +#ifndef GL_NV_read_buffer +#define GL_NV_read_buffer 1 +#define GL_READ_BUFFER_NV 0x0C02 +typedef void (GL_APIENTRYP PFNGLREADBUFFERNVPROC) (GLenum mode); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glReadBufferNV (GLenum mode); +#endif +#endif /* GL_NV_read_buffer */ + +#ifndef GL_NV_read_buffer_front +#define GL_NV_read_buffer_front 1 +#endif /* GL_NV_read_buffer_front */ + +#ifndef GL_NV_read_depth +#define GL_NV_read_depth 1 +#endif /* GL_NV_read_depth */ + +#ifndef GL_NV_read_depth_stencil +#define GL_NV_read_depth_stencil 1 +#endif /* GL_NV_read_depth_stencil */ + +#ifndef GL_NV_read_stencil +#define GL_NV_read_stencil 1 +#endif /* GL_NV_read_stencil */ + +#ifndef GL_NV_sRGB_formats +#define GL_NV_sRGB_formats 1 +#define GL_SLUMINANCE_NV 0x8C46 +#define GL_SLUMINANCE_ALPHA_NV 0x8C44 +#define GL_SRGB8_NV 0x8C41 +#define GL_SLUMINANCE8_NV 0x8C47 +#define GL_SLUMINANCE8_ALPHA8_NV 0x8C45 +#define GL_COMPRESSED_SRGB_S3TC_DXT1_NV 0x8C4C +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_NV 0x8C4D +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_NV 0x8C4E +#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_NV 0x8C4F +#define GL_ETC1_SRGB8_NV 0x88EE +#endif /* GL_NV_sRGB_formats */ + +#ifndef GL_NV_sample_locations +#define GL_NV_sample_locations 1 +#define GL_SAMPLE_LOCATION_SUBPIXEL_BITS_NV 0x933D +#define GL_SAMPLE_LOCATION_PIXEL_GRID_WIDTH_NV 0x933E +#define GL_SAMPLE_LOCATION_PIXEL_GRID_HEIGHT_NV 0x933F +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_TABLE_SIZE_NV 0x9340 +#define GL_SAMPLE_LOCATION_NV 0x8E50 +#define GL_PROGRAMMABLE_SAMPLE_LOCATION_NV 0x9341 +#define GL_FRAMEBUFFER_PROGRAMMABLE_SAMPLE_LOCATIONS_NV 0x9342 +#define GL_FRAMEBUFFER_SAMPLE_LOCATION_PIXEL_GRID_NV 0x9343 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERSAMPLELOCATIONSFVNVPROC) (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLNAMEDFRAMEBUFFERSAMPLELOCATIONSFVNVPROC) (GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLRESOLVEDEPTHVALUESNVPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferSampleLocationsfvNV (GLenum target, GLuint start, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glNamedFramebufferSampleLocationsfvNV (GLuint framebuffer, GLuint start, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glResolveDepthValuesNV (void); +#endif +#endif /* GL_NV_sample_locations */ + +#ifndef GL_NV_sample_mask_override_coverage +#define GL_NV_sample_mask_override_coverage 1 +#endif /* GL_NV_sample_mask_override_coverage */ + +#ifndef GL_NV_shader_atomic_fp16_vector +#define 
GL_NV_shader_atomic_fp16_vector 1 +#endif /* GL_NV_shader_atomic_fp16_vector */ + +#ifndef GL_NV_shader_noperspective_interpolation +#define GL_NV_shader_noperspective_interpolation 1 +#endif /* GL_NV_shader_noperspective_interpolation */ + +#ifndef GL_NV_shadow_samplers_array +#define GL_NV_shadow_samplers_array 1 +#define GL_SAMPLER_2D_ARRAY_SHADOW_NV 0x8DC4 +#endif /* GL_NV_shadow_samplers_array */ + +#ifndef GL_NV_shadow_samplers_cube +#define GL_NV_shadow_samplers_cube 1 +#define GL_SAMPLER_CUBE_SHADOW_NV 0x8DC5 +#endif /* GL_NV_shadow_samplers_cube */ + +#ifndef GL_NV_stereo_view_rendering +#define GL_NV_stereo_view_rendering 1 +#endif /* GL_NV_stereo_view_rendering */ + +#ifndef GL_NV_texture_border_clamp +#define GL_NV_texture_border_clamp 1 +#define GL_TEXTURE_BORDER_COLOR_NV 0x1004 +#define GL_CLAMP_TO_BORDER_NV 0x812D +#endif /* GL_NV_texture_border_clamp */ + +#ifndef GL_NV_texture_compression_s3tc_update +#define GL_NV_texture_compression_s3tc_update 1 +#endif /* GL_NV_texture_compression_s3tc_update */ + +#ifndef GL_NV_texture_npot_2D_mipmap +#define GL_NV_texture_npot_2D_mipmap 1 +#endif /* GL_NV_texture_npot_2D_mipmap */ + +#ifndef GL_NV_viewport_array +#define GL_NV_viewport_array 1 +#define GL_MAX_VIEWPORTS_NV 0x825B +#define GL_VIEWPORT_SUBPIXEL_BITS_NV 0x825C +#define GL_VIEWPORT_BOUNDS_RANGE_NV 0x825D +#define GL_VIEWPORT_INDEX_PROVOKING_VERTEX_NV 0x825F +typedef void (GL_APIENTRYP PFNGLVIEWPORTARRAYVNVPROC) (GLuint first, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVIEWPORTINDEXEDFNVPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +typedef void (GL_APIENTRYP PFNGLVIEWPORTINDEXEDFVNVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLSCISSORARRAYVNVPROC) (GLuint first, GLsizei count, const GLint *v); +typedef void (GL_APIENTRYP PFNGLSCISSORINDEXEDNVPROC) (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSCISSORINDEXEDVNVPROC) (GLuint index, const GLint *v); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEARRAYFVNVPROC) (GLuint first, GLsizei count, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEINDEXEDFNVPROC) (GLuint index, GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLGETFLOATI_VNVPROC) (GLenum target, GLuint index, GLfloat *data); +typedef void (GL_APIENTRYP PFNGLENABLEINVPROC) (GLenum target, GLuint index); +typedef void (GL_APIENTRYP PFNGLDISABLEINVPROC) (GLenum target, GLuint index); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDINVPROC) (GLenum target, GLuint index); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glViewportArrayvNV (GLuint first, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glViewportIndexedfNV (GLuint index, GLfloat x, GLfloat y, GLfloat w, GLfloat h); +GL_APICALL void GL_APIENTRY glViewportIndexedfvNV (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glScissorArrayvNV (GLuint first, GLsizei count, const GLint *v); +GL_APICALL void GL_APIENTRY glScissorIndexedNV (GLuint index, GLint left, GLint bottom, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glScissorIndexedvNV (GLuint index, const GLint *v); +GL_APICALL void GL_APIENTRY glDepthRangeArrayfvNV (GLuint first, GLsizei count, const GLfloat *v); +GL_APICALL void GL_APIENTRY glDepthRangeIndexedfNV (GLuint index, GLfloat n, GLfloat f); +GL_APICALL void GL_APIENTRY glGetFloati_vNV (GLenum target, GLuint index, GLfloat *data); +GL_APICALL void GL_APIENTRY glEnableiNV (GLenum target, GLuint index); 
+GL_APICALL void GL_APIENTRY glDisableiNV (GLenum target, GLuint index); +GL_APICALL GLboolean GL_APIENTRY glIsEnablediNV (GLenum target, GLuint index); +#endif +#endif /* GL_NV_viewport_array */ + +#ifndef GL_NV_viewport_array2 +#define GL_NV_viewport_array2 1 +#endif /* GL_NV_viewport_array2 */ + +#ifndef GL_NV_viewport_swizzle +#define GL_NV_viewport_swizzle 1 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_X_NV 0x9350 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_X_NV 0x9351 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_Y_NV 0x9352 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_Y_NV 0x9353 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_Z_NV 0x9354 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_Z_NV 0x9355 +#define GL_VIEWPORT_SWIZZLE_POSITIVE_W_NV 0x9356 +#define GL_VIEWPORT_SWIZZLE_NEGATIVE_W_NV 0x9357 +#define GL_VIEWPORT_SWIZZLE_X_NV 0x9358 +#define GL_VIEWPORT_SWIZZLE_Y_NV 0x9359 +#define GL_VIEWPORT_SWIZZLE_Z_NV 0x935A +#define GL_VIEWPORT_SWIZZLE_W_NV 0x935B +typedef void (GL_APIENTRYP PFNGLVIEWPORTSWIZZLENVPROC) (GLuint index, GLenum swizzlex, GLenum swizzley, GLenum swizzlez, GLenum swizzlew); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glViewportSwizzleNV (GLuint index, GLenum swizzlex, GLenum swizzley, GLenum swizzlez, GLenum swizzlew); +#endif +#endif /* GL_NV_viewport_swizzle */ + +#ifndef GL_OVR_multiview +#define GL_OVR_multiview 1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_NUM_VIEWS_OVR 0x9630 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_BASE_VIEW_INDEX_OVR 0x9632 +#define GL_MAX_VIEWS_OVR 0x9631 +#define GL_FRAMEBUFFER_INCOMPLETE_VIEW_TARGETS_OVR 0x9633 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint baseViewIndex, GLsizei numViews); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferTextureMultiviewOVR (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint baseViewIndex, GLsizei numViews); +#endif +#endif /* GL_OVR_multiview */ + +#ifndef GL_OVR_multiview2 +#define GL_OVR_multiview2 1 +#endif /* GL_OVR_multiview2 */ + +#ifndef GL_OVR_multiview_multisampled_render_to_texture +#define GL_OVR_multiview_multisampled_render_to_texture 1 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTUREMULTISAMPLEMULTIVIEWOVRPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLsizei samples, GLint baseViewIndex, GLsizei numViews); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferTextureMultisampleMultiviewOVR (GLenum target, GLenum attachment, GLuint texture, GLint level, GLsizei samples, GLint baseViewIndex, GLsizei numViews); +#endif +#endif /* GL_OVR_multiview_multisampled_render_to_texture */ + +#ifndef GL_QCOM_alpha_test +#define GL_QCOM_alpha_test 1 +#define GL_ALPHA_TEST_QCOM 0x0BC0 +#define GL_ALPHA_TEST_FUNC_QCOM 0x0BC1 +#define GL_ALPHA_TEST_REF_QCOM 0x0BC2 +typedef void (GL_APIENTRYP PFNGLALPHAFUNCQCOMPROC) (GLenum func, GLclampf ref); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glAlphaFuncQCOM (GLenum func, GLclampf ref); +#endif +#endif /* GL_QCOM_alpha_test */ + +#ifndef GL_QCOM_binning_control +#define GL_QCOM_binning_control 1 +#define GL_BINNING_CONTROL_HINT_QCOM 0x8FB0 +#define GL_CPU_OPTIMIZED_QCOM 0x8FB1 +#define GL_GPU_OPTIMIZED_QCOM 0x8FB2 +#define GL_RENDER_DIRECT_TO_FRAMEBUFFER_QCOM 0x8FB3 +#endif /* GL_QCOM_binning_control */ + +#ifndef GL_QCOM_driver_control +#define GL_QCOM_driver_control 1 +typedef void (GL_APIENTRYP PFNGLGETDRIVERCONTROLSQCOMPROC) (GLint *num, GLsizei size, GLuint *driverControls); +typedef void (GL_APIENTRYP 
PFNGLGETDRIVERCONTROLSTRINGQCOMPROC) (GLuint driverControl, GLsizei bufSize, GLsizei *length, GLchar *driverControlString); +typedef void (GL_APIENTRYP PFNGLENABLEDRIVERCONTROLQCOMPROC) (GLuint driverControl); +typedef void (GL_APIENTRYP PFNGLDISABLEDRIVERCONTROLQCOMPROC) (GLuint driverControl); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glGetDriverControlsQCOM (GLint *num, GLsizei size, GLuint *driverControls); +GL_APICALL void GL_APIENTRY glGetDriverControlStringQCOM (GLuint driverControl, GLsizei bufSize, GLsizei *length, GLchar *driverControlString); +GL_APICALL void GL_APIENTRY glEnableDriverControlQCOM (GLuint driverControl); +GL_APICALL void GL_APIENTRY glDisableDriverControlQCOM (GLuint driverControl); +#endif +#endif /* GL_QCOM_driver_control */ + +#ifndef GL_QCOM_extended_get +#define GL_QCOM_extended_get 1 +#define GL_TEXTURE_WIDTH_QCOM 0x8BD2 +#define GL_TEXTURE_HEIGHT_QCOM 0x8BD3 +#define GL_TEXTURE_DEPTH_QCOM 0x8BD4 +#define GL_TEXTURE_INTERNAL_FORMAT_QCOM 0x8BD5 +#define GL_TEXTURE_FORMAT_QCOM 0x8BD6 +#define GL_TEXTURE_TYPE_QCOM 0x8BD7 +#define GL_TEXTURE_IMAGE_VALID_QCOM 0x8BD8 +#define GL_TEXTURE_NUM_LEVELS_QCOM 0x8BD9 +#define GL_TEXTURE_TARGET_QCOM 0x8BDA +#define GL_TEXTURE_OBJECT_VALID_QCOM 0x8BDB +#define GL_STATE_RESTORE 0x8BDC +typedef void (GL_APIENTRYP PFNGLEXTGETTEXTURESQCOMPROC) (GLuint *textures, GLint maxTextures, GLint *numTextures); +typedef void (GL_APIENTRYP PFNGLEXTGETBUFFERSQCOMPROC) (GLuint *buffers, GLint maxBuffers, GLint *numBuffers); +typedef void (GL_APIENTRYP PFNGLEXTGETRENDERBUFFERSQCOMPROC) (GLuint *renderbuffers, GLint maxRenderbuffers, GLint *numRenderbuffers); +typedef void (GL_APIENTRYP PFNGLEXTGETFRAMEBUFFERSQCOMPROC) (GLuint *framebuffers, GLint maxFramebuffers, GLint *numFramebuffers); +typedef void (GL_APIENTRYP PFNGLEXTGETTEXLEVELPARAMETERIVQCOMPROC) (GLuint texture, GLenum face, GLint level, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLEXTTEXOBJECTSTATEOVERRIDEIQCOMPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLEXTGETTEXSUBIMAGEQCOMPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, void *texels); +typedef void (GL_APIENTRYP PFNGLEXTGETBUFFERPOINTERVQCOMPROC) (GLenum target, void **params); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glExtGetTexturesQCOM (GLuint *textures, GLint maxTextures, GLint *numTextures); +GL_APICALL void GL_APIENTRY glExtGetBuffersQCOM (GLuint *buffers, GLint maxBuffers, GLint *numBuffers); +GL_APICALL void GL_APIENTRY glExtGetRenderbuffersQCOM (GLuint *renderbuffers, GLint maxRenderbuffers, GLint *numRenderbuffers); +GL_APICALL void GL_APIENTRY glExtGetFramebuffersQCOM (GLuint *framebuffers, GLint maxFramebuffers, GLint *numFramebuffers); +GL_APICALL void GL_APIENTRY glExtGetTexLevelParameterivQCOM (GLuint texture, GLenum face, GLint level, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glExtTexObjectStateOverrideiQCOM (GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glExtGetTexSubImageQCOM (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, void *texels); +GL_APICALL void GL_APIENTRY glExtGetBufferPointervQCOM (GLenum target, void **params); +#endif +#endif /* GL_QCOM_extended_get */ + +#ifndef GL_QCOM_extended_get2 +#define GL_QCOM_extended_get2 1 +typedef void (GL_APIENTRYP 
PFNGLEXTGETSHADERSQCOMPROC) (GLuint *shaders, GLint maxShaders, GLint *numShaders); +typedef void (GL_APIENTRYP PFNGLEXTGETPROGRAMSQCOMPROC) (GLuint *programs, GLint maxPrograms, GLint *numPrograms); +typedef GLboolean (GL_APIENTRYP PFNGLEXTISPROGRAMBINARYQCOMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLEXTGETPROGRAMBINARYSOURCEQCOMPROC) (GLuint program, GLenum shadertype, GLchar *source, GLint *length); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glExtGetShadersQCOM (GLuint *shaders, GLint maxShaders, GLint *numShaders); +GL_APICALL void GL_APIENTRY glExtGetProgramsQCOM (GLuint *programs, GLint maxPrograms, GLint *numPrograms); +GL_APICALL GLboolean GL_APIENTRY glExtIsProgramBinaryQCOM (GLuint program); +GL_APICALL void GL_APIENTRY glExtGetProgramBinarySourceQCOM (GLuint program, GLenum shadertype, GLchar *source, GLint *length); +#endif +#endif /* GL_QCOM_extended_get2 */ + +#ifndef GL_QCOM_framebuffer_foveated +#define GL_QCOM_framebuffer_foveated 1 +#define GL_FOVEATION_ENABLE_BIT_QCOM 0x00000001 +#define GL_FOVEATION_SCALED_BIN_METHOD_BIT_QCOM 0x00000002 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERFOVEATIONCONFIGQCOMPROC) (GLuint framebuffer, GLuint numLayers, GLuint focalPointsPerLayer, GLuint requestedFeatures, GLuint *providedFeatures); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERFOVEATIONPARAMETERSQCOMPROC) (GLuint framebuffer, GLuint layer, GLuint focalPoint, GLfloat focalX, GLfloat focalY, GLfloat gainX, GLfloat gainY, GLfloat foveaArea); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferFoveationConfigQCOM (GLuint framebuffer, GLuint numLayers, GLuint focalPointsPerLayer, GLuint requestedFeatures, GLuint *providedFeatures); +GL_APICALL void GL_APIENTRY glFramebufferFoveationParametersQCOM (GLuint framebuffer, GLuint layer, GLuint focalPoint, GLfloat focalX, GLfloat focalY, GLfloat gainX, GLfloat gainY, GLfloat foveaArea); +#endif +#endif /* GL_QCOM_framebuffer_foveated */ + +#ifndef GL_QCOM_perfmon_global_mode +#define GL_QCOM_perfmon_global_mode 1 +#define GL_PERFMON_GLOBAL_MODE_QCOM 0x8FA0 +#endif /* GL_QCOM_perfmon_global_mode */ + +#ifndef GL_QCOM_shader_framebuffer_fetch_noncoherent +#define GL_QCOM_shader_framebuffer_fetch_noncoherent 1 +#define GL_FRAMEBUFFER_FETCH_NONCOHERENT_QCOM 0x96A2 +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERFETCHBARRIERQCOMPROC) (void); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glFramebufferFetchBarrierQCOM (void); +#endif +#endif /* GL_QCOM_shader_framebuffer_fetch_noncoherent */ + +#ifndef GL_QCOM_texture_foveated +#define GL_QCOM_texture_foveated 1 +#define GL_TEXTURE_FOVEATED_FEATURE_BITS_QCOM 0x8BFB +#define GL_TEXTURE_FOVEATED_MIN_PIXEL_DENSITY_QCOM 0x8BFC +#define GL_TEXTURE_FOVEATED_FEATURE_QUERY_QCOM 0x8BFD +#define GL_TEXTURE_FOVEATED_NUM_FOCAL_POINTS_QUERY_QCOM 0x8BFE +#define GL_FRAMEBUFFER_INCOMPLETE_FOVEATION_QCOM 0x8BFF +typedef void (GL_APIENTRYP PFNGLTEXTUREFOVEATIONPARAMETERSQCOMPROC) (GLuint texture, GLuint layer, GLuint focalPoint, GLfloat focalX, GLfloat focalY, GLfloat gainX, GLfloat gainY, GLfloat foveaArea); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glTextureFoveationParametersQCOM (GLuint texture, GLuint layer, GLuint focalPoint, GLfloat focalX, GLfloat focalY, GLfloat gainX, GLfloat gainY, GLfloat foveaArea); +#endif +#endif /* GL_QCOM_texture_foveated */ + +#ifndef GL_QCOM_tiled_rendering +#define GL_QCOM_tiled_rendering 1 +#define GL_COLOR_BUFFER_BIT0_QCOM 0x00000001 +#define GL_COLOR_BUFFER_BIT1_QCOM 0x00000002 +#define 
GL_COLOR_BUFFER_BIT2_QCOM 0x00000004 +#define GL_COLOR_BUFFER_BIT3_QCOM 0x00000008 +#define GL_COLOR_BUFFER_BIT4_QCOM 0x00000010 +#define GL_COLOR_BUFFER_BIT5_QCOM 0x00000020 +#define GL_COLOR_BUFFER_BIT6_QCOM 0x00000040 +#define GL_COLOR_BUFFER_BIT7_QCOM 0x00000080 +#define GL_DEPTH_BUFFER_BIT0_QCOM 0x00000100 +#define GL_DEPTH_BUFFER_BIT1_QCOM 0x00000200 +#define GL_DEPTH_BUFFER_BIT2_QCOM 0x00000400 +#define GL_DEPTH_BUFFER_BIT3_QCOM 0x00000800 +#define GL_DEPTH_BUFFER_BIT4_QCOM 0x00001000 +#define GL_DEPTH_BUFFER_BIT5_QCOM 0x00002000 +#define GL_DEPTH_BUFFER_BIT6_QCOM 0x00004000 +#define GL_DEPTH_BUFFER_BIT7_QCOM 0x00008000 +#define GL_STENCIL_BUFFER_BIT0_QCOM 0x00010000 +#define GL_STENCIL_BUFFER_BIT1_QCOM 0x00020000 +#define GL_STENCIL_BUFFER_BIT2_QCOM 0x00040000 +#define GL_STENCIL_BUFFER_BIT3_QCOM 0x00080000 +#define GL_STENCIL_BUFFER_BIT4_QCOM 0x00100000 +#define GL_STENCIL_BUFFER_BIT5_QCOM 0x00200000 +#define GL_STENCIL_BUFFER_BIT6_QCOM 0x00400000 +#define GL_STENCIL_BUFFER_BIT7_QCOM 0x00800000 +#define GL_MULTISAMPLE_BUFFER_BIT0_QCOM 0x01000000 +#define GL_MULTISAMPLE_BUFFER_BIT1_QCOM 0x02000000 +#define GL_MULTISAMPLE_BUFFER_BIT2_QCOM 0x04000000 +#define GL_MULTISAMPLE_BUFFER_BIT3_QCOM 0x08000000 +#define GL_MULTISAMPLE_BUFFER_BIT4_QCOM 0x10000000 +#define GL_MULTISAMPLE_BUFFER_BIT5_QCOM 0x20000000 +#define GL_MULTISAMPLE_BUFFER_BIT6_QCOM 0x40000000 +#define GL_MULTISAMPLE_BUFFER_BIT7_QCOM 0x80000000 +typedef void (GL_APIENTRYP PFNGLSTARTTILINGQCOMPROC) (GLuint x, GLuint y, GLuint width, GLuint height, GLbitfield preserveMask); +typedef void (GL_APIENTRYP PFNGLENDTILINGQCOMPROC) (GLbitfield preserveMask); +#ifdef GL_GLEXT_PROTOTYPES +GL_APICALL void GL_APIENTRY glStartTilingQCOM (GLuint x, GLuint y, GLuint width, GLuint height, GLbitfield preserveMask); +GL_APICALL void GL_APIENTRY glEndTilingQCOM (GLbitfield preserveMask); +#endif +#endif /* GL_QCOM_tiled_rendering */ + +#ifndef GL_QCOM_writeonly_rendering +#define GL_QCOM_writeonly_rendering 1 +#define GL_WRITEONLY_RENDERING_QCOM 0x8823 +#endif /* GL_QCOM_writeonly_rendering */ + +#ifndef GL_VIV_shader_binary +#define GL_VIV_shader_binary 1 +#define GL_SHADER_BINARY_VIV 0x8FC4 +#endif /* GL_VIV_shader_binary */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/GLES2/gl2platform.h b/vendor/swiftshader/include/GLES2/gl2platform.h new file mode 100644 index 00000000..eb318dc3 --- /dev/null +++ b/vendor/swiftshader/include/GLES2/gl2platform.h @@ -0,0 +1,38 @@ +#ifndef __gl2platform_h_ +#define __gl2platform_h_ + +/* +** Copyright (c) 2017 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* Platform-specific types and definitions for OpenGL ES 2.X gl2.h + * + * Adopters may modify khrplatform.h and this file to suit their platform. 
+ * Please contribute modifications back to Khronos as pull requests on the + * public github repository: + * https://github.com/KhronosGroup/OpenGL-Registry + */ + +#include <KHR/khrplatform.h> + +#ifndef GL_APICALL +#define GL_APICALL KHRONOS_APICALL +#endif + +#ifndef GL_APIENTRY +#define GL_APIENTRY KHRONOS_APIENTRY +#endif + +#endif /* __gl2platform_h_ */ diff --git a/vendor/swiftshader/include/GLES3/gl3.h b/vendor/swiftshader/include/GLES3/gl3.h new file mode 100644 index 00000000..639df8c0 --- /dev/null +++ b/vendor/swiftshader/include/GLES3/gl3.h @@ -0,0 +1,1211 @@ +#ifndef __gles2_gl3_h_ +#define __gles2_gl3_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2013-2018 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ +/* +** This header is generated from the Khronos OpenGL / OpenGL ES XML +** API Registry.
The current version of the Registry, generator scripts +** used to make the header, and the header can be found at +** https://github.com/KhronosGroup/OpenGL-Registry +*/ + +#include <GLES3/gl3platform.h> + +#ifndef GL_APIENTRYP +#define GL_APIENTRYP GL_APIENTRY* +#endif + +#ifndef GL_GLES_PROTOTYPES +#define GL_GLES_PROTOTYPES 1 +#endif + +/* Generated on date 20180525 */ + +/* Generated C header for: + * API: gles2 + * Profile: common + * Versions considered: 2\.[0-9]|3\.0 + * Versions emitted: .* + * Default extensions included: None + * Additional extensions included: _nomatch_^ + * Extensions removed: _nomatch_^ + */ + +#ifndef GL_ES_VERSION_2_0 +#define GL_ES_VERSION_2_0 1 +#include <KHR/khrplatform.h> +typedef khronos_int8_t GLbyte; +typedef khronos_float_t GLclampf; +typedef khronos_int32_t GLfixed; +typedef short GLshort; +typedef unsigned short GLushort; +typedef void GLvoid; +typedef struct __GLsync *GLsync; +typedef khronos_int64_t GLint64; +typedef khronos_uint64_t GLuint64; +typedef unsigned int GLenum; +typedef unsigned int GLuint; +typedef char GLchar; +typedef khronos_float_t GLfloat; +typedef khronos_ssize_t GLsizeiptr; +typedef khronos_intptr_t GLintptr; +typedef unsigned int GLbitfield; +typedef int GLint; +typedef unsigned char GLboolean; +typedef int GLsizei; +typedef khronos_uint8_t GLubyte; +#define GL_DEPTH_BUFFER_BIT 0x00000100 +#define GL_STENCIL_BUFFER_BIT 0x00000400 +#define GL_COLOR_BUFFER_BIT 0x00004000 +#define GL_FALSE 0 +#define GL_TRUE 1 +#define GL_POINTS 0x0000 +#define GL_LINES 0x0001 +#define GL_LINE_LOOP 0x0002 +#define GL_LINE_STRIP 0x0003 +#define GL_TRIANGLES 0x0004 +#define GL_TRIANGLE_STRIP 0x0005 +#define GL_TRIANGLE_FAN 0x0006 +#define GL_ZERO 0 +#define GL_ONE 1 +#define GL_SRC_COLOR 0x0300 +#define GL_ONE_MINUS_SRC_COLOR 0x0301 +#define GL_SRC_ALPHA 0x0302 +#define GL_ONE_MINUS_SRC_ALPHA 0x0303 +#define GL_DST_ALPHA 0x0304 +#define GL_ONE_MINUS_DST_ALPHA 0x0305 +#define GL_DST_COLOR 0x0306 +#define GL_ONE_MINUS_DST_COLOR 0x0307 +#define GL_SRC_ALPHA_SATURATE 0x0308 +#define GL_FUNC_ADD 0x8006 +#define GL_BLEND_EQUATION 0x8009 +#define GL_BLEND_EQUATION_RGB 0x8009 +#define GL_BLEND_EQUATION_ALPHA 0x883D +#define GL_FUNC_SUBTRACT 0x800A +#define GL_FUNC_REVERSE_SUBTRACT 0x800B +#define GL_BLEND_DST_RGB 0x80C8 +#define GL_BLEND_SRC_RGB 0x80C9 +#define GL_BLEND_DST_ALPHA 0x80CA +#define GL_BLEND_SRC_ALPHA 0x80CB +#define GL_CONSTANT_COLOR 0x8001 +#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002 +#define GL_CONSTANT_ALPHA 0x8003 +#define GL_ONE_MINUS_CONSTANT_ALPHA 0x8004 +#define GL_BLEND_COLOR 0x8005 +#define GL_ARRAY_BUFFER 0x8892 +#define GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define GL_ARRAY_BUFFER_BINDING 0x8894 +#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895 +#define GL_STREAM_DRAW 0x88E0 +#define GL_STATIC_DRAW 0x88E4 +#define GL_DYNAMIC_DRAW 0x88E8 +#define GL_BUFFER_SIZE 0x8764 +#define GL_BUFFER_USAGE 0x8765 +#define GL_CURRENT_VERTEX_ATTRIB 0x8626 +#define GL_FRONT 0x0404 +#define GL_BACK 0x0405 +#define GL_FRONT_AND_BACK 0x0408 +#define GL_TEXTURE_2D 0x0DE1 +#define GL_CULL_FACE 0x0B44 +#define GL_BLEND 0x0BE2 +#define GL_DITHER 0x0BD0 +#define GL_STENCIL_TEST 0x0B90 +#define GL_DEPTH_TEST 0x0B71 +#define GL_SCISSOR_TEST 0x0C11 +#define GL_POLYGON_OFFSET_FILL 0x8037 +#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E +#define GL_SAMPLE_COVERAGE 0x80A0 +#define GL_NO_ERROR 0 +#define GL_INVALID_ENUM 0x0500 +#define GL_INVALID_VALUE 0x0501 +#define GL_INVALID_OPERATION 0x0502 +#define GL_OUT_OF_MEMORY 0x0505 +#define GL_CW 0x0900 +#define GL_CCW 0x0901 +#define GL_LINE_WIDTH 0x0B21 +#define
GL_ALIASED_POINT_SIZE_RANGE 0x846D +#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E +#define GL_CULL_FACE_MODE 0x0B45 +#define GL_FRONT_FACE 0x0B46 +#define GL_DEPTH_RANGE 0x0B70 +#define GL_DEPTH_WRITEMASK 0x0B72 +#define GL_DEPTH_CLEAR_VALUE 0x0B73 +#define GL_DEPTH_FUNC 0x0B74 +#define GL_STENCIL_CLEAR_VALUE 0x0B91 +#define GL_STENCIL_FUNC 0x0B92 +#define GL_STENCIL_FAIL 0x0B94 +#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95 +#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96 +#define GL_STENCIL_REF 0x0B97 +#define GL_STENCIL_VALUE_MASK 0x0B93 +#define GL_STENCIL_WRITEMASK 0x0B98 +#define GL_STENCIL_BACK_FUNC 0x8800 +#define GL_STENCIL_BACK_FAIL 0x8801 +#define GL_STENCIL_BACK_PASS_DEPTH_FAIL 0x8802 +#define GL_STENCIL_BACK_PASS_DEPTH_PASS 0x8803 +#define GL_STENCIL_BACK_REF 0x8CA3 +#define GL_STENCIL_BACK_VALUE_MASK 0x8CA4 +#define GL_STENCIL_BACK_WRITEMASK 0x8CA5 +#define GL_VIEWPORT 0x0BA2 +#define GL_SCISSOR_BOX 0x0C10 +#define GL_COLOR_CLEAR_VALUE 0x0C22 +#define GL_COLOR_WRITEMASK 0x0C23 +#define GL_UNPACK_ALIGNMENT 0x0CF5 +#define GL_PACK_ALIGNMENT 0x0D05 +#define GL_MAX_TEXTURE_SIZE 0x0D33 +#define GL_MAX_VIEWPORT_DIMS 0x0D3A +#define GL_SUBPIXEL_BITS 0x0D50 +#define GL_RED_BITS 0x0D52 +#define GL_GREEN_BITS 0x0D53 +#define GL_BLUE_BITS 0x0D54 +#define GL_ALPHA_BITS 0x0D55 +#define GL_DEPTH_BITS 0x0D56 +#define GL_STENCIL_BITS 0x0D57 +#define GL_POLYGON_OFFSET_UNITS 0x2A00 +#define GL_POLYGON_OFFSET_FACTOR 0x8038 +#define GL_TEXTURE_BINDING_2D 0x8069 +#define GL_SAMPLE_BUFFERS 0x80A8 +#define GL_SAMPLES 0x80A9 +#define GL_SAMPLE_COVERAGE_VALUE 0x80AA +#define GL_SAMPLE_COVERAGE_INVERT 0x80AB +#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2 +#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3 +#define GL_DONT_CARE 0x1100 +#define GL_FASTEST 0x1101 +#define GL_NICEST 0x1102 +#define GL_GENERATE_MIPMAP_HINT 0x8192 +#define GL_BYTE 0x1400 +#define GL_UNSIGNED_BYTE 0x1401 +#define GL_SHORT 0x1402 +#define GL_UNSIGNED_SHORT 0x1403 +#define GL_INT 0x1404 +#define GL_UNSIGNED_INT 0x1405 +#define GL_FLOAT 0x1406 +#define GL_FIXED 0x140C +#define GL_DEPTH_COMPONENT 0x1902 +#define GL_ALPHA 0x1906 +#define GL_RGB 0x1907 +#define GL_RGBA 0x1908 +#define GL_LUMINANCE 0x1909 +#define GL_LUMINANCE_ALPHA 0x190A +#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033 +#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034 +#define GL_UNSIGNED_SHORT_5_6_5 0x8363 +#define GL_FRAGMENT_SHADER 0x8B30 +#define GL_VERTEX_SHADER 0x8B31 +#define GL_MAX_VERTEX_ATTRIBS 0x8869 +#define GL_MAX_VERTEX_UNIFORM_VECTORS 0x8DFB +#define GL_MAX_VARYING_VECTORS 0x8DFC +#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D +#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS 0x8B4C +#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872 +#define GL_MAX_FRAGMENT_UNIFORM_VECTORS 0x8DFD +#define GL_SHADER_TYPE 0x8B4F +#define GL_DELETE_STATUS 0x8B80 +#define GL_LINK_STATUS 0x8B82 +#define GL_VALIDATE_STATUS 0x8B83 +#define GL_ATTACHED_SHADERS 0x8B85 +#define GL_ACTIVE_UNIFORMS 0x8B86 +#define GL_ACTIVE_UNIFORM_MAX_LENGTH 0x8B87 +#define GL_ACTIVE_ATTRIBUTES 0x8B89 +#define GL_ACTIVE_ATTRIBUTE_MAX_LENGTH 0x8B8A +#define GL_SHADING_LANGUAGE_VERSION 0x8B8C +#define GL_CURRENT_PROGRAM 0x8B8D +#define GL_NEVER 0x0200 +#define GL_LESS 0x0201 +#define GL_EQUAL 0x0202 +#define GL_LEQUAL 0x0203 +#define GL_GREATER 0x0204 +#define GL_NOTEQUAL 0x0205 +#define GL_GEQUAL 0x0206 +#define GL_ALWAYS 0x0207 +#define GL_KEEP 0x1E00 +#define GL_REPLACE 0x1E01 +#define GL_INCR 0x1E02 +#define GL_DECR 0x1E03 +#define GL_INVERT 0x150A +#define GL_INCR_WRAP 0x8507 +#define GL_DECR_WRAP 0x8508 +#define GL_VENDOR 0x1F00 
+#define GL_RENDERER 0x1F01 +#define GL_VERSION 0x1F02 +#define GL_EXTENSIONS 0x1F03 +#define GL_NEAREST 0x2600 +#define GL_LINEAR 0x2601 +#define GL_NEAREST_MIPMAP_NEAREST 0x2700 +#define GL_LINEAR_MIPMAP_NEAREST 0x2701 +#define GL_NEAREST_MIPMAP_LINEAR 0x2702 +#define GL_LINEAR_MIPMAP_LINEAR 0x2703 +#define GL_TEXTURE_MAG_FILTER 0x2800 +#define GL_TEXTURE_MIN_FILTER 0x2801 +#define GL_TEXTURE_WRAP_S 0x2802 +#define GL_TEXTURE_WRAP_T 0x2803 +#define GL_TEXTURE 0x1702 +#define GL_TEXTURE_CUBE_MAP 0x8513 +#define GL_TEXTURE_BINDING_CUBE_MAP 0x8514 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518 +#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519 +#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A +#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 +#define GL_TEXTURE3 0x84C3 +#define GL_TEXTURE4 0x84C4 +#define GL_TEXTURE5 0x84C5 +#define GL_TEXTURE6 0x84C6 +#define GL_TEXTURE7 0x84C7 +#define GL_TEXTURE8 0x84C8 +#define GL_TEXTURE9 0x84C9 +#define GL_TEXTURE10 0x84CA +#define GL_TEXTURE11 0x84CB +#define GL_TEXTURE12 0x84CC +#define GL_TEXTURE13 0x84CD +#define GL_TEXTURE14 0x84CE +#define GL_TEXTURE15 0x84CF +#define GL_TEXTURE16 0x84D0 +#define GL_TEXTURE17 0x84D1 +#define GL_TEXTURE18 0x84D2 +#define GL_TEXTURE19 0x84D3 +#define GL_TEXTURE20 0x84D4 +#define GL_TEXTURE21 0x84D5 +#define GL_TEXTURE22 0x84D6 +#define GL_TEXTURE23 0x84D7 +#define GL_TEXTURE24 0x84D8 +#define GL_TEXTURE25 0x84D9 +#define GL_TEXTURE26 0x84DA +#define GL_TEXTURE27 0x84DB +#define GL_TEXTURE28 0x84DC +#define GL_TEXTURE29 0x84DD +#define GL_TEXTURE30 0x84DE +#define GL_TEXTURE31 0x84DF +#define GL_ACTIVE_TEXTURE 0x84E0 +#define GL_REPEAT 0x2901 +#define GL_CLAMP_TO_EDGE 0x812F +#define GL_MIRRORED_REPEAT 0x8370 +#define GL_FLOAT_VEC2 0x8B50 +#define GL_FLOAT_VEC3 0x8B51 +#define GL_FLOAT_VEC4 0x8B52 +#define GL_INT_VEC2 0x8B53 +#define GL_INT_VEC3 0x8B54 +#define GL_INT_VEC4 0x8B55 +#define GL_BOOL 0x8B56 +#define GL_BOOL_VEC2 0x8B57 +#define GL_BOOL_VEC3 0x8B58 +#define GL_BOOL_VEC4 0x8B59 +#define GL_FLOAT_MAT2 0x8B5A +#define GL_FLOAT_MAT3 0x8B5B +#define GL_FLOAT_MAT4 0x8B5C +#define GL_SAMPLER_2D 0x8B5E +#define GL_SAMPLER_CUBE 0x8B60 +#define GL_VERTEX_ATTRIB_ARRAY_ENABLED 0x8622 +#define GL_VERTEX_ATTRIB_ARRAY_SIZE 0x8623 +#define GL_VERTEX_ATTRIB_ARRAY_STRIDE 0x8624 +#define GL_VERTEX_ATTRIB_ARRAY_TYPE 0x8625 +#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED 0x886A +#define GL_VERTEX_ATTRIB_ARRAY_POINTER 0x8645 +#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING 0x889F +#define GL_IMPLEMENTATION_COLOR_READ_TYPE 0x8B9A +#define GL_IMPLEMENTATION_COLOR_READ_FORMAT 0x8B9B +#define GL_COMPILE_STATUS 0x8B81 +#define GL_INFO_LOG_LENGTH 0x8B84 +#define GL_SHADER_SOURCE_LENGTH 0x8B88 +#define GL_SHADER_COMPILER 0x8DFA +#define GL_SHADER_BINARY_FORMATS 0x8DF8 +#define GL_NUM_SHADER_BINARY_FORMATS 0x8DF9 +#define GL_LOW_FLOAT 0x8DF0 +#define GL_MEDIUM_FLOAT 0x8DF1 +#define GL_HIGH_FLOAT 0x8DF2 +#define GL_LOW_INT 0x8DF3 +#define GL_MEDIUM_INT 0x8DF4 +#define GL_HIGH_INT 0x8DF5 +#define GL_FRAMEBUFFER 0x8D40 +#define GL_RENDERBUFFER 0x8D41 +#define GL_RGBA4 0x8056 +#define GL_RGB5_A1 0x8057 +#define GL_RGB565 0x8D62 +#define GL_DEPTH_COMPONENT16 0x81A5 +#define GL_STENCIL_INDEX8 0x8D48 +#define GL_RENDERBUFFER_WIDTH 0x8D42 +#define GL_RENDERBUFFER_HEIGHT 0x8D43 +#define GL_RENDERBUFFER_INTERNAL_FORMAT 0x8D44 +#define 
GL_RENDERBUFFER_RED_SIZE 0x8D50 +#define GL_RENDERBUFFER_GREEN_SIZE 0x8D51 +#define GL_RENDERBUFFER_BLUE_SIZE 0x8D52 +#define GL_RENDERBUFFER_ALPHA_SIZE 0x8D53 +#define GL_RENDERBUFFER_DEPTH_SIZE 0x8D54 +#define GL_RENDERBUFFER_STENCIL_SIZE 0x8D55 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE 0x8CD0 +#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME 0x8CD1 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL 0x8CD2 +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE 0x8CD3 +#define GL_COLOR_ATTACHMENT0 0x8CE0 +#define GL_DEPTH_ATTACHMENT 0x8D00 +#define GL_STENCIL_ATTACHMENT 0x8D20 +#define GL_NONE 0 +#define GL_FRAMEBUFFER_COMPLETE 0x8CD5 +#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6 +#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7 +#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS 0x8CD9 +#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD +#define GL_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_RENDERBUFFER_BINDING 0x8CA7 +#define GL_MAX_RENDERBUFFER_SIZE 0x84E8 +#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506 +typedef void (GL_APIENTRYP PFNGLACTIVETEXTUREPROC) (GLenum texture); +typedef void (GL_APIENTRYP PFNGLATTACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLBINDATTRIBLOCATIONPROC) (GLuint program, GLuint index, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLBINDFRAMEBUFFERPROC) (GLenum target, GLuint framebuffer); +typedef void (GL_APIENTRYP PFNGLBINDRENDERBUFFERPROC) (GLenum target, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLBINDTEXTUREPROC) (GLenum target, GLuint texture); +typedef void (GL_APIENTRYP PFNGLBLENDCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLBLENDEQUATIONSEPARATEPROC) (GLenum modeRGB, GLenum modeAlpha); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCPROC) (GLenum sfactor, GLenum dfactor); +typedef void (GL_APIENTRYP PFNGLBLENDFUNCSEPARATEPROC) (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +typedef void (GL_APIENTRYP PFNGLBUFFERDATAPROC) (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +typedef void (GL_APIENTRYP PFNGLBUFFERSUBDATAPROC) (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +typedef GLenum (GL_APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLCLEARPROC) (GLbitfield mask); +typedef void (GL_APIENTRYP PFNGLCLEARCOLORPROC) (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +typedef void (GL_APIENTRYP PFNGLCLEARDEPTHFPROC) (GLfloat d); +typedef void (GL_APIENTRYP PFNGLCLEARSTENCILPROC) (GLint s); +typedef void (GL_APIENTRYP PFNGLCOLORMASKPROC) (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +typedef void (GL_APIENTRYP PFNGLCOMPILESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOPYTEXIMAGE2DPROC) (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE2DPROC) (GLenum 
target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef GLuint (GL_APIENTRYP PFNGLCREATEPROGRAMPROC) (void); +typedef GLuint (GL_APIENTRYP PFNGLCREATESHADERPROC) (GLenum type); +typedef void (GL_APIENTRYP PFNGLCULLFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLDELETEBUFFERSPROC) (GLsizei n, const GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLDELETEFRAMEBUFFERSPROC) (GLsizei n, const GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLDELETEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLDELETERENDERBUFFERSPROC) (GLsizei n, const GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLDELETESHADERPROC) (GLuint shader); +typedef void (GL_APIENTRYP PFNGLDELETETEXTURESPROC) (GLsizei n, const GLuint *textures); +typedef void (GL_APIENTRYP PFNGLDEPTHFUNCPROC) (GLenum func); +typedef void (GL_APIENTRYP PFNGLDEPTHMASKPROC) (GLboolean flag); +typedef void (GL_APIENTRYP PFNGLDEPTHRANGEFPROC) (GLfloat n, GLfloat f); +typedef void (GL_APIENTRYP PFNGLDETACHSHADERPROC) (GLuint program, GLuint shader); +typedef void (GL_APIENTRYP PFNGLDISABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLDISABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSPROC) (GLenum mode, GLint first, GLsizei count); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLENABLEPROC) (GLenum cap); +typedef void (GL_APIENTRYP PFNGLENABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (GL_APIENTRYP PFNGLFINISHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFLUSHPROC) (void); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFERPROC) (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DPROC) (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +typedef void (GL_APIENTRYP PFNGLFRONTFACEPROC) (GLenum mode); +typedef void (GL_APIENTRYP PFNGLGENBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef void (GL_APIENTRYP PFNGLGENERATEMIPMAPPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGENFRAMEBUFFERSPROC) (GLsizei n, GLuint *framebuffers); +typedef void (GL_APIENTRYP PFNGLGENRENDERBUFFERSPROC) (GLsizei n, GLuint *renderbuffers); +typedef void (GL_APIENTRYP PFNGLGENTEXTURESPROC) (GLsizei n, GLuint *textures); +typedef void (GL_APIENTRYP PFNGLGETACTIVEATTRIBPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETATTACHEDSHADERSPROC) (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +typedef GLint (GL_APIENTRYP PFNGLGETATTRIBLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETBOOLEANVPROC) (GLenum pname, GLboolean *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef GLenum (GL_APIENTRYP PFNGLGETERRORPROC) (void); +typedef void (GL_APIENTRYP PFNGLGETFLOATVPROC) (GLenum pname, GLfloat *data); +typedef void (GL_APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC) (GLenum target, GLenum attachment, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETINTEGERVPROC) (GLenum pname, GLint *data); +typedef void 
(GL_APIENTRYP PFNGLGETPROGRAMIVPROC) (GLuint program, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMINFOLOGPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERIVPROC) (GLuint shader, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSHADERINFOLOGPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +typedef void (GL_APIENTRYP PFNGLGETSHADERPRECISIONFORMATPROC) (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +typedef void (GL_APIENTRYP PFNGLGETSHADERSOURCEPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGPROC) (GLenum name); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERFVPROC) (GLenum target, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETTEXPARAMETERIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMFVPROC) (GLuint program, GLint location, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMIVPROC) (GLuint program, GLint location, GLint *params); +typedef GLint (GL_APIENTRYP PFNGLGETUNIFORMLOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBFVPROC) (GLuint index, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVPROC) (GLuint index, GLenum pname, void **pointer); +typedef void (GL_APIENTRYP PFNGLHINTPROC) (GLenum target, GLenum mode); +typedef GLboolean (GL_APIENTRYP PFNGLISBUFFERPROC) (GLuint buffer); +typedef GLboolean (GL_APIENTRYP PFNGLISENABLEDPROC) (GLenum cap); +typedef GLboolean (GL_APIENTRYP PFNGLISFRAMEBUFFERPROC) (GLuint framebuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISPROGRAMPROC) (GLuint program); +typedef GLboolean (GL_APIENTRYP PFNGLISRENDERBUFFERPROC) (GLuint renderbuffer); +typedef GLboolean (GL_APIENTRYP PFNGLISSHADERPROC) (GLuint shader); +typedef GLboolean (GL_APIENTRYP PFNGLISTEXTUREPROC) (GLuint texture); +typedef void (GL_APIENTRYP PFNGLLINEWIDTHPROC) (GLfloat width); +typedef void (GL_APIENTRYP PFNGLLINKPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLPIXELSTOREIPROC) (GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLPOLYGONOFFSETPROC) (GLfloat factor, GLfloat units); +typedef void (GL_APIENTRYP PFNGLREADPIXELSPROC) (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +typedef void (GL_APIENTRYP PFNGLRELEASESHADERCOMPILERPROC) (void); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEPROC) (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSAMPLECOVERAGEPROC) (GLfloat value, GLboolean invert); +typedef void (GL_APIENTRYP PFNGLSCISSORPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLSHADERBINARYPROC) (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLSHADERSOURCEPROC) (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCPROC) (GLenum func, GLint ref, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILFUNCSEPARATEPROC) (GLenum face, GLenum func, GLint ref, 
GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKPROC) (GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILMASKSEPARATEPROC) (GLenum face, GLuint mask); +typedef void (GL_APIENTRYP PFNGLSTENCILOPPROC) (GLenum fail, GLenum zfail, GLenum zpass); +typedef void (GL_APIENTRYP PFNGLSTENCILOPSEPARATEPROC) (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE2DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFPROC) (GLenum target, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERFVPROC) (GLenum target, GLenum pname, const GLfloat *params); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIPROC) (GLenum target, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLTEXPARAMETERIVPROC) (GLenum target, GLenum pname, const GLint *params); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FPROC) (GLint location, GLfloat v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IPROC) (GLint location, GLint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM1IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FPROC) (GLint location, GLfloat v0, GLfloat v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IPROC) (GLint location, GLint v0, GLint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM2IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IPROC) (GLint location, GLint v0, GLint v1, GLint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM3IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FPROC) (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4FVPROC) (GLint location, GLsizei count, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IPROC) (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +typedef void (GL_APIENTRYP PFNGLUNIFORM4IVPROC) (GLint location, GLsizei count, const GLint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUSEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVALIDATEPROGRAMPROC) (GLuint program); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FPROC) (GLuint index, GLfloat x); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB1FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB2FPROC) (GLuint index, GLfloat x, GLfloat y); +typedef void (GL_APIENTRYP 
PFNGLVERTEXATTRIB2FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB3FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FPROC) (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIB4FVPROC) (GLuint index, const GLfloat *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBPOINTERPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLVIEWPORTPROC) (GLint x, GLint y, GLsizei width, GLsizei height); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glActiveTexture (GLenum texture); +GL_APICALL void GL_APIENTRY glAttachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glBindAttribLocation (GLuint program, GLuint index, const GLchar *name); +GL_APICALL void GL_APIENTRY glBindBuffer (GLenum target, GLuint buffer); +GL_APICALL void GL_APIENTRY glBindFramebuffer (GLenum target, GLuint framebuffer); +GL_APICALL void GL_APIENTRY glBindRenderbuffer (GLenum target, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glBindTexture (GLenum target, GLuint texture); +GL_APICALL void GL_APIENTRY glBlendColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glBlendEquation (GLenum mode); +GL_APICALL void GL_APIENTRY glBlendEquationSeparate (GLenum modeRGB, GLenum modeAlpha); +GL_APICALL void GL_APIENTRY glBlendFunc (GLenum sfactor, GLenum dfactor); +GL_APICALL void GL_APIENTRY glBlendFuncSeparate (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha); +GL_APICALL void GL_APIENTRY glBufferData (GLenum target, GLsizeiptr size, const void *data, GLenum usage); +GL_APICALL void GL_APIENTRY glBufferSubData (GLenum target, GLintptr offset, GLsizeiptr size, const void *data); +GL_APICALL GLenum GL_APIENTRY glCheckFramebufferStatus (GLenum target); +GL_APICALL void GL_APIENTRY glClear (GLbitfield mask); +GL_APICALL void GL_APIENTRY glClearColor (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha); +GL_APICALL void GL_APIENTRY glClearDepthf (GLfloat d); +GL_APICALL void GL_APIENTRY glClearStencil (GLint s); +GL_APICALL void GL_APIENTRY glColorMask (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha); +GL_APICALL void GL_APIENTRY glCompileShader (GLuint shader); +GL_APICALL void GL_APIENTRY glCompressedTexImage2D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCopyTexImage2D (GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border); +GL_APICALL void GL_APIENTRY glCopyTexSubImage2D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL GLuint GL_APIENTRY glCreateProgram (void); +GL_APICALL GLuint GL_APIENTRY glCreateShader (GLenum type); +GL_APICALL void GL_APIENTRY glCullFace (GLenum mode); +GL_APICALL void GL_APIENTRY glDeleteBuffers (GLsizei n, const GLuint *buffers); +GL_APICALL void GL_APIENTRY glDeleteFramebuffers (GLsizei n, const GLuint *framebuffers); +GL_APICALL void 
GL_APIENTRY glDeleteProgram (GLuint program); +GL_APICALL void GL_APIENTRY glDeleteRenderbuffers (GLsizei n, const GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glDeleteShader (GLuint shader); +GL_APICALL void GL_APIENTRY glDeleteTextures (GLsizei n, const GLuint *textures); +GL_APICALL void GL_APIENTRY glDepthFunc (GLenum func); +GL_APICALL void GL_APIENTRY glDepthMask (GLboolean flag); +GL_APICALL void GL_APIENTRY glDepthRangef (GLfloat n, GLfloat f); +GL_APICALL void GL_APIENTRY glDetachShader (GLuint program, GLuint shader); +GL_APICALL void GL_APIENTRY glDisable (GLenum cap); +GL_APICALL void GL_APIENTRY glDisableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glDrawArrays (GLenum mode, GLint first, GLsizei count); +GL_APICALL void GL_APIENTRY glDrawElements (GLenum mode, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glEnable (GLenum cap); +GL_APICALL void GL_APIENTRY glEnableVertexAttribArray (GLuint index); +GL_APICALL void GL_APIENTRY glFinish (void); +GL_APICALL void GL_APIENTRY glFlush (void); +GL_APICALL void GL_APIENTRY glFramebufferRenderbuffer (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer); +GL_APICALL void GL_APIENTRY glFramebufferTexture2D (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level); +GL_APICALL void GL_APIENTRY glFrontFace (GLenum mode); +GL_APICALL void GL_APIENTRY glGenBuffers (GLsizei n, GLuint *buffers); +GL_APICALL void GL_APIENTRY glGenerateMipmap (GLenum target); +GL_APICALL void GL_APIENTRY glGenFramebuffers (GLsizei n, GLuint *framebuffers); +GL_APICALL void GL_APIENTRY glGenRenderbuffers (GLsizei n, GLuint *renderbuffers); +GL_APICALL void GL_APIENTRY glGenTextures (GLsizei n, GLuint *textures); +GL_APICALL void GL_APIENTRY glGetActiveAttrib (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetActiveUniform (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glGetAttachedShaders (GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders); +GL_APICALL GLint GL_APIENTRY glGetAttribLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetBooleanv (GLenum pname, GLboolean *data); +GL_APICALL void GL_APIENTRY glGetBufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL GLenum GL_APIENTRY glGetError (void); +GL_APICALL void GL_APIENTRY glGetFloatv (GLenum pname, GLfloat *data); +GL_APICALL void GL_APIENTRY glGetFramebufferAttachmentParameteriv (GLenum target, GLenum attachment, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetIntegerv (GLenum pname, GLint *data); +GL_APICALL void GL_APIENTRY glGetProgramiv (GLuint program, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetProgramInfoLog (GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetRenderbufferParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderiv (GLuint shader, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetShaderInfoLog (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog); +GL_APICALL void GL_APIENTRY glGetShaderPrecisionFormat (GLenum shadertype, GLenum precisiontype, GLint *range, GLint *precision); +GL_APICALL void GL_APIENTRY glGetShaderSource (GLuint shader, GLsizei bufSize, GLsizei *length, GLchar 
*source); +GL_APICALL const GLubyte *GL_APIENTRY glGetString (GLenum name); +GL_APICALL void GL_APIENTRY glGetTexParameterfv (GLenum target, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetTexParameteriv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetUniformfv (GLuint program, GLint location, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetUniformiv (GLuint program, GLint location, GLint *params); +GL_APICALL GLint GL_APIENTRY glGetUniformLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glGetVertexAttribfv (GLuint index, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribiv (GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribPointerv (GLuint index, GLenum pname, void **pointer); +GL_APICALL void GL_APIENTRY glHint (GLenum target, GLenum mode); +GL_APICALL GLboolean GL_APIENTRY glIsBuffer (GLuint buffer); +GL_APICALL GLboolean GL_APIENTRY glIsEnabled (GLenum cap); +GL_APICALL GLboolean GL_APIENTRY glIsFramebuffer (GLuint framebuffer); +GL_APICALL GLboolean GL_APIENTRY glIsProgram (GLuint program); +GL_APICALL GLboolean GL_APIENTRY glIsRenderbuffer (GLuint renderbuffer); +GL_APICALL GLboolean GL_APIENTRY glIsShader (GLuint shader); +GL_APICALL GLboolean GL_APIENTRY glIsTexture (GLuint texture); +GL_APICALL void GL_APIENTRY glLineWidth (GLfloat width); +GL_APICALL void GL_APIENTRY glLinkProgram (GLuint program); +GL_APICALL void GL_APIENTRY glPixelStorei (GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glPolygonOffset (GLfloat factor, GLfloat units); +GL_APICALL void GL_APIENTRY glReadPixels (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels); +GL_APICALL void GL_APIENTRY glReleaseShaderCompiler (void); +GL_APICALL void GL_APIENTRY glRenderbufferStorage (GLenum target, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glSampleCoverage (GLfloat value, GLboolean invert); +GL_APICALL void GL_APIENTRY glScissor (GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glShaderBinary (GLsizei count, const GLuint *shaders, GLenum binaryformat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glShaderSource (GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length); +GL_APICALL void GL_APIENTRY glStencilFunc (GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilFuncSeparate (GLenum face, GLenum func, GLint ref, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMask (GLuint mask); +GL_APICALL void GL_APIENTRY glStencilMaskSeparate (GLenum face, GLuint mask); +GL_APICALL void GL_APIENTRY glStencilOp (GLenum fail, GLenum zfail, GLenum zpass); +GL_APICALL void GL_APIENTRY glStencilOpSeparate (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass); +GL_APICALL void GL_APIENTRY glTexImage2D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexParameterf (GLenum target, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glTexParameterfv (GLenum target, GLenum pname, const GLfloat *params); +GL_APICALL void GL_APIENTRY glTexParameteri (GLenum target, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glTexParameteriv (GLenum target, GLenum pname, const GLint *params); +GL_APICALL void GL_APIENTRY glTexSubImage2D (GLenum target, GLint level, GLint xoffset, 
GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glUniform1f (GLint location, GLfloat v0); +GL_APICALL void GL_APIENTRY glUniform1fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform1i (GLint location, GLint v0); +GL_APICALL void GL_APIENTRY glUniform1iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform2f (GLint location, GLfloat v0, GLfloat v1); +GL_APICALL void GL_APIENTRY glUniform2fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform2i (GLint location, GLint v0, GLint v1); +GL_APICALL void GL_APIENTRY glUniform2iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform3f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2); +GL_APICALL void GL_APIENTRY glUniform3fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform3i (GLint location, GLint v0, GLint v1, GLint v2); +GL_APICALL void GL_APIENTRY glUniform3iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniform4f (GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3); +GL_APICALL void GL_APIENTRY glUniform4fv (GLint location, GLsizei count, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniform4i (GLint location, GLint v0, GLint v1, GLint v2, GLint v3); +GL_APICALL void GL_APIENTRY glUniform4iv (GLint location, GLsizei count, const GLint *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUseProgram (GLuint program); +GL_APICALL void GL_APIENTRY glValidateProgram (GLuint program); +GL_APICALL void GL_APIENTRY glVertexAttrib1f (GLuint index, GLfloat x); +GL_APICALL void GL_APIENTRY glVertexAttrib1fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib2f (GLuint index, GLfloat x, GLfloat y); +GL_APICALL void GL_APIENTRY glVertexAttrib2fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib3f (GLuint index, GLfloat x, GLfloat y, GLfloat z); +GL_APICALL void GL_APIENTRY glVertexAttrib3fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttrib4f (GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w); +GL_APICALL void GL_APIENTRY glVertexAttrib4fv (GLuint index, const GLfloat *v); +GL_APICALL void GL_APIENTRY glVertexAttribPointer (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer); +GL_APICALL void GL_APIENTRY glViewport (GLint x, GLint y, GLsizei width, GLsizei height); +#endif +#endif /* GL_ES_VERSION_2_0 */ + +#ifndef GL_ES_VERSION_3_0 +#define GL_ES_VERSION_3_0 1 +typedef unsigned short GLhalf; +#define GL_READ_BUFFER 0x0C02 +#define GL_UNPACK_ROW_LENGTH 0x0CF2 +#define GL_UNPACK_SKIP_ROWS 0x0CF3 +#define GL_UNPACK_SKIP_PIXELS 0x0CF4 +#define GL_PACK_ROW_LENGTH 0x0D02 +#define GL_PACK_SKIP_ROWS 0x0D03 +#define GL_PACK_SKIP_PIXELS 0x0D04 +#define GL_COLOR 0x1800 +#define GL_DEPTH 0x1801 +#define GL_STENCIL 0x1802 +#define GL_RED 0x1903 +#define GL_RGB8 0x8051 +#define GL_RGBA8 0x8058 +#define GL_RGB10_A2 0x8059 +#define 
GL_TEXTURE_BINDING_3D 0x806A +#define GL_UNPACK_SKIP_IMAGES 0x806D +#define GL_UNPACK_IMAGE_HEIGHT 0x806E +#define GL_TEXTURE_3D 0x806F +#define GL_TEXTURE_WRAP_R 0x8072 +#define GL_MAX_3D_TEXTURE_SIZE 0x8073 +#define GL_UNSIGNED_INT_2_10_10_10_REV 0x8368 +#define GL_MAX_ELEMENTS_VERTICES 0x80E8 +#define GL_MAX_ELEMENTS_INDICES 0x80E9 +#define GL_TEXTURE_MIN_LOD 0x813A +#define GL_TEXTURE_MAX_LOD 0x813B +#define GL_TEXTURE_BASE_LEVEL 0x813C +#define GL_TEXTURE_MAX_LEVEL 0x813D +#define GL_MIN 0x8007 +#define GL_MAX 0x8008 +#define GL_DEPTH_COMPONENT24 0x81A6 +#define GL_MAX_TEXTURE_LOD_BIAS 0x84FD +#define GL_TEXTURE_COMPARE_MODE 0x884C +#define GL_TEXTURE_COMPARE_FUNC 0x884D +#define GL_CURRENT_QUERY 0x8865 +#define GL_QUERY_RESULT 0x8866 +#define GL_QUERY_RESULT_AVAILABLE 0x8867 +#define GL_BUFFER_MAPPED 0x88BC +#define GL_BUFFER_MAP_POINTER 0x88BD +#define GL_STREAM_READ 0x88E1 +#define GL_STREAM_COPY 0x88E2 +#define GL_STATIC_READ 0x88E5 +#define GL_STATIC_COPY 0x88E6 +#define GL_DYNAMIC_READ 0x88E9 +#define GL_DYNAMIC_COPY 0x88EA +#define GL_MAX_DRAW_BUFFERS 0x8824 +#define GL_DRAW_BUFFER0 0x8825 +#define GL_DRAW_BUFFER1 0x8826 +#define GL_DRAW_BUFFER2 0x8827 +#define GL_DRAW_BUFFER3 0x8828 +#define GL_DRAW_BUFFER4 0x8829 +#define GL_DRAW_BUFFER5 0x882A +#define GL_DRAW_BUFFER6 0x882B +#define GL_DRAW_BUFFER7 0x882C +#define GL_DRAW_BUFFER8 0x882D +#define GL_DRAW_BUFFER9 0x882E +#define GL_DRAW_BUFFER10 0x882F +#define GL_DRAW_BUFFER11 0x8830 +#define GL_DRAW_BUFFER12 0x8831 +#define GL_DRAW_BUFFER13 0x8832 +#define GL_DRAW_BUFFER14 0x8833 +#define GL_DRAW_BUFFER15 0x8834 +#define GL_MAX_FRAGMENT_UNIFORM_COMPONENTS 0x8B49 +#define GL_MAX_VERTEX_UNIFORM_COMPONENTS 0x8B4A +#define GL_SAMPLER_3D 0x8B5F +#define GL_SAMPLER_2D_SHADOW 0x8B62 +#define GL_FRAGMENT_SHADER_DERIVATIVE_HINT 0x8B8B +#define GL_PIXEL_PACK_BUFFER 0x88EB +#define GL_PIXEL_UNPACK_BUFFER 0x88EC +#define GL_PIXEL_PACK_BUFFER_BINDING 0x88ED +#define GL_PIXEL_UNPACK_BUFFER_BINDING 0x88EF +#define GL_FLOAT_MAT2x3 0x8B65 +#define GL_FLOAT_MAT2x4 0x8B66 +#define GL_FLOAT_MAT3x2 0x8B67 +#define GL_FLOAT_MAT3x4 0x8B68 +#define GL_FLOAT_MAT4x2 0x8B69 +#define GL_FLOAT_MAT4x3 0x8B6A +#define GL_SRGB 0x8C40 +#define GL_SRGB8 0x8C41 +#define GL_SRGB8_ALPHA8 0x8C43 +#define GL_COMPARE_REF_TO_TEXTURE 0x884E +#define GL_MAJOR_VERSION 0x821B +#define GL_MINOR_VERSION 0x821C +#define GL_NUM_EXTENSIONS 0x821D +#define GL_RGBA32F 0x8814 +#define GL_RGB32F 0x8815 +#define GL_RGBA16F 0x881A +#define GL_RGB16F 0x881B +#define GL_VERTEX_ATTRIB_ARRAY_INTEGER 0x88FD +#define GL_MAX_ARRAY_TEXTURE_LAYERS 0x88FF +#define GL_MIN_PROGRAM_TEXEL_OFFSET 0x8904 +#define GL_MAX_PROGRAM_TEXEL_OFFSET 0x8905 +#define GL_MAX_VARYING_COMPONENTS 0x8B4B +#define GL_TEXTURE_2D_ARRAY 0x8C1A +#define GL_TEXTURE_BINDING_2D_ARRAY 0x8C1D +#define GL_R11F_G11F_B10F 0x8C3A +#define GL_UNSIGNED_INT_10F_11F_11F_REV 0x8C3B +#define GL_RGB9_E5 0x8C3D +#define GL_UNSIGNED_INT_5_9_9_9_REV 0x8C3E +#define GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH 0x8C76 +#define GL_TRANSFORM_FEEDBACK_BUFFER_MODE 0x8C7F +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS 0x8C80 +#define GL_TRANSFORM_FEEDBACK_VARYINGS 0x8C83 +#define GL_TRANSFORM_FEEDBACK_BUFFER_START 0x8C84 +#define GL_TRANSFORM_FEEDBACK_BUFFER_SIZE 0x8C85 +#define GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN 0x8C88 +#define GL_RASTERIZER_DISCARD 0x8C89 +#define GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS 0x8C8A +#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS 0x8C8B +#define GL_INTERLEAVED_ATTRIBS 0x8C8C +#define 
GL_SEPARATE_ATTRIBS 0x8C8D +#define GL_TRANSFORM_FEEDBACK_BUFFER 0x8C8E +#define GL_TRANSFORM_FEEDBACK_BUFFER_BINDING 0x8C8F +#define GL_RGBA32UI 0x8D70 +#define GL_RGB32UI 0x8D71 +#define GL_RGBA16UI 0x8D76 +#define GL_RGB16UI 0x8D77 +#define GL_RGBA8UI 0x8D7C +#define GL_RGB8UI 0x8D7D +#define GL_RGBA32I 0x8D82 +#define GL_RGB32I 0x8D83 +#define GL_RGBA16I 0x8D88 +#define GL_RGB16I 0x8D89 +#define GL_RGBA8I 0x8D8E +#define GL_RGB8I 0x8D8F +#define GL_RED_INTEGER 0x8D94 +#define GL_RGB_INTEGER 0x8D98 +#define GL_RGBA_INTEGER 0x8D99 +#define GL_SAMPLER_2D_ARRAY 0x8DC1 +#define GL_SAMPLER_2D_ARRAY_SHADOW 0x8DC4 +#define GL_SAMPLER_CUBE_SHADOW 0x8DC5 +#define GL_UNSIGNED_INT_VEC2 0x8DC6 +#define GL_UNSIGNED_INT_VEC3 0x8DC7 +#define GL_UNSIGNED_INT_VEC4 0x8DC8 +#define GL_INT_SAMPLER_2D 0x8DCA +#define GL_INT_SAMPLER_3D 0x8DCB +#define GL_INT_SAMPLER_CUBE 0x8DCC +#define GL_INT_SAMPLER_2D_ARRAY 0x8DCF +#define GL_UNSIGNED_INT_SAMPLER_2D 0x8DD2 +#define GL_UNSIGNED_INT_SAMPLER_3D 0x8DD3 +#define GL_UNSIGNED_INT_SAMPLER_CUBE 0x8DD4 +#define GL_UNSIGNED_INT_SAMPLER_2D_ARRAY 0x8DD7 +#define GL_BUFFER_ACCESS_FLAGS 0x911F +#define GL_BUFFER_MAP_LENGTH 0x9120 +#define GL_BUFFER_MAP_OFFSET 0x9121 +#define GL_DEPTH_COMPONENT32F 0x8CAC +#define GL_DEPTH32F_STENCIL8 0x8CAD +#define GL_FLOAT_32_UNSIGNED_INT_24_8_REV 0x8DAD +#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING 0x8210 +#define GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE 0x8211 +#define GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE 0x8212 +#define GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE 0x8213 +#define GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE 0x8214 +#define GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE 0x8215 +#define GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE 0x8216 +#define GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE 0x8217 +#define GL_FRAMEBUFFER_DEFAULT 0x8218 +#define GL_FRAMEBUFFER_UNDEFINED 0x8219 +#define GL_DEPTH_STENCIL_ATTACHMENT 0x821A +#define GL_DEPTH_STENCIL 0x84F9 +#define GL_UNSIGNED_INT_24_8 0x84FA +#define GL_DEPTH24_STENCIL8 0x88F0 +#define GL_UNSIGNED_NORMALIZED 0x8C17 +#define GL_DRAW_FRAMEBUFFER_BINDING 0x8CA6 +#define GL_READ_FRAMEBUFFER 0x8CA8 +#define GL_DRAW_FRAMEBUFFER 0x8CA9 +#define GL_READ_FRAMEBUFFER_BINDING 0x8CAA +#define GL_RENDERBUFFER_SAMPLES 0x8CAB +#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER 0x8CD4 +#define GL_MAX_COLOR_ATTACHMENTS 0x8CDF +#define GL_COLOR_ATTACHMENT1 0x8CE1 +#define GL_COLOR_ATTACHMENT2 0x8CE2 +#define GL_COLOR_ATTACHMENT3 0x8CE3 +#define GL_COLOR_ATTACHMENT4 0x8CE4 +#define GL_COLOR_ATTACHMENT5 0x8CE5 +#define GL_COLOR_ATTACHMENT6 0x8CE6 +#define GL_COLOR_ATTACHMENT7 0x8CE7 +#define GL_COLOR_ATTACHMENT8 0x8CE8 +#define GL_COLOR_ATTACHMENT9 0x8CE9 +#define GL_COLOR_ATTACHMENT10 0x8CEA +#define GL_COLOR_ATTACHMENT11 0x8CEB +#define GL_COLOR_ATTACHMENT12 0x8CEC +#define GL_COLOR_ATTACHMENT13 0x8CED +#define GL_COLOR_ATTACHMENT14 0x8CEE +#define GL_COLOR_ATTACHMENT15 0x8CEF +#define GL_COLOR_ATTACHMENT16 0x8CF0 +#define GL_COLOR_ATTACHMENT17 0x8CF1 +#define GL_COLOR_ATTACHMENT18 0x8CF2 +#define GL_COLOR_ATTACHMENT19 0x8CF3 +#define GL_COLOR_ATTACHMENT20 0x8CF4 +#define GL_COLOR_ATTACHMENT21 0x8CF5 +#define GL_COLOR_ATTACHMENT22 0x8CF6 +#define GL_COLOR_ATTACHMENT23 0x8CF7 +#define GL_COLOR_ATTACHMENT24 0x8CF8 +#define GL_COLOR_ATTACHMENT25 0x8CF9 +#define GL_COLOR_ATTACHMENT26 0x8CFA +#define GL_COLOR_ATTACHMENT27 0x8CFB +#define GL_COLOR_ATTACHMENT28 0x8CFC +#define GL_COLOR_ATTACHMENT29 0x8CFD +#define GL_COLOR_ATTACHMENT30 0x8CFE +#define GL_COLOR_ATTACHMENT31 0x8CFF +#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE 0x8D56 
+#define GL_MAX_SAMPLES 0x8D57 +#define GL_HALF_FLOAT 0x140B +#define GL_MAP_READ_BIT 0x0001 +#define GL_MAP_WRITE_BIT 0x0002 +#define GL_MAP_INVALIDATE_RANGE_BIT 0x0004 +#define GL_MAP_INVALIDATE_BUFFER_BIT 0x0008 +#define GL_MAP_FLUSH_EXPLICIT_BIT 0x0010 +#define GL_MAP_UNSYNCHRONIZED_BIT 0x0020 +#define GL_RG 0x8227 +#define GL_RG_INTEGER 0x8228 +#define GL_R8 0x8229 +#define GL_RG8 0x822B +#define GL_R16F 0x822D +#define GL_R32F 0x822E +#define GL_RG16F 0x822F +#define GL_RG32F 0x8230 +#define GL_R8I 0x8231 +#define GL_R8UI 0x8232 +#define GL_R16I 0x8233 +#define GL_R16UI 0x8234 +#define GL_R32I 0x8235 +#define GL_R32UI 0x8236 +#define GL_RG8I 0x8237 +#define GL_RG8UI 0x8238 +#define GL_RG16I 0x8239 +#define GL_RG16UI 0x823A +#define GL_RG32I 0x823B +#define GL_RG32UI 0x823C +#define GL_VERTEX_ARRAY_BINDING 0x85B5 +#define GL_R8_SNORM 0x8F94 +#define GL_RG8_SNORM 0x8F95 +#define GL_RGB8_SNORM 0x8F96 +#define GL_RGBA8_SNORM 0x8F97 +#define GL_SIGNED_NORMALIZED 0x8F9C +#define GL_PRIMITIVE_RESTART_FIXED_INDEX 0x8D69 +#define GL_COPY_READ_BUFFER 0x8F36 +#define GL_COPY_WRITE_BUFFER 0x8F37 +#define GL_COPY_READ_BUFFER_BINDING 0x8F36 +#define GL_COPY_WRITE_BUFFER_BINDING 0x8F37 +#define GL_UNIFORM_BUFFER 0x8A11 +#define GL_UNIFORM_BUFFER_BINDING 0x8A28 +#define GL_UNIFORM_BUFFER_START 0x8A29 +#define GL_UNIFORM_BUFFER_SIZE 0x8A2A +#define GL_MAX_VERTEX_UNIFORM_BLOCKS 0x8A2B +#define GL_MAX_FRAGMENT_UNIFORM_BLOCKS 0x8A2D +#define GL_MAX_COMBINED_UNIFORM_BLOCKS 0x8A2E +#define GL_MAX_UNIFORM_BUFFER_BINDINGS 0x8A2F +#define GL_MAX_UNIFORM_BLOCK_SIZE 0x8A30 +#define GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS 0x8A31 +#define GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS 0x8A33 +#define GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT 0x8A34 +#define GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH 0x8A35 +#define GL_ACTIVE_UNIFORM_BLOCKS 0x8A36 +#define GL_UNIFORM_TYPE 0x8A37 +#define GL_UNIFORM_SIZE 0x8A38 +#define GL_UNIFORM_NAME_LENGTH 0x8A39 +#define GL_UNIFORM_BLOCK_INDEX 0x8A3A +#define GL_UNIFORM_OFFSET 0x8A3B +#define GL_UNIFORM_ARRAY_STRIDE 0x8A3C +#define GL_UNIFORM_MATRIX_STRIDE 0x8A3D +#define GL_UNIFORM_IS_ROW_MAJOR 0x8A3E +#define GL_UNIFORM_BLOCK_BINDING 0x8A3F +#define GL_UNIFORM_BLOCK_DATA_SIZE 0x8A40 +#define GL_UNIFORM_BLOCK_NAME_LENGTH 0x8A41 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS 0x8A42 +#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES 0x8A43 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER 0x8A44 +#define GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER 0x8A46 +#define GL_INVALID_INDEX 0xFFFFFFFFu +#define GL_MAX_VERTEX_OUTPUT_COMPONENTS 0x9122 +#define GL_MAX_FRAGMENT_INPUT_COMPONENTS 0x9125 +#define GL_MAX_SERVER_WAIT_TIMEOUT 0x9111 +#define GL_OBJECT_TYPE 0x9112 +#define GL_SYNC_CONDITION 0x9113 +#define GL_SYNC_STATUS 0x9114 +#define GL_SYNC_FLAGS 0x9115 +#define GL_SYNC_FENCE 0x9116 +#define GL_SYNC_GPU_COMMANDS_COMPLETE 0x9117 +#define GL_UNSIGNALED 0x9118 +#define GL_SIGNALED 0x9119 +#define GL_ALREADY_SIGNALED 0x911A +#define GL_TIMEOUT_EXPIRED 0x911B +#define GL_CONDITION_SATISFIED 0x911C +#define GL_WAIT_FAILED 0x911D +#define GL_SYNC_FLUSH_COMMANDS_BIT 0x00000001 +#define GL_TIMEOUT_IGNORED 0xFFFFFFFFFFFFFFFFull +#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR 0x88FE +#define GL_ANY_SAMPLES_PASSED 0x8C2F +#define GL_ANY_SAMPLES_PASSED_CONSERVATIVE 0x8D6A +#define GL_SAMPLER_BINDING 0x8919 +#define GL_RGB10_A2UI 0x906F +#define GL_TEXTURE_SWIZZLE_R 0x8E42 +#define GL_TEXTURE_SWIZZLE_G 0x8E43 +#define GL_TEXTURE_SWIZZLE_B 0x8E44 +#define GL_TEXTURE_SWIZZLE_A 0x8E45 +#define GL_GREEN 0x1904 
+#define GL_BLUE 0x1905 +#define GL_INT_2_10_10_10_REV 0x8D9F +#define GL_TRANSFORM_FEEDBACK 0x8E22 +#define GL_TRANSFORM_FEEDBACK_PAUSED 0x8E23 +#define GL_TRANSFORM_FEEDBACK_ACTIVE 0x8E24 +#define GL_TRANSFORM_FEEDBACK_BINDING 0x8E25 +#define GL_PROGRAM_BINARY_RETRIEVABLE_HINT 0x8257 +#define GL_PROGRAM_BINARY_LENGTH 0x8741 +#define GL_NUM_PROGRAM_BINARY_FORMATS 0x87FE +#define GL_PROGRAM_BINARY_FORMATS 0x87FF +#define GL_COMPRESSED_R11_EAC 0x9270 +#define GL_COMPRESSED_SIGNED_R11_EAC 0x9271 +#define GL_COMPRESSED_RG11_EAC 0x9272 +#define GL_COMPRESSED_SIGNED_RG11_EAC 0x9273 +#define GL_COMPRESSED_RGB8_ETC2 0x9274 +#define GL_COMPRESSED_SRGB8_ETC2 0x9275 +#define GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9276 +#define GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9277 +#define GL_COMPRESSED_RGBA8_ETC2_EAC 0x9278 +#define GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC 0x9279 +#define GL_TEXTURE_IMMUTABLE_FORMAT 0x912F +#define GL_MAX_ELEMENT_INDEX 0x8D6B +#define GL_NUM_SAMPLE_COUNTS 0x9380 +#define GL_TEXTURE_IMMUTABLE_LEVELS 0x82DF +typedef void (GL_APIENTRYP PFNGLREADBUFFERPROC) (GLenum src); +typedef void (GL_APIENTRYP PFNGLDRAWRANGEELEMENTSPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +typedef void (GL_APIENTRYP PFNGLTEXIMAGE3DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +typedef void (GL_APIENTRYP PFNGLCOPYTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DPROC) (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +typedef void (GL_APIENTRYP PFNGLGENQUERIESPROC) (GLsizei n, GLuint *ids); +typedef void (GL_APIENTRYP PFNGLDELETEQUERIESPROC) (GLsizei n, const GLuint *ids); +typedef GLboolean (GL_APIENTRYP PFNGLISQUERYPROC) (GLuint id); +typedef void (GL_APIENTRYP PFNGLBEGINQUERYPROC) (GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLENDQUERYPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETQUERYIVPROC) (GLenum target, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETQUERYOBJECTUIVPROC) (GLuint id, GLenum pname, GLuint *params); +typedef GLboolean (GL_APIENTRYP PFNGLUNMAPBUFFERPROC) (GLenum target); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPOINTERVPROC) (GLenum target, GLenum pname, void **params); +typedef void (GL_APIENTRYP PFNGLDRAWBUFFERSPROC) (GLsizei n, const GLenum *bufs); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX2X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP 
PFNGLUNIFORMMATRIX4X2FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX3X4FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLUNIFORMMATRIX4X3FVPROC) (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLBLITFRAMEBUFFERPROC) (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +typedef void (GL_APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC) (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERPROC) (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +typedef void *(GL_APIENTRYP PFNGLMAPBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +typedef void (GL_APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEPROC) (GLenum target, GLintptr offset, GLsizeiptr length); +typedef void (GL_APIENTRYP PFNGLBINDVERTEXARRAYPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLDELETEVERTEXARRAYSPROC) (GLsizei n, const GLuint *arrays); +typedef void (GL_APIENTRYP PFNGLGENVERTEXARRAYSPROC) (GLsizei n, GLuint *arrays); +typedef GLboolean (GL_APIENTRYP PFNGLISVERTEXARRAYPROC) (GLuint array); +typedef void (GL_APIENTRYP PFNGLGETINTEGERI_VPROC) (GLenum target, GLuint index, GLint *data); +typedef void (GL_APIENTRYP PFNGLBEGINTRANSFORMFEEDBACKPROC) (GLenum primitiveMode); +typedef void (GL_APIENTRYP PFNGLENDTRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERRANGEPROC) (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLBINDBUFFERBASEPROC) (GLenum target, GLuint index, GLuint buffer); +typedef void (GL_APIENTRYP PFNGLTRANSFORMFEEDBACKVARYINGSPROC) (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +typedef void (GL_APIENTRYP PFNGLGETTRANSFORMFEEDBACKVARYINGPROC) (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBIPOINTERPROC) (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIIVPROC) (GLuint index, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETVERTEXATTRIBIUIVPROC) (GLuint index, GLenum pname, GLuint *params); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4IPROC) (GLuint index, GLint x, GLint y, GLint z, GLint w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4UIPROC) (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4IVPROC) (GLuint index, const GLint *v); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBI4UIVPROC) (GLuint index, const GLuint *v); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMUIVPROC) (GLuint program, GLint location, GLuint *params); +typedef GLint (GL_APIENTRYP PFNGLGETFRAGDATALOCATIONPROC) (GLuint program, const GLchar *name); +typedef void (GL_APIENTRYP PFNGLUNIFORM1UIPROC) (GLint location, GLuint v0); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UIPROC) (GLint location, GLuint v0, GLuint v1); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UIPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UIPROC) (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +typedef void 
(GL_APIENTRYP PFNGLUNIFORM1UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM2UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM3UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLUNIFORM4UIVPROC) (GLint location, GLsizei count, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERIVPROC) (GLenum buffer, GLint drawbuffer, const GLint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERUIVPROC) (GLenum buffer, GLint drawbuffer, const GLuint *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERFVPROC) (GLenum buffer, GLint drawbuffer, const GLfloat *value); +typedef void (GL_APIENTRYP PFNGLCLEARBUFFERFIPROC) (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +typedef const GLubyte *(GL_APIENTRYP PFNGLGETSTRINGIPROC) (GLenum name, GLuint index); +typedef void (GL_APIENTRYP PFNGLCOPYBUFFERSUBDATAPROC) (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +typedef void (GL_APIENTRYP PFNGLGETUNIFORMINDICESPROC) (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMSIVPROC) (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +typedef GLuint (GL_APIENTRYP PFNGLGETUNIFORMBLOCKINDEXPROC) (GLuint program, const GLchar *uniformBlockName); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKIVPROC) (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC) (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +typedef void (GL_APIENTRYP PFNGLUNIFORMBLOCKBINDINGPROC) (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +typedef void (GL_APIENTRYP PFNGLDRAWARRAYSINSTANCEDPROC) (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +typedef void (GL_APIENTRYP PFNGLDRAWELEMENTSINSTANCEDPROC) (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +typedef GLsync (GL_APIENTRYP PFNGLFENCESYNCPROC) (GLenum condition, GLbitfield flags); +typedef GLboolean (GL_APIENTRYP PFNGLISSYNCPROC) (GLsync sync); +typedef void (GL_APIENTRYP PFNGLDELETESYNCPROC) (GLsync sync); +typedef GLenum (GL_APIENTRYP PFNGLCLIENTWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLWAITSYNCPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64VPROC) (GLenum pname, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETSYNCIVPROC) (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +typedef void (GL_APIENTRYP PFNGLGETINTEGER64I_VPROC) (GLenum target, GLuint index, GLint64 *data); +typedef void (GL_APIENTRYP PFNGLGETBUFFERPARAMETERI64VPROC) (GLenum target, GLenum pname, GLint64 *params); +typedef void (GL_APIENTRYP PFNGLGENSAMPLERSPROC) (GLsizei count, GLuint *samplers); +typedef void (GL_APIENTRYP PFNGLDELETESAMPLERSPROC) (GLsizei count, const GLuint *samplers); +typedef GLboolean (GL_APIENTRYP PFNGLISSAMPLERPROC) (GLuint sampler); +typedef void (GL_APIENTRYP PFNGLBINDSAMPLERPROC) (GLuint unit, GLuint sampler); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIPROC) (GLuint sampler, GLenum pname, GLint param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERIVPROC) (GLuint 
sampler, GLenum pname, const GLint *param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFPROC) (GLuint sampler, GLenum pname, GLfloat param); +typedef void (GL_APIENTRYP PFNGLSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, const GLfloat *param); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERIVPROC) (GLuint sampler, GLenum pname, GLint *params); +typedef void (GL_APIENTRYP PFNGLGETSAMPLERPARAMETERFVPROC) (GLuint sampler, GLenum pname, GLfloat *params); +typedef void (GL_APIENTRYP PFNGLVERTEXATTRIBDIVISORPROC) (GLuint index, GLuint divisor); +typedef void (GL_APIENTRYP PFNGLBINDTRANSFORMFEEDBACKPROC) (GLenum target, GLuint id); +typedef void (GL_APIENTRYP PFNGLDELETETRANSFORMFEEDBACKSPROC) (GLsizei n, const GLuint *ids); +typedef void (GL_APIENTRYP PFNGLGENTRANSFORMFEEDBACKSPROC) (GLsizei n, GLuint *ids); +typedef GLboolean (GL_APIENTRYP PFNGLISTRANSFORMFEEDBACKPROC) (GLuint id); +typedef void (GL_APIENTRYP PFNGLPAUSETRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLRESUMETRANSFORMFEEDBACKPROC) (void); +typedef void (GL_APIENTRYP PFNGLGETPROGRAMBINARYPROC) (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +typedef void (GL_APIENTRYP PFNGLPROGRAMBINARYPROC) (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +typedef void (GL_APIENTRYP PFNGLPROGRAMPARAMETERIPROC) (GLuint program, GLenum pname, GLint value); +typedef void (GL_APIENTRYP PFNGLINVALIDATEFRAMEBUFFERPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments); +typedef void (GL_APIENTRYP PFNGLINVALIDATESUBFRAMEBUFFERPROC) (GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE2DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +typedef void (GL_APIENTRYP PFNGLTEXSTORAGE3DPROC) (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +typedef void (GL_APIENTRYP PFNGLGETINTERNALFORMATIVPROC) (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +#if GL_GLES_PROTOTYPES +GL_APICALL void GL_APIENTRY glReadBuffer (GLenum src); +GL_APICALL void GL_APIENTRY glDrawRangeElements (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices); +GL_APICALL void GL_APIENTRY glTexImage3D (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels); +GL_APICALL void GL_APIENTRY glCopyTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glCompressedTexImage3D (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glCompressedTexSubImage3D (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data); +GL_APICALL void GL_APIENTRY glGenQueries (GLsizei n, GLuint *ids); +GL_APICALL void GL_APIENTRY glDeleteQueries (GLsizei n, const GLuint *ids); 
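The PFNGL... pointer typedefs above exist so the ES 3.0 entry points can be resolved at run time when prototypes are not linked directly; a sketch of that pattern (not part of the vendored header), assuming an EGL platform where eglGetProcAddress is available and using an illustrative helper name:

#include <EGL/egl.h>
#include <GLES3/gl3.h>

static PFNGLMAPBUFFERRANGEPROC pglMapBufferRange = nullptr;

// Resolves one ES 3.0 entry point through EGL; returns false if the
// implementation does not expose it.
static bool LoadMapBufferRange() {
  pglMapBufferRange = reinterpret_cast<PFNGLMAPBUFFERRANGEPROC>(
      eglGetProcAddress("glMapBufferRange"));
  return pglMapBufferRange != nullptr;
}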
+GL_APICALL GLboolean GL_APIENTRY glIsQuery (GLuint id); +GL_APICALL void GL_APIENTRY glBeginQuery (GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glEndQuery (GLenum target); +GL_APICALL void GL_APIENTRY glGetQueryiv (GLenum target, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetQueryObjectuiv (GLuint id, GLenum pname, GLuint *params); +GL_APICALL GLboolean GL_APIENTRY glUnmapBuffer (GLenum target); +GL_APICALL void GL_APIENTRY glGetBufferPointerv (GLenum target, GLenum pname, void **params); +GL_APICALL void GL_APIENTRY glDrawBuffers (GLsizei n, const GLenum *bufs); +GL_APICALL void GL_APIENTRY glUniformMatrix2x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix2x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x2fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix3x4fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glUniformMatrix4x3fv (GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +GL_APICALL void GL_APIENTRY glBlitFramebuffer (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter); +GL_APICALL void GL_APIENTRY glRenderbufferStorageMultisample (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glFramebufferTextureLayer (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer); +GL_APICALL void *GL_APIENTRY glMapBufferRange (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access); +GL_APICALL void GL_APIENTRY glFlushMappedBufferRange (GLenum target, GLintptr offset, GLsizeiptr length); +GL_APICALL void GL_APIENTRY glBindVertexArray (GLuint array); +GL_APICALL void GL_APIENTRY glDeleteVertexArrays (GLsizei n, const GLuint *arrays); +GL_APICALL void GL_APIENTRY glGenVertexArrays (GLsizei n, GLuint *arrays); +GL_APICALL GLboolean GL_APIENTRY glIsVertexArray (GLuint array); +GL_APICALL void GL_APIENTRY glGetIntegeri_v (GLenum target, GLuint index, GLint *data); +GL_APICALL void GL_APIENTRY glBeginTransformFeedback (GLenum primitiveMode); +GL_APICALL void GL_APIENTRY glEndTransformFeedback (void); +GL_APICALL void GL_APIENTRY glBindBufferRange (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glBindBufferBase (GLenum target, GLuint index, GLuint buffer); +GL_APICALL void GL_APIENTRY glTransformFeedbackVaryings (GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode); +GL_APICALL void GL_APIENTRY glGetTransformFeedbackVarying (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name); +GL_APICALL void GL_APIENTRY glVertexAttribIPointer (GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer); +GL_APICALL void GL_APIENTRY glGetVertexAttribIiv (GLuint index, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetVertexAttribIuiv (GLuint index, GLenum pname, GLuint *params); +GL_APICALL void GL_APIENTRY glVertexAttribI4i (GLuint index, GLint x, GLint y, GLint z, GLint w); +GL_APICALL void GL_APIENTRY 
glVertexAttribI4ui (GLuint index, GLuint x, GLuint y, GLuint z, GLuint w); +GL_APICALL void GL_APIENTRY glVertexAttribI4iv (GLuint index, const GLint *v); +GL_APICALL void GL_APIENTRY glVertexAttribI4uiv (GLuint index, const GLuint *v); +GL_APICALL void GL_APIENTRY glGetUniformuiv (GLuint program, GLint location, GLuint *params); +GL_APICALL GLint GL_APIENTRY glGetFragDataLocation (GLuint program, const GLchar *name); +GL_APICALL void GL_APIENTRY glUniform1ui (GLint location, GLuint v0); +GL_APICALL void GL_APIENTRY glUniform2ui (GLint location, GLuint v0, GLuint v1); +GL_APICALL void GL_APIENTRY glUniform3ui (GLint location, GLuint v0, GLuint v1, GLuint v2); +GL_APICALL void GL_APIENTRY glUniform4ui (GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3); +GL_APICALL void GL_APIENTRY glUniform1uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform2uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform3uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glUniform4uiv (GLint location, GLsizei count, const GLuint *value); +GL_APICALL void GL_APIENTRY glClearBufferiv (GLenum buffer, GLint drawbuffer, const GLint *value); +GL_APICALL void GL_APIENTRY glClearBufferuiv (GLenum buffer, GLint drawbuffer, const GLuint *value); +GL_APICALL void GL_APIENTRY glClearBufferfv (GLenum buffer, GLint drawbuffer, const GLfloat *value); +GL_APICALL void GL_APIENTRY glClearBufferfi (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil); +GL_APICALL const GLubyte *GL_APIENTRY glGetStringi (GLenum name, GLuint index); +GL_APICALL void GL_APIENTRY glCopyBufferSubData (GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size); +GL_APICALL void GL_APIENTRY glGetUniformIndices (GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices); +GL_APICALL void GL_APIENTRY glGetActiveUniformsiv (GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params); +GL_APICALL GLuint GL_APIENTRY glGetUniformBlockIndex (GLuint program, const GLchar *uniformBlockName); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockiv (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetActiveUniformBlockName (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName); +GL_APICALL void GL_APIENTRY glUniformBlockBinding (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding); +GL_APICALL void GL_APIENTRY glDrawArraysInstanced (GLenum mode, GLint first, GLsizei count, GLsizei instancecount); +GL_APICALL void GL_APIENTRY glDrawElementsInstanced (GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount); +GL_APICALL GLsync GL_APIENTRY glFenceSync (GLenum condition, GLbitfield flags); +GL_APICALL GLboolean GL_APIENTRY glIsSync (GLsync sync); +GL_APICALL void GL_APIENTRY glDeleteSync (GLsync sync); +GL_APICALL GLenum GL_APIENTRY glClientWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glWaitSync (GLsync sync, GLbitfield flags, GLuint64 timeout); +GL_APICALL void GL_APIENTRY glGetInteger64v (GLenum pname, GLint64 *data); +GL_APICALL void GL_APIENTRY glGetSynciv (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei *length, GLint *values); +GL_APICALL void GL_APIENTRY glGetInteger64i_v (GLenum target, GLuint index, GLint64 *data); 
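glFenceSync and glClientWaitSync, declared above, are the ES 3.0 way to make the CPU wait for previously submitted GPU work; a small sketch (not from the vendored header), assuming a current GLES 3.0 context and an illustrative helper name:

#include <GLES3/gl3.h>

// Inserts a fence after the GL commands issued so far and blocks the calling
// thread until the GPU reaches it or the timeout (in nanoseconds) expires.
static bool WaitForGpu(GLuint64 timeoutNs) {
  GLsync fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
  if (fence == nullptr) {
    return false;
  }
  GLenum status = glClientWaitSync(fence, GL_SYNC_FLUSH_COMMANDS_BIT, timeoutNs);
  glDeleteSync(fence);
  return status == GL_ALREADY_SIGNALED || status == GL_CONDITION_SATISFIED;
}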
+GL_APICALL void GL_APIENTRY glGetBufferParameteri64v (GLenum target, GLenum pname, GLint64 *params); +GL_APICALL void GL_APIENTRY glGenSamplers (GLsizei count, GLuint *samplers); +GL_APICALL void GL_APIENTRY glDeleteSamplers (GLsizei count, const GLuint *samplers); +GL_APICALL GLboolean GL_APIENTRY glIsSampler (GLuint sampler); +GL_APICALL void GL_APIENTRY glBindSampler (GLuint unit, GLuint sampler); +GL_APICALL void GL_APIENTRY glSamplerParameteri (GLuint sampler, GLenum pname, GLint param); +GL_APICALL void GL_APIENTRY glSamplerParameteriv (GLuint sampler, GLenum pname, const GLint *param); +GL_APICALL void GL_APIENTRY glSamplerParameterf (GLuint sampler, GLenum pname, GLfloat param); +GL_APICALL void GL_APIENTRY glSamplerParameterfv (GLuint sampler, GLenum pname, const GLfloat *param); +GL_APICALL void GL_APIENTRY glGetSamplerParameteriv (GLuint sampler, GLenum pname, GLint *params); +GL_APICALL void GL_APIENTRY glGetSamplerParameterfv (GLuint sampler, GLenum pname, GLfloat *params); +GL_APICALL void GL_APIENTRY glVertexAttribDivisor (GLuint index, GLuint divisor); +GL_APICALL void GL_APIENTRY glBindTransformFeedback (GLenum target, GLuint id); +GL_APICALL void GL_APIENTRY glDeleteTransformFeedbacks (GLsizei n, const GLuint *ids); +GL_APICALL void GL_APIENTRY glGenTransformFeedbacks (GLsizei n, GLuint *ids); +GL_APICALL GLboolean GL_APIENTRY glIsTransformFeedback (GLuint id); +GL_APICALL void GL_APIENTRY glPauseTransformFeedback (void); +GL_APICALL void GL_APIENTRY glResumeTransformFeedback (void); +GL_APICALL void GL_APIENTRY glGetProgramBinary (GLuint program, GLsizei bufSize, GLsizei *length, GLenum *binaryFormat, void *binary); +GL_APICALL void GL_APIENTRY glProgramBinary (GLuint program, GLenum binaryFormat, const void *binary, GLsizei length); +GL_APICALL void GL_APIENTRY glProgramParameteri (GLuint program, GLenum pname, GLint value); +GL_APICALL void GL_APIENTRY glInvalidateFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments); +GL_APICALL void GL_APIENTRY glInvalidateSubFramebuffer (GLenum target, GLsizei numAttachments, const GLenum *attachments, GLint x, GLint y, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage2D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height); +GL_APICALL void GL_APIENTRY glTexStorage3D (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth); +GL_APICALL void GL_APIENTRY glGetInternalformativ (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint *params); +#endif +#endif /* GL_ES_VERSION_3_0 */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/GLES3/gl3platform.h b/vendor/swiftshader/include/GLES3/gl3platform.h new file mode 100644 index 00000000..ca9d7a6d --- /dev/null +++ b/vendor/swiftshader/include/GLES3/gl3platform.h @@ -0,0 +1,38 @@ +#ifndef __gl3platform_h_ +#define __gl3platform_h_ + +/* +** Copyright (c) 2017 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* Platform-specific types and definitions for OpenGL ES 3.X gl3.h + * + * Adopters may modify khrplatform.h and this file to suit their platform. + * Please contribute modifications back to Khronos as pull requests on the + * public github repository: + * https://github.com/KhronosGroup/OpenGL-Registry + */ + +#include + +#ifndef GL_APICALL +#define GL_APICALL KHRONOS_APICALL +#endif + +#ifndef GL_APIENTRY +#define GL_APIENTRY KHRONOS_APIENTRY +#endif + +#endif /* __gl3platform_h_ */ diff --git a/vendor/swiftshader/include/KHR/khrplatform.h b/vendor/swiftshader/include/KHR/khrplatform.h new file mode 100644 index 00000000..975bbffe --- /dev/null +++ b/vendor/swiftshader/include/KHR/khrplatform.h @@ -0,0 +1,282 @@ +#ifndef __khrplatform_h_ +#define __khrplatform_h_ + +/* +** Copyright (c) 2008-2018 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ + +/* Khronos platform-specific types and definitions. + * + * The master copy of khrplatform.h is maintained in the Khronos EGL + * Registry repository at https://github.com/KhronosGroup/EGL-Registry + * The last semantic modification to khrplatform.h was at commit ID: + * 67a3e0864c2d75ea5287b9f3d2eb74a745936692 + * + * Adopters may modify this file to suit their platform. Adopters are + * encouraged to submit platform specific modifications to the Khronos + * group so that they can be included in future versions of this file. + * Please submit changes by filing pull requests or issues on + * the EGL Registry repository linked above. + * + * + * See the Implementer's Guidelines for information about where this file + * should be located on your system and for more details of its use: + * http://www.khronos.org/registry/implementers_guide.pdf + * + * This file should be included as + * #include + * by Khronos client API header files that use its types and defines. + * + * The types in khrplatform.h should only be used to define API-specific types. 
+ * + * Types defined in khrplatform.h: + * khronos_int8_t signed 8 bit + * khronos_uint8_t unsigned 8 bit + * khronos_int16_t signed 16 bit + * khronos_uint16_t unsigned 16 bit + * khronos_int32_t signed 32 bit + * khronos_uint32_t unsigned 32 bit + * khronos_int64_t signed 64 bit + * khronos_uint64_t unsigned 64 bit + * khronos_intptr_t signed same number of bits as a pointer + * khronos_uintptr_t unsigned same number of bits as a pointer + * khronos_ssize_t signed size + * khronos_usize_t unsigned size + * khronos_float_t signed 32 bit floating point + * khronos_time_ns_t unsigned 64 bit time in nanoseconds + * khronos_utime_nanoseconds_t unsigned time interval or absolute time in + * nanoseconds + * khronos_stime_nanoseconds_t signed time interval in nanoseconds + * khronos_boolean_enum_t enumerated boolean type. This should + * only be used as a base type when a client API's boolean type is + * an enum. Client APIs which use an integer or other type for + * booleans cannot use this as the base type for their boolean. + * + * Tokens defined in khrplatform.h: + * + * KHRONOS_FALSE, KHRONOS_TRUE Enumerated boolean false/true values. + * + * KHRONOS_SUPPORT_INT64 is 1 if 64 bit integers are supported; otherwise 0. + * KHRONOS_SUPPORT_FLOAT is 1 if floats are supported; otherwise 0. + * + * Calling convention macros defined in this file: + * KHRONOS_APICALL + * KHRONOS_APIENTRY + * KHRONOS_APIATTRIBUTES + * + * These may be used in function prototypes as: + * + * KHRONOS_APICALL void KHRONOS_APIENTRY funcname( + * int arg1, + * int arg2) KHRONOS_APIATTRIBUTES; + */ + +/*------------------------------------------------------------------------- + * Definition of KHRONOS_APICALL + *------------------------------------------------------------------------- + * This precedes the return type of the function in the function prototype. + */ +#if defined(_WIN32) && !defined(__SCITECH_SNAP__) +# define KHRONOS_APICALL __declspec(dllimport) +#elif defined (__SYMBIAN32__) +# define KHRONOS_APICALL IMPORT_C +#elif defined(__ANDROID__) +# define KHRONOS_APICALL __attribute__((visibility("default"))) +#else +# define KHRONOS_APICALL +#endif + +/*------------------------------------------------------------------------- + * Definition of KHRONOS_APIENTRY + *------------------------------------------------------------------------- + * This follows the return type of the function and precedes the function + * name in the function prototype. + */ +#if defined(_WIN32) && !defined(_WIN32_WCE) && !defined(__SCITECH_SNAP__) + /* Win32 but not WinCE */ +# define KHRONOS_APIENTRY __stdcall +#else +# define KHRONOS_APIENTRY +#endif + +/*------------------------------------------------------------------------- + * Definition of KHRONOS_APIATTRIBUTES + *------------------------------------------------------------------------- + * This follows the closing parenthesis of the function prototype arguments. 
+ */ +#if defined (__ARMCC_2__) +#define KHRONOS_APIATTRIBUTES __softfp +#else +#define KHRONOS_APIATTRIBUTES +#endif + +/*------------------------------------------------------------------------- + * basic type definitions + *-----------------------------------------------------------------------*/ +#if (defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L) || defined(__GNUC__) || defined(__SCO__) || defined(__USLC__) + + +/* + * Using + */ +#include +typedef int32_t khronos_int32_t; +typedef uint32_t khronos_uint32_t; +typedef int64_t khronos_int64_t; +typedef uint64_t khronos_uint64_t; +#define KHRONOS_SUPPORT_INT64 1 +#define KHRONOS_SUPPORT_FLOAT 1 + +#elif defined(__VMS ) || defined(__sgi) + +/* + * Using + */ +#include +typedef int32_t khronos_int32_t; +typedef uint32_t khronos_uint32_t; +typedef int64_t khronos_int64_t; +typedef uint64_t khronos_uint64_t; +#define KHRONOS_SUPPORT_INT64 1 +#define KHRONOS_SUPPORT_FLOAT 1 + +#elif defined(_WIN32) && !defined(__SCITECH_SNAP__) + +/* + * Win32 + */ +typedef __int32 khronos_int32_t; +typedef unsigned __int32 khronos_uint32_t; +typedef __int64 khronos_int64_t; +typedef unsigned __int64 khronos_uint64_t; +#define KHRONOS_SUPPORT_INT64 1 +#define KHRONOS_SUPPORT_FLOAT 1 + +#elif defined(__sun__) || defined(__digital__) + +/* + * Sun or Digital + */ +typedef int khronos_int32_t; +typedef unsigned int khronos_uint32_t; +#if defined(__arch64__) || defined(_LP64) +typedef long int khronos_int64_t; +typedef unsigned long int khronos_uint64_t; +#else +typedef long long int khronos_int64_t; +typedef unsigned long long int khronos_uint64_t; +#endif /* __arch64__ */ +#define KHRONOS_SUPPORT_INT64 1 +#define KHRONOS_SUPPORT_FLOAT 1 + +#elif 0 + +/* + * Hypothetical platform with no float or int64 support + */ +typedef int khronos_int32_t; +typedef unsigned int khronos_uint32_t; +#define KHRONOS_SUPPORT_INT64 0 +#define KHRONOS_SUPPORT_FLOAT 0 + +#else + +/* + * Generic fallback + */ +#include +typedef int32_t khronos_int32_t; +typedef uint32_t khronos_uint32_t; +typedef int64_t khronos_int64_t; +typedef uint64_t khronos_uint64_t; +#define KHRONOS_SUPPORT_INT64 1 +#define KHRONOS_SUPPORT_FLOAT 1 + +#endif + + +/* + * Types that are (so far) the same on all platforms + */ +typedef signed char khronos_int8_t; +typedef unsigned char khronos_uint8_t; +typedef signed short int khronos_int16_t; +typedef unsigned short int khronos_uint16_t; + +/* + * Types that differ between LLP64 and LP64 architectures - in LLP64, + * pointers are 64 bits, but 'long' is still 32 bits. Win64 appears + * to be the only LLP64 architecture in current use. + */ +#ifdef _WIN64 +typedef signed long long int khronos_intptr_t; +typedef unsigned long long int khronos_uintptr_t; +typedef signed long long int khronos_ssize_t; +typedef unsigned long long int khronos_usize_t; +#else +typedef signed long int khronos_intptr_t; +typedef unsigned long int khronos_uintptr_t; +typedef signed long int khronos_ssize_t; +typedef unsigned long int khronos_usize_t; +#endif + +#if KHRONOS_SUPPORT_FLOAT +/* + * Float type + */ +typedef float khronos_float_t; +#endif + +#if KHRONOS_SUPPORT_INT64 +/* Time types + * + * These types can be used to represent a time interval in nanoseconds or + * an absolute Unadjusted System Time. Unadjusted System Time is the number + * of nanoseconds since some arbitrary system event (e.g. since the last + * time the system booted). The Unadjusted System Time is an unsigned + * 64 bit value that wraps back to 0 every 584 years. 
Time intervals + * may be either signed or unsigned. + */ +typedef khronos_uint64_t khronos_utime_nanoseconds_t; +typedef khronos_int64_t khronos_stime_nanoseconds_t; +#endif + +/* + * Dummy value used to pad enum types to 32 bits. + */ +#ifndef KHRONOS_MAX_ENUM +#define KHRONOS_MAX_ENUM 0x7FFFFFFF +#endif + +/* + * Enumerated boolean type + * + * Values other than zero should be considered to be true. Therefore + * comparisons should not be made against KHRONOS_TRUE. + */ +typedef enum { + KHRONOS_FALSE = 0, + KHRONOS_TRUE = 1, + KHRONOS_BOOLEAN_ENUM_FORCE_SIZE = KHRONOS_MAX_ENUM +} khronos_boolean_enum_t; + +#endif /* __khrplatform_h_ */ diff --git a/vendor/swiftshader/include/vulkan/vk_google_filtering_precision.h b/vendor/swiftshader/include/vulkan/vk_google_filtering_precision.h new file mode 100644 index 00000000..65e3bcf1 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vk_google_filtering_precision.h @@ -0,0 +1,41 @@ +// Copyright 2020 The SwiftShader Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "vulkan_core.h" + +// THIS FILE SHOULD BE DELETED IF VK_GOOGLE_sampler_filtering_precision IS EVER ADDED TO THE VULKAN HEADERS +#ifdef VK_GOOGLE_sampler_filtering_precision +#error "VK_GOOGLE_sampler_filtering_precision is already defined in the Vulkan headers, you can delete this file" +#endif + +static constexpr VkStructureType VK_STRUCTURE_TYPE_SAMPLER_FILTERING_PRECISION_GOOGLE = static_cast(1000264000); + +#define VK_GOOGLE_sampler_filtering_precisions 1 +#define VK_GOOGLE_SAMPLER_FILTERING_PRECISION_SPEC_VERSION 1 +#define VK_GOOGLE_SAMPLER_FILTERING_PRECISION_EXTENSION_NAME "VK_GOOGLE_sampler_filtering_precision" + +typedef enum VkSamplerFilteringPrecisionModeGOOGLE { + VK_SAMPLER_FILTERING_PRECISION_MODE_LOW_GOOGLE = 0, + VK_SAMPLER_FILTERING_PRECISION_MODE_HIGH_GOOGLE = 1, + VK_SAMPLER_FILTERING_PRECISION_MODE_BEGIN_RANGE_GOOGLE = VK_SAMPLER_FILTERING_PRECISION_MODE_LOW_GOOGLE, + VK_SAMPLER_FILTERING_PRECISION_MODE_END_RANGE_GOOGLE = VK_SAMPLER_FILTERING_PRECISION_MODE_HIGH_GOOGLE, + VK_SAMPLER_FILTERING_PRECISION_MODE_RANGE_SIZE_GOOGLE = (VK_SAMPLER_FILTERING_PRECISION_MODE_HIGH_GOOGLE - VK_SAMPLER_FILTERING_PRECISION_MODE_LOW_GOOGLE + 1), + VK_SAMPLER_FILTERING_PRECISION_MODE_MAX_ENUM_GOOGLE = 0x7FFFFFFF +} VkSamplerFilteringPrecisionModeGOOGLE; + +typedef struct VkSamplerFilteringPrecisionGOOGLE { + VkStructureType sType; + const void* pNext; + VkSamplerFilteringPrecisionModeGOOGLE samplerFilteringPrecisionMode; +} VkSamplerFilteringPrecisionGOOGLE; diff --git a/vendor/swiftshader/include/vulkan/vk_icd.h b/vendor/swiftshader/include/vulkan/vk_icd.h new file mode 100644 index 00000000..ae006d06 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vk_icd.h @@ -0,0 +1,245 @@ +// +// File: vk_icd.h +// +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +#ifndef VKICD_H +#define VKICD_H + +#include "vulkan.h" +#include + +// Loader-ICD version negotiation API. Versions add the following features: +// Version 0 - Initial. Doesn't support vk_icdGetInstanceProcAddr +// or vk_icdNegotiateLoaderICDInterfaceVersion. +// Version 1 - Add support for vk_icdGetInstanceProcAddr. +// Version 2 - Add Loader/ICD Interface version negotiation +// via vk_icdNegotiateLoaderICDInterfaceVersion. +// Version 3 - Add ICD creation/destruction of KHR_surface objects. +// Version 4 - Add unknown physical device extension qyering via +// vk_icdGetPhysicalDeviceProcAddr. +// Version 5 - Tells ICDs that the loader is now paying attention to the +// application version of Vulkan passed into the ApplicationInfo +// structure during vkCreateInstance. This will tell the ICD +// that if the loader is older, it should automatically fail a +// call for any API version > 1.0. Otherwise, the loader will +// manually determine if it can support the expected version. +// Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices. +#define CURRENT_LOADER_ICD_INTERFACE_VERSION 6 +#define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0 +#define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4 + +// Old typedefs that don't follow a proper naming convention but are preserved for compatibility +typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion); +// This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this +// file directly, it won't be found. 
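The comment above describes how the loader and an ICD negotiate an interface version through vk_icdNegotiateLoaderICDInterfaceVersion; a hypothetical ICD-side sketch of that handshake (the version limits chosen here are illustrative, not SwiftShader's), assuming the SwiftShader include directory is on the include path:

#include <vulkan/vk_icd.h>

extern "C" VKAPI_ATTR VkResult VKAPI_CALL
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion) {
  constexpr uint32_t kHighestSupported = 5;  // illustrative value
  constexpr uint32_t kLowestSupported = 2;   // illustrative value
  if (*pSupportedVersion > kHighestSupported) {
    // The loader is newer than this ICD: negotiate down to what the ICD knows.
    *pSupportedVersion = kHighestSupported;
  }
  // A loader older than the ICD's minimum cannot be supported.
  return (*pSupportedVersion >= kLowestSupported) ? VK_SUCCESS
                                                  : VK_ERROR_INCOMPATIBLE_DRIVER;
}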
+#ifndef PFN_GetPhysicalDeviceProcAddr +typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName); +#endif + +// Typedefs for loader/ICD interface +typedef VkResult (VKAPI_PTR *PFN_vk_icdNegotiateLoaderICDInterfaceVersion)(uint32_t* pVersion); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) +typedef VkResult (VKAPI_PTR *PFN_vk_icdEnumerateAdapterPhysicalDevices)(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif + +// Prototypes for loader/ICD interface +#if !defined(VK_NO_PROTOTYPES) +#ifdef __cplusplus +extern "C" { +#endif + VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance isntance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) + VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif +#ifdef __cplusplus +} +#endif +#endif + +/* + * The ICD must reserve space for a pointer for the loader's dispatch + * table, at the start of . + * The ICD must initialize this variable using the SET_LOADER_MAGIC_VALUE macro. + */ + +#define ICD_LOADER_MAGIC 0x01CDC0DE + +typedef union { + uintptr_t loaderMagic; + void *loaderData; +} VK_LOADER_DATA; + +static inline void set_loader_magic_value(void *pNewObject) { + VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject; + loader_info->loaderMagic = ICD_LOADER_MAGIC; +} + +static inline bool valid_loader_magic_value(void *pNewObject) { + const VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject; + return (loader_info->loaderMagic & 0xffffffff) == ICD_LOADER_MAGIC; +} + +/* + * Windows and Linux ICDs will treat VkSurfaceKHR as a pointer to a struct that + * contains the platform-specific connection and surface information. 
+ */ +typedef enum { + VK_ICD_WSI_PLATFORM_MIR, + VK_ICD_WSI_PLATFORM_WAYLAND, + VK_ICD_WSI_PLATFORM_WIN32, + VK_ICD_WSI_PLATFORM_XCB, + VK_ICD_WSI_PLATFORM_XLIB, + VK_ICD_WSI_PLATFORM_ANDROID, + VK_ICD_WSI_PLATFORM_MACOS, + VK_ICD_WSI_PLATFORM_IOS, + VK_ICD_WSI_PLATFORM_DISPLAY, + VK_ICD_WSI_PLATFORM_HEADLESS, + VK_ICD_WSI_PLATFORM_METAL, + VK_ICD_WSI_PLATFORM_DIRECTFB, + VK_ICD_WSI_PLATFORM_VI, + VK_ICD_WSI_PLATFORM_GGP, + VK_ICD_WSI_PLATFORM_SCREEN, +} VkIcdWsiPlatform; + +typedef struct { + VkIcdWsiPlatform platform; +} VkIcdSurfaceBase; + +#ifdef VK_USE_PLATFORM_MIR_KHR +typedef struct { + VkIcdSurfaceBase base; + MirConnection *connection; + MirSurface *mirSurface; +} VkIcdSurfaceMir; +#endif // VK_USE_PLATFORM_MIR_KHR + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +typedef struct { + VkIcdSurfaceBase base; + struct wl_display *display; + struct wl_surface *surface; +} VkIcdSurfaceWayland; +#endif // VK_USE_PLATFORM_WAYLAND_KHR + +#ifdef VK_USE_PLATFORM_WIN32_KHR +typedef struct { + VkIcdSurfaceBase base; + HINSTANCE hinstance; + HWND hwnd; +} VkIcdSurfaceWin32; +#endif // VK_USE_PLATFORM_WIN32_KHR + +#ifdef VK_USE_PLATFORM_XCB_KHR +typedef struct { + VkIcdSurfaceBase base; + xcb_connection_t *connection; + xcb_window_t window; +} VkIcdSurfaceXcb; +#endif // VK_USE_PLATFORM_XCB_KHR + +#ifdef VK_USE_PLATFORM_XLIB_KHR +typedef struct { + VkIcdSurfaceBase base; + Display *dpy; + Window window; +} VkIcdSurfaceXlib; +#endif // VK_USE_PLATFORM_XLIB_KHR + +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +typedef struct { + VkIcdSurfaceBase base; + IDirectFB *dfb; + IDirectFBSurface *surface; +} VkIcdSurfaceDirectFB; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT + +#ifdef VK_USE_PLATFORM_ANDROID_KHR +typedef struct { + VkIcdSurfaceBase base; + struct ANativeWindow *window; +} VkIcdSurfaceAndroid; +#endif // VK_USE_PLATFORM_ANDROID_KHR + +#ifdef VK_USE_PLATFORM_MACOS_MVK +typedef struct { + VkIcdSurfaceBase base; + const void *pView; +} VkIcdSurfaceMacOS; +#endif // VK_USE_PLATFORM_MACOS_MVK + +#ifdef VK_USE_PLATFORM_IOS_MVK +typedef struct { + VkIcdSurfaceBase base; + const void *pView; +} VkIcdSurfaceIOS; +#endif // VK_USE_PLATFORM_IOS_MVK + +#ifdef VK_USE_PLATFORM_GGP +typedef struct { + VkIcdSurfaceBase base; + GgpStreamDescriptor streamDescriptor; +} VkIcdSurfaceGgp; +#endif // VK_USE_PLATFORM_GGP + +typedef struct { + VkIcdSurfaceBase base; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkIcdSurfaceDisplay; + +typedef struct { + VkIcdSurfaceBase base; +} VkIcdSurfaceHeadless; + +#ifdef VK_USE_PLATFORM_METAL_EXT +typedef struct { + VkIcdSurfaceBase base; + const CAMetalLayer *pLayer; +} VkIcdSurfaceMetal; +#endif // VK_USE_PLATFORM_METAL_EXT + +#ifdef VK_USE_PLATFORM_VI_NN +typedef struct { + VkIcdSurfaceBase base; + void *window; +} VkIcdSurfaceVi; +#endif // VK_USE_PLATFORM_VI_NN + +#ifdef VK_USE_PLATFORM_SCREEN_QNX +typedef struct { + VkIcdSurfaceBase base; + struct _screen_context *context; + struct _screen_window *window; +} VkIcdSurfaceScreen; +#endif // VK_USE_PLATFORM_SCREEN_QNX + +#endif // VKICD_H diff --git a/vendor/swiftshader/include/vulkan/vk_layer.h b/vendor/swiftshader/include/vulkan/vk_layer.h new file mode 100644 index 00000000..0651870c --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vk_layer.h @@ -0,0 +1,210 @@ +// +// File: vk_layer.h +// +/* + * Copyright (c) 2015-2017 The Khronos Group Inc. 
+ * Copyright (c) 2015-2017 Valve Corporation + * Copyright (c) 2015-2017 LunarG, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +/* Need to define dispatch table + * Core struct can then have ptr to dispatch table at the top + * Along with object ptrs for current and next OBJ + */ +#pragma once + +#include "vulkan.h" +#if defined(__GNUC__) && __GNUC__ >= 4 +#define VK_LAYER_EXPORT __attribute__((visibility("default"))) +#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590) +#define VK_LAYER_EXPORT __attribute__((visibility("default"))) +#else +#define VK_LAYER_EXPORT +#endif + +#define MAX_NUM_UNKNOWN_EXTS 250 + + // Loader-Layer version negotiation API. Versions add the following features: + // Versions 0/1 - Initial. Doesn't support vk_layerGetPhysicalDeviceProcAddr + // or vk_icdNegotiateLoaderLayerInterfaceVersion. + // Version 2 - Add support for vk_layerGetPhysicalDeviceProcAddr and + // vk_icdNegotiateLoaderLayerInterfaceVersion. +#define CURRENT_LOADER_LAYER_INTERFACE_VERSION 2 +#define MIN_SUPPORTED_LOADER_LAYER_INTERFACE_VERSION 1 + +#define VK_CURRENT_CHAIN_VERSION 1 + +// Typedef for use in the interfaces below +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName); + +// Version negotiation values +typedef enum VkNegotiateLayerStructType { + LAYER_NEGOTIATE_UNINTIALIZED = 0, + LAYER_NEGOTIATE_INTERFACE_STRUCT = 1, +} VkNegotiateLayerStructType; + +// Version negotiation structures +typedef struct VkNegotiateLayerInterface { + VkNegotiateLayerStructType sType; + void *pNext; + uint32_t loaderLayerInterfaceVersion; + PFN_vkGetInstanceProcAddr pfnGetInstanceProcAddr; + PFN_vkGetDeviceProcAddr pfnGetDeviceProcAddr; + PFN_GetPhysicalDeviceProcAddr pfnGetPhysicalDeviceProcAddr; +} VkNegotiateLayerInterface; + +// Version negotiation functions +typedef VkResult (VKAPI_PTR *PFN_vkNegotiateLoaderLayerInterfaceVersion)(VkNegotiateLayerInterface *pVersionStruct); + +// Function prototype for unknown physical device extension command +typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device); + +// ------------------------------------------------------------------------------------------------ +// CreateInstance and CreateDevice support structures + +/* Sub type of structure for instance and device loader ext of CreateInfo. 
+ * When sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO + * or sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO + * then VkLayerFunction indicates struct type pointed to by pNext + */ +typedef enum VkLayerFunction_ { + VK_LAYER_LINK_INFO = 0, + VK_LOADER_DATA_CALLBACK = 1, + VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2, + VK_LOADER_FEATURES = 3, +} VkLayerFunction; + +typedef struct VkLayerInstanceLink_ { + struct VkLayerInstanceLink_ *pNext; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; + PFN_GetPhysicalDeviceProcAddr pfnNextGetPhysicalDeviceProcAddr; +} VkLayerInstanceLink; + +/* + * When creating the device chain the loader needs to pass + * down information about it's device structure needed at + * the end of the chain. Passing the data via the + * VkLayerDeviceInfo avoids issues with finding the + * exact instance being used. + */ +typedef struct VkLayerDeviceInfo_ { + void *device_info; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; +} VkLayerDeviceInfo; + +typedef VkResult (VKAPI_PTR *PFN_vkSetInstanceLoaderData)(VkInstance instance, + void *object); +typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device, + void *object); +typedef VkResult (VKAPI_PTR *PFN_vkLayerCreateDevice)(VkInstance instance, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA); +typedef void (VKAPI_PTR *PFN_vkLayerDestroyDevice)(VkDevice physicalDevice, const VkAllocationCallbacks *pAllocator, PFN_vkDestroyDevice destroyFunction); + +typedef enum VkLoaderFeastureFlagBits { + VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING = 0x00000001, +} VkLoaderFlagBits; +typedef VkFlags VkLoaderFeatureFlags; + +typedef struct { + VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO + const void *pNext; + VkLayerFunction function; + union { + VkLayerInstanceLink *pLayerInfo; + PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData; + struct { + PFN_vkLayerCreateDevice pfnLayerCreateDevice; + PFN_vkLayerDestroyDevice pfnLayerDestroyDevice; + } layerDevice; + VkLoaderFeatureFlags loaderFeatures; + } u; +} VkLayerInstanceCreateInfo; + +typedef struct VkLayerDeviceLink_ { + struct VkLayerDeviceLink_ *pNext; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; + PFN_vkGetDeviceProcAddr pfnNextGetDeviceProcAddr; +} VkLayerDeviceLink; + +typedef struct { + VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO + const void *pNext; + VkLayerFunction function; + union { + VkLayerDeviceLink *pLayerInfo; + PFN_vkSetDeviceLoaderData pfnSetDeviceLoaderData; + } u; +} VkLayerDeviceCreateInfo; + +#ifdef __cplusplus +extern "C" { +#endif + +VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct); + +typedef enum VkChainType { + VK_CHAIN_TYPE_UNKNOWN = 0, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES = 1, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES = 2, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION = 3, +} VkChainType; + +typedef struct VkChainHeader { + VkChainType type; + uint32_t version; + uint32_t size; +} VkChainHeader; + +typedef struct VkEnumerateInstanceExtensionPropertiesChain { + VkChainHeader header; + VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceExtensionPropertiesChain *, const char *, uint32_t *, + VkExtensionProperties *); + const struct VkEnumerateInstanceExtensionPropertiesChain *pNextLink; + +#if 
defined(__cplusplus) + inline VkResult CallDown(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties) const { + return pfnNextLayer(pNextLink, pLayerName, pPropertyCount, pProperties); + } +#endif +} VkEnumerateInstanceExtensionPropertiesChain; + +typedef struct VkEnumerateInstanceLayerPropertiesChain { + VkChainHeader header; + VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceLayerPropertiesChain *, uint32_t *, VkLayerProperties *); + const struct VkEnumerateInstanceLayerPropertiesChain *pNextLink; + +#if defined(__cplusplus) + inline VkResult CallDown(uint32_t *pPropertyCount, VkLayerProperties *pProperties) const { + return pfnNextLayer(pNextLink, pPropertyCount, pProperties); + } +#endif +} VkEnumerateInstanceLayerPropertiesChain; + +typedef struct VkEnumerateInstanceVersionChain { + VkChainHeader header; + VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceVersionChain *, uint32_t *); + const struct VkEnumerateInstanceVersionChain *pNextLink; + +#if defined(__cplusplus) + inline VkResult CallDown(uint32_t *pApiVersion) const { + return pfnNextLayer(pNextLink, pApiVersion); + } +#endif +} VkEnumerateInstanceVersionChain; + +#ifdef __cplusplus +} +#endif diff --git a/vendor/swiftshader/include/vulkan/vk_platform.h b/vendor/swiftshader/include/vulkan/vk_platform.h new file mode 100644 index 00000000..18b913ab --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vk_platform.h @@ -0,0 +1,84 @@ +// +// File: vk_platform.h +// +/* +** Copyright 2014-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + + +#ifndef VK_PLATFORM_H_ +#define VK_PLATFORM_H_ + +#ifdef __cplusplus +extern "C" +{ +#endif // __cplusplus + +/* +*************************************************************************************************** +* Platform-specific directives and type declarations +*************************************************************************************************** +*/ + +/* Platform-specific calling convention macros. + * + * Platforms should define these so that Vulkan clients call Vulkan commands + * with the same calling conventions that the Vulkan implementation expects. + * + * VKAPI_ATTR - Placed before the return type in function declarations. + * Useful for C++11 and GCC/Clang-style function attribute syntax. + * VKAPI_CALL - Placed after the return type in function declarations. + * Useful for MSVC-style calling convention syntax. + * VKAPI_PTR - Placed between the '(' and '*' in function pointer types. + * + * Function declaration: VKAPI_ATTR void VKAPI_CALL vkCommand(void); + * Function pointer type: typedef void (VKAPI_PTR *PFN_vkCommand)(void); + */ +#if defined(_WIN32) + // On Windows, Vulkan commands use the stdcall convention + #define VKAPI_ATTR + #define VKAPI_CALL __stdcall + #define VKAPI_PTR VKAPI_CALL +#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7 + #error "Vulkan isn't supported for the 'armeabi' NDK ABI" +#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE) + // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat" + // calling convention, i.e. float parameters are passed in registers. This + // is true even if the rest of the application passes floats on the stack, + // as it does by default when compiling for the armeabi-v7a NDK ABI. 
+ #define VKAPI_ATTR __attribute__((pcs("aapcs-vfp"))) + #define VKAPI_CALL + #define VKAPI_PTR VKAPI_ATTR +#else + // On other platforms, use the default calling convention + #define VKAPI_ATTR + #define VKAPI_CALL + #define VKAPI_PTR +#endif + +#if !defined(VK_NO_STDDEF_H) + #include +#endif // !defined(VK_NO_STDDEF_H) + +#if !defined(VK_NO_STDINT_H) + #if defined(_MSC_VER) && (_MSC_VER < 1600) + typedef signed __int8 int8_t; + typedef unsigned __int8 uint8_t; + typedef signed __int16 int16_t; + typedef unsigned __int16 uint16_t; + typedef signed __int32 int32_t; + typedef unsigned __int32 uint32_t; + typedef signed __int64 int64_t; + typedef unsigned __int64 uint64_t; + #else + #include + #endif +#endif // !defined(VK_NO_STDINT_H) + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif diff --git a/vendor/swiftshader/include/vulkan/vk_sdk_platform.h b/vendor/swiftshader/include/vulkan/vk_sdk_platform.h new file mode 100644 index 00000000..96d86769 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vk_sdk_platform.h @@ -0,0 +1,69 @@ +// +// File: vk_sdk_platform.h +// +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VK_SDK_PLATFORM_H +#define VK_SDK_PLATFORM_H + +#if defined(_WIN32) +#define NOMINMAX +#ifndef __cplusplus +#undef inline +#define inline __inline +#endif // __cplusplus + +#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) +// C99: +// Microsoft didn't implement C99 in Visual Studio; but started adding it with +// VS2013. However, VS2013 still didn't have snprintf(). The following is a +// work-around (Note: The _CRT_SECURE_NO_WARNINGS macro must be set in the +// "CMakeLists.txt" file). +// NOTE: This is fixed in Visual Studio 2015. +#define snprintf _snprintf +#endif + +#define strdup _strdup + +#endif // _WIN32 + +// Check for noexcept support using clang, with fallback to Windows or GCC version numbers +#ifndef NOEXCEPT +#if defined(__clang__) +#if __has_feature(cxx_noexcept) +#define HAS_NOEXCEPT +#endif +#else +#if defined(__GXX_EXPERIMENTAL_CXX0X__) && __GNUC__ * 10 + __GNUC_MINOR__ >= 46 +#define HAS_NOEXCEPT +#else +#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023026 && defined(_HAS_EXCEPTIONS) && _HAS_EXCEPTIONS +#define HAS_NOEXCEPT +#endif +#endif +#endif + +#ifdef HAS_NOEXCEPT +#define NOEXCEPT noexcept +#else +#define NOEXCEPT +#endif +#endif + +#endif // VK_SDK_PLATFORM_H diff --git a/vendor/swiftshader/include/vulkan/vulkan.h b/vendor/swiftshader/include/vulkan/vulkan.h new file mode 100644 index 00000000..f916bcc9 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan.h @@ -0,0 +1,94 @@ +#ifndef VULKAN_H_ +#define VULKAN_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. 
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+#include "vk_platform.h"
+#include "vulkan_core.h"
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+#include "vulkan_android.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+#include <zircon/types.h>
+#include "vulkan_fuchsia.h"
+#include "vulkan_fuchsia_extras.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+#include "vulkan_ios.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+#include "vulkan_macos.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+#include "vulkan_metal.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_VI_NN
+#include "vulkan_vi.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+#include <wayland-client.h>
+#include "vulkan_wayland.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#include <windows.h>
+#include "vulkan_win32.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+#include <xcb/xcb.h>
+#include "vulkan_xcb.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+#include <X11/Xlib.h>
+#include "vulkan_xlib.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+#include <directfb.h>
+#include "vulkan_directfb.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#include <X11/Xlib.h>
+#include <X11/extensions/Xrandr.h>
+#include "vulkan_xlib_xrandr.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_GGP
+#include <ggp_c/vulkan_types.h>
+#include "vulkan_ggp.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_SCREEN_QNX
+#include <screen/screen.h>
+#include "vulkan_screen.h"
+#endif
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#include "vulkan_beta.h"
+#endif
+
+#endif // VULKAN_H_
diff --git a/vendor/swiftshader/include/vulkan/vulkan.hpp b/vendor/swiftshader/include/vulkan/vulkan.hpp
new file mode 100644
index 00000000..ca4b2e70
--- /dev/null
+++ b/vendor/swiftshader/include/vulkan/vulkan.hpp
@@ -0,0 +1,126130 @@
+// Copyright 2015-2021 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_HPP
+#define VULKAN_HPP
+
+#if defined( _MSVC_LANG )
+# define VULKAN_HPP_CPLUSPLUS _MSVC_LANG
+#else
+# define VULKAN_HPP_CPLUSPLUS __cplusplus
+#endif
+
+#if 201703L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 20
+#elif 201402L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 17
+#elif 201103L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 14
+#elif 199711L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 11
+#else
+# error "vulkan.hpp needs at least c++ standard version 11"
+#endif
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#if 17 <= VULKAN_HPP_CPP_VERSION
+# include <string_view>
+#endif
+
+#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+# if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+# define VULKAN_HPP_NO_SMART_HANDLE
+# endif
+#else
+# include <memory>
+# include <vector>
+#endif
+
+#if !defined( VULKAN_HPP_ASSERT )
+# include <cassert>
+# define VULKAN_HPP_ASSERT assert
+#endif
+
+#if !defined( VULKAN_HPP_ASSERT_ON_RESULT )
+# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT
+#endif
+
+#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL )
+# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
+#endif
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
+# if defined( __linux__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
+# include <dlfcn.h>
+# elif defined( _WIN32 )
+typedef struct HINSTANCE__ * HINSTANCE;
+# if defined( _WIN64 )
+typedef int64_t( __stdcall * FARPROC )();
+# else
+typedef int( __stdcall * FARPROC )();
+# endif
+extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName );
+extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule );
+extern "C" __declspec(
dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName ); +# endif +#endif + +#if !defined( __has_include ) +# define __has_include( x ) false +#endif + +#if ( 201711 <= __cpp_impl_three_way_comparison ) && __has_include( ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR ) +# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR +#endif +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +# include +#endif + +static_assert( VK_HEADER_VERSION == 178, "Wrong VK_HEADER_VERSION!" ); + +// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default. +// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) +# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_CONVERSION +# endif +#endif + +// includes through some other header +// this results in major(x) being resolved to gnu_dev_major(x) +// which is an expression in a constructor initializer list. +#if defined( major ) +# undef major +#endif +#if defined( minor ) +# undef minor +#endif + +// Windows defines MemoryBarrier which is deprecated and collides +// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct. +#if defined( MemoryBarrier ) +# undef MemoryBarrier +#endif + +#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS ) +# if defined( __clang__ ) +# if __has_feature( cxx_unrestricted_unions ) +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( __GNUC__ ) +# define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ ) +# if 40600 <= GCC_VERSION +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( _MSC_VER ) +# if 1900 <= _MSC_VER +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# endif +#endif + +#if !defined( VULKAN_HPP_INLINE ) +# if defined( __clang__ ) +# if __has_attribute( always_inline ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# else +# define VULKAN_HPP_INLINE inline +# endif +# elif defined( __GNUC__ ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# elif defined( _MSC_VER ) +# define VULKAN_HPP_INLINE inline +# else +# define VULKAN_HPP_INLINE inline +# endif +#endif + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_EXPLICIT +#else +# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit +#endif + +#if defined( __cpp_constexpr ) +# define VULKAN_HPP_CONSTEXPR constexpr +# if __cpp_constexpr >= 201304 +# define VULKAN_HPP_CONSTEXPR_14 constexpr +# else +# define VULKAN_HPP_CONSTEXPR_14 +# endif +# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr +#else +# define VULKAN_HPP_CONSTEXPR +# define VULKAN_HPP_CONSTEXPR_14 +# define VULKAN_HPP_CONST_OR_CONSTEXPR const +#endif + +#if !defined( VULKAN_HPP_NOEXCEPT ) +# if defined( _MSC_VER ) && ( _MSC_VER <= 1800 ) +# define VULKAN_HPP_NOEXCEPT +# else +# define VULKAN_HPP_NOEXCEPT noexcept +# define VULKAN_HPP_HAS_NOEXCEPT 1 +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept +# else +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS +# endif +# endif +#endif + +#if 14 <= VULKAN_HPP_CPP_VERSION +# define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]] +#else +# define VULKAN_HPP_DEPRECATED( msg ) +#endif + +#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS ) +# define VULKAN_HPP_NODISCARD [[nodiscard]] +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]] 
+# else +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS +# endif +#else +# define VULKAN_HPP_NODISCARD +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS +#endif + +#if !defined( VULKAN_HPP_NAMESPACE ) +# define VULKAN_HPP_NAMESPACE vk +#endif + +#define VULKAN_HPP_STRINGIFY2( text ) #text +#define VULKAN_HPP_STRINGIFY( text ) VULKAN_HPP_STRINGIFY2( text ) +#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE ) + +namespace VULKAN_HPP_NAMESPACE +{ +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + class ArrayProxy + { + public: + VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + ArrayProxy( T & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + template ::value, int>::type = 0> + ArrayProxy( typename std::remove_const::type & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + ArrayProxy( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + + template ::value, int>::type = 0> + ArrayProxy( uint32_t count, typename std::remove_const::type * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + +# if __GNUC__ >= 9 +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Winit-list-lifetime" +# endif + + ArrayProxy( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxy( std::initializer_list::type> const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + ArrayProxy( std::initializer_list & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxy( std::initializer_list::type> & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + +# if __GNUC__ >= 9 +# pragma GCC diagnostic pop +# endif + + template + ArrayProxy( std::array const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template ::value, int>::type = 0> + ArrayProxy( std::array::type, N> const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template + ArrayProxy( std::array & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template ::value, int>::type = 0> + ArrayProxy( std::array::type, N> & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template ::type>> + ArrayProxy( std::vector const & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxy( std::vector::type, Allocator> const & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>> + ArrayProxy( std::vector & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxy( std::vector::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + const T * begin() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + const T * 
end() const VULKAN_HPP_NOEXCEPT + { + return m_ptr + m_count; + } + + const T & front() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *m_ptr; + } + + const T & back() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *( m_ptr + m_count - 1 ); + } + + bool empty() const VULKAN_HPP_NOEXCEPT + { + return ( m_count == 0 ); + } + + uint32_t size() const VULKAN_HPP_NOEXCEPT + { + return m_count; + } + + T * data() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + private: + uint32_t m_count; + T * m_ptr; + }; + + template + class ArrayProxyNoTemporaries + { + public: + VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + ArrayProxyNoTemporaries( T && value ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( typename std::remove_const::type & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( typename std::remove_const::type && value ) = delete; + + ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const::type * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + + ArrayProxyNoTemporaries( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + ArrayProxyNoTemporaries( std::initializer_list const && list ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> const & list ) + VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> const && list ) = delete; + + ArrayProxyNoTemporaries( std::initializer_list & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + ArrayProxyNoTemporaries( std::initializer_list && list ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> && list ) = delete; + + template + ArrayProxyNoTemporaries( std::array const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template + ArrayProxyNoTemporaries( std::array const && data ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::array::type, N> const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::array::type, N> const && data ) = delete; + + template + ArrayProxyNoTemporaries( std::array & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template + ArrayProxyNoTemporaries( std::array && data ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::array::type, N> & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( 
data.data() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::array::type, N> && data ) = delete; + + template ::type>> + ArrayProxyNoTemporaries( std::vector const & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>> + ArrayProxyNoTemporaries( std::vector const && data ) = delete; + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxyNoTemporaries( std::vector::type, Allocator> const & data ) + VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxyNoTemporaries( std::vector::type, Allocator> const && data ) = delete; + + template ::type>> + ArrayProxyNoTemporaries( std::vector & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>> + ArrayProxyNoTemporaries( std::vector && data ) = delete; + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxyNoTemporaries( std::vector::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxyNoTemporaries( std::vector::type, Allocator> && data ) = delete; + + const T * begin() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + const T * end() const VULKAN_HPP_NOEXCEPT + { + return m_ptr + m_count; + } + + const T & front() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *m_ptr; + } + + const T & back() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *( m_ptr + m_count - 1 ); + } + + bool empty() const VULKAN_HPP_NOEXCEPT + { + return ( m_count == 0 ); + } + + uint32_t size() const VULKAN_HPP_NOEXCEPT + { + return m_count; + } + + T * data() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + private: + uint32_t m_count; + T * m_ptr; + }; +#endif + + template + class ArrayWrapper1D : public std::array + { + public: + VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT : std::array() {} + + VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array const & data ) VULKAN_HPP_NOEXCEPT : std::array( data ) + {} + +#if defined( _WIN32 ) && !defined( _WIN64 ) + VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT + { + return std::array::operator[]( index ); + } + + T & operator[]( int index ) VULKAN_HPP_NOEXCEPT + { + return std::array::operator[]( index ); + } +#endif + + operator T const *() const VULKAN_HPP_NOEXCEPT + { + return this->data(); + } + + operator T *() VULKAN_HPP_NOEXCEPT + { + return this->data(); + } + + template ::value, int>::type = 0> + operator std::string() const + { + return std::string( this->data() ); + } + +#if 17 <= VULKAN_HPP_CPP_VERSION + template ::value, int>::type = 0> + operator std::string_view() const + { + return std::string_view( this->data() ); + } +#endif + + template ::value, int>::type = 0> + bool operator<( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) < *static_cast const *>( &rhs ); + } + + template ::value, int>::type = 0> + bool operator<=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) <= *static_cast const *>( &rhs ); + } + + template ::value, int>::type = 0> + bool 
operator>( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) > *static_cast const *>( &rhs ); + } + + template ::value, int>::type = 0> + bool operator>=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) >= *static_cast const *>( &rhs ); + } + + template ::value, int>::type = 0> + bool operator==( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) == *static_cast const *>( &rhs ); + } + + template ::value, int>::type = 0> + bool operator!=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) != *static_cast const *>( &rhs ); + } + }; + + // specialization of relational operators between std::string and arrays of chars + template + bool operator<( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs < rhs.data(); + } + + template + bool operator<=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs <= rhs.data(); + } + + template + bool operator>( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs > rhs.data(); + } + + template + bool operator>=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs >= rhs.data(); + } + + template + bool operator==( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs == rhs.data(); + } + + template + bool operator!=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs != rhs.data(); + } + + template + class ArrayWrapper2D : public std::array, N> + { + public: + VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT : std::array, N>() {} + + VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array, N> const & data ) VULKAN_HPP_NOEXCEPT + : std::array, N>( *reinterpret_cast, N> const *>( &data ) ) + {} + }; + + template + struct FlagTraits + { + enum + { + allFlags = 0 + }; + }; + + template + class Flags + { + public: + using MaskType = typename std::underlying_type::type; + + // constructors + VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT : m_mask( 0 ) {} + + VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT : m_mask( static_cast( bit ) ) {} + + VULKAN_HPP_CONSTEXPR Flags( Flags const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VULKAN_HPP_CONSTEXPR explicit Flags( MaskType flags ) VULKAN_HPP_NOEXCEPT : m_mask( flags ) {} + + // relational operators +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Flags const & ) const = default; +#else + VULKAN_HPP_CONSTEXPR bool operator<( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask < rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator<=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask <= rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator>( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask > rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator>=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask >= rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator==( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask == rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator!=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask != rhs.m_mask; + } +#endif + + // logical operator + VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT + { + return !m_mask; + } + + // bitwise 
operators + VULKAN_HPP_CONSTEXPR Flags operator&( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask & rhs.m_mask ); + } + + VULKAN_HPP_CONSTEXPR Flags operator|( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask | rhs.m_mask ); + } + + VULKAN_HPP_CONSTEXPR Flags operator^( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask ^ rhs.m_mask ); + } + + VULKAN_HPP_CONSTEXPR Flags operator~() const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask ^ FlagTraits::allFlags ); + } + + // assignment operators + VULKAN_HPP_CONSTEXPR_14 Flags & operator=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VULKAN_HPP_CONSTEXPR_14 Flags & operator|=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT + { + m_mask |= rhs.m_mask; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Flags & operator&=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT + { + m_mask &= rhs.m_mask; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Flags & operator^=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT + { + m_mask ^= rhs.m_mask; + return *this; + } + + // cast operators + explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT + { + return !!m_mask; + } + + explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT + { + return m_mask; + } + +#if defined( VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC ) + public: +#else + private: +#endif + MaskType m_mask; + }; + +#if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + // relational operators only needed for pre C++20 + template + VULKAN_HPP_CONSTEXPR bool operator<( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator>( bit ); + } + + template + VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator>=( bit ); + } + + template + VULKAN_HPP_CONSTEXPR bool operator>( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator<( bit ); + } + + template + VULKAN_HPP_CONSTEXPR bool operator>=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator<=( bit ); + } + + template + VULKAN_HPP_CONSTEXPR bool operator==( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator==( bit ); + } + + template + VULKAN_HPP_CONSTEXPR bool operator!=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator!=( bit ); + } +#endif + + // bitwise operators + template + VULKAN_HPP_CONSTEXPR Flags operator&(BitType bit, Flags const & flags)VULKAN_HPP_NOEXCEPT + { + return flags.operator&( bit ); + } + + template + VULKAN_HPP_CONSTEXPR Flags operator|( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator|( bit ); + } + + template + VULKAN_HPP_CONSTEXPR Flags operator^( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator^( bit ); + } + + template + class Optional + { + public: + Optional( RefType & reference ) VULKAN_HPP_NOEXCEPT + { + m_ptr = &reference; + } + Optional( RefType * ptr ) VULKAN_HPP_NOEXCEPT + { + m_ptr = ptr; + } + Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_ptr = nullptr; + } + + operator RefType *() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + RefType const * operator->() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return !!m_ptr; + } + + private: + RefType * m_ptr; + }; + + template + struct StructExtends + { + enum + { + value = false + }; + }; + + template + struct 
IsPartOfStructureChain + { + static const bool valid = false; + }; + + template + struct IsPartOfStructureChain + { + static const bool valid = std::is_same::value || IsPartOfStructureChain::valid; + }; + + template + struct StructureChainContains + { + static const bool value = + std::is_same>::type>::value || + StructureChainContains::value; + }; + + template + struct StructureChainContains<0, T, ChainElements...> + { + static const bool value = + std::is_same>::type>::value; + }; + + template + struct StructureChainValidation + { + using TestType = typename std::tuple_element>::type; + static const bool valid = + StructExtends>::type>::value && + ( TestType::allowDuplicate || !StructureChainContains::value ) && + StructureChainValidation::valid; + }; + + template + struct StructureChainValidation<0, ChainElements...> + { + static const bool valid = true; + }; + + template + class StructureChain : public std::tuple + { + public: + StructureChain() VULKAN_HPP_NOEXCEPT + { + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" ); + link(); + } + + StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple( rhs ) + { + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" ); + link(); + } + + StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT + : std::tuple( std::forward>( rhs ) ) + { + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" ); + link(); + } + + StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple( elems... ) + { + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" ); + link(); + } + + StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT + { + std::tuple::operator=( rhs ); + link(); + return *this; + } + + StructureChain & operator=( StructureChain && rhs ) = delete; + + template >::type, size_t Which = 0> + T & get() VULKAN_HPP_NOEXCEPT + { + return std::get::value>( + static_cast &>( *this ) ); + } + + template >::type, size_t Which = 0> + T const & get() const VULKAN_HPP_NOEXCEPT + { + return std::get::value>( + static_cast const &>( *this ) ); + } + + template + std::tuple get() VULKAN_HPP_NOEXCEPT + { + return std::tie( get(), get(), get()... ); + } + + template + std::tuple get() const VULKAN_HPP_NOEXCEPT + { + return std::tie( get(), get(), get()... ); + } + + template + typename std::enable_if< + std::is_same>::type>::value && + ( Which == 0 ), + bool>::type + isLinked() const VULKAN_HPP_NOEXCEPT + { + return true; + } + + template + typename std::enable_if< + !std::is_same>::type>::value || + ( Which != 0 ), + bool>::type + isLinked() const VULKAN_HPP_NOEXCEPT + { + static_assert( IsPartOfStructureChain::valid, + "Can't unlink Structure that's not part of this StructureChain!" ); + return isLinked( reinterpret_cast( &get() ) ); + } + + template + typename std::enable_if< + !std::is_same>::type>::value || + ( Which != 0 ), + void>::type + relink() VULKAN_HPP_NOEXCEPT + { + static_assert( IsPartOfStructureChain::valid, + "Can't relink Structure that's not part of this StructureChain!" 
); + auto pNext = reinterpret_cast( &get() ); + VULKAN_HPP_ASSERT( !isLinked( pNext ) ); + auto & headElement = std::get<0>( static_cast &>( *this ) ); + pNext->pNext = reinterpret_cast( headElement.pNext ); + headElement.pNext = pNext; + } + + template + typename std::enable_if< + !std::is_same>::type>::value || + ( Which != 0 ), + void>::type + unlink() VULKAN_HPP_NOEXCEPT + { + static_assert( IsPartOfStructureChain::valid, + "Can't unlink Structure that's not part of this StructureChain!" ); + unlink( reinterpret_cast( &get() ) ); + } + + private: + template + struct ChainElementIndex : ChainElementIndex + {}; + + template + struct ChainElementIndex::value, void>::type, + First, + Types...> : ChainElementIndex + {}; + + template + struct ChainElementIndex::value, void>::type, + First, + Types...> : ChainElementIndex + {}; + + template + struct ChainElementIndex::value, void>::type, + First, + Types...> : std::integral_constant + {}; + + bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT + { + VkBaseInStructure const * elementPtr = reinterpret_cast( + &std::get<0>( static_cast const &>( *this ) ) ); + while ( elementPtr ) + { + if ( elementPtr->pNext == pNext ) + { + return true; + } + elementPtr = elementPtr->pNext; + } + return false; + } + + template + typename std::enable_if::type link() VULKAN_HPP_NOEXCEPT + { + auto & x = std::get( static_cast &>( *this ) ); + x.pNext = &std::get( static_cast &>( *this ) ); + link(); + } + + template + typename std::enable_if::type link() VULKAN_HPP_NOEXCEPT + {} + + void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT + { + VkBaseOutStructure * elementPtr = + reinterpret_cast( &std::get<0>( static_cast &>( *this ) ) ); + while ( elementPtr && ( elementPtr->pNext != pNext ) ) + { + elementPtr = elementPtr->pNext; + } + if ( elementPtr ) + { + elementPtr->pNext = pNext->pNext; + } + else + { + VULKAN_HPP_ASSERT( false ); // fires, if the ClassType member has already been unlinked ! 
+ } + } + }; + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + template + class UniqueHandleTraits; + + template + class UniqueHandle : public UniqueHandleTraits::deleter + { + private: + using Deleter = typename UniqueHandleTraits::deleter; + + public: + using element_type = Type; + + UniqueHandle() : Deleter(), m_value() {} + + explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT + : Deleter( deleter ) + , m_value( value ) + {} + + UniqueHandle( UniqueHandle const & ) = delete; + + UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT + : Deleter( std::move( static_cast( other ) ) ) + , m_value( other.release() ) + {} + + ~UniqueHandle() VULKAN_HPP_NOEXCEPT + { + if ( m_value ) + { + this->destroy( m_value ); + } + } + + UniqueHandle & operator=( UniqueHandle const & ) = delete; + + UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT + { + reset( other.release() ); + *static_cast( this ) = std::move( static_cast( other ) ); + return *this; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_value.operator bool(); + } + + Type const * operator->() const VULKAN_HPP_NOEXCEPT + { + return &m_value; + } + + Type * operator->() VULKAN_HPP_NOEXCEPT + { + return &m_value; + } + + Type const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + Type & operator*() VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + const Type & get() const VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + Type & get() VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + void reset( Type const & value = Type() ) VULKAN_HPP_NOEXCEPT + { + if ( m_value != value ) + { + if ( m_value ) + { + this->destroy( m_value ); + } + m_value = value; + } + } + + Type release() VULKAN_HPP_NOEXCEPT + { + Type value = m_value; + m_value = nullptr; + return value; + } + + void swap( UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_value, rhs.m_value ); + std::swap( static_cast( *this ), static_cast( rhs ) ); + } + + private: + Type m_value; + }; + + template + VULKAN_HPP_INLINE std::vector + uniqueToRaw( std::vector const & handles ) + { + std::vector newBuffer( handles.size() ); + std::transform( + handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } ); + return newBuffer; + } + + template + VULKAN_HPP_INLINE void swap( UniqueHandle & lhs, + UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT + { + lhs.swap( rhs ); + } +#endif + +#if !defined( VK_NO_PROTOTYPES ) + class DispatchLoaderStatic + { + public: + //=== VK_VERSION_1_0 === + + VkResult vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); + } + + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyInstance( instance, pAllocator ); + } + + VkResult vkEnumeratePhysicalDevices( VkInstance instance, + uint32_t * pPhysicalDeviceCount, + VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); + } + + void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice 
physicalDevice, + VkFormat format, + VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties * pImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties( + physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties * pQueueFamilyProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties( + physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); + } + + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetInstanceProcAddr( instance, pName ); + } + + PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceProcAddr( device, pName ); + } + + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); + } + + void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDevice( device, pAllocator ); + } + + VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, + const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); + } + + VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); + } + + void vkGetDeviceQueue( VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, 
pQueue ); + } + + VkResult vkQueueSubmit( VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo * pSubmits, + VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); + } + + VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueWaitIdle( queue ); + } + + VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeviceWaitIdle( device ); + } + + VkResult vkAllocateMemory( VkDevice device, + const VkMemoryAllocateInfo * pAllocateInfo, + const VkAllocationCallbacks * pAllocator, + VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); + } + + void vkFreeMemory( VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeMemory( device, memory, pAllocator ); + } + + VkResult vkMapMemory( VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory( device, memory, offset, size, flags, ppData ); + } + + void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory( device, memory ); + } + + VkResult vkFlushMappedMemoryRanges( VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } + + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } + + void vkGetDeviceMemoryCommitment( VkDevice device, + VkDeviceMemory memory, + VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); + } + + VkResult vkBindBufferMemory( VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); + } + + VkResult vkBindImageMemory( VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory( device, image, memory, memoryOffset ); + } + + void vkGetBufferMemoryRequirements( VkDevice device, + VkBuffer buffer, + VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); + } + + void vkGetImageMemoryRequirements( VkDevice device, + VkImage image, + VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements( VkDevice device, + VkImage image, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements( + device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, 
+ VkImageTiling tiling, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties( + physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); + } + + VkResult vkQueueBindSparse( VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo * pBindInfo, + VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); + } + + VkResult vkCreateFence( VkDevice device, + const VkFenceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); + } + + void vkDestroyFence( VkDevice device, + VkFence fence, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFence( device, fence, pAllocator ); + } + + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetFences( device, fenceCount, pFences ); + } + + VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceStatus( device, fence ); + } + + VkResult vkWaitForFences( VkDevice device, + uint32_t fenceCount, + const VkFence * pFences, + VkBool32 waitAll, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); + } + + VkResult vkCreateSemaphore( VkDevice device, + const VkSemaphoreCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); + } + + void vkDestroySemaphore( VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySemaphore( device, semaphore, pAllocator ); + } + + VkResult vkCreateEvent( VkDevice device, + const VkEventCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); + } + + void vkDestroyEvent( VkDevice device, + VkEvent event, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyEvent( device, event, pAllocator ); + } + + VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEventStatus( device, event ); + } + + VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetEvent( device, event ); + } + + VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetEvent( device, event ); + } + + VkResult vkCreateQueryPool( VkDevice device, + const VkQueryPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); + } + + void vkDestroyQueryPool( VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyQueryPool( device, queryPool, pAllocator ); + } + + VkResult vkGetQueryPoolResults( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + 
VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); + } + + VkResult vkCreateBuffer( VkDevice device, + const VkBufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); + } + + void vkDestroyBuffer( VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBuffer( device, buffer, pAllocator ); + } + + VkResult vkCreateBufferView( VkDevice device, + const VkBufferViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyBufferView( VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferView( device, bufferView, pAllocator ); + } + + VkResult vkCreateImage( VkDevice device, + const VkImageCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImage * pImage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); + } + + void vkDestroyImage( VkDevice device, + VkImage image, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImage( device, image, pAllocator ); + } + + void vkGetImageSubresourceLayout( VkDevice device, + VkImage image, + const VkImageSubresource * pSubresource, + VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); + } + + VkResult vkCreateImageView( VkDevice device, + const VkImageViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImageView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyImageView( VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImageView( device, imageView, pAllocator ); + } + + VkResult vkCreateShaderModule( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); + } + + void vkDestroyShaderModule( VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); + } + + VkResult vkCreatePipelineCache( VkDevice device, + const VkPipelineCacheCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); + } + + void vkDestroyPipelineCache( VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); + } + + VkResult vkGetPipelineCacheData( VkDevice device, + VkPipelineCache pipelineCache, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { 
+ return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); + } + + VkResult vkMergePipelineCaches( VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkCreateGraphicsPipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateGraphicsPipelines( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkCreateComputePipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + void vkDestroyPipeline( VkDevice device, + VkPipeline pipeline, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipeline( device, pipeline, pAllocator ); + } + + VkResult vkCreatePipelineLayout( VkDevice device, + const VkPipelineLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); + } + + void vkDestroyPipelineLayout( VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); + } + + VkResult vkCreateSampler( VkDevice device, + const VkSamplerCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); + } + + void vkDestroySampler( VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySampler( device, sampler, pAllocator ); + } + + VkResult vkCreateDescriptorSetLayout( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); + } + + void vkDestroyDescriptorSetLayout( VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); + } + + VkResult vkCreateDescriptorPool( VkDevice device, + const VkDescriptorPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); + } + + void vkDestroyDescriptorPool( VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); + } + + VkResult vkResetDescriptorPool( VkDevice device, + 
VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetDescriptorPool( device, descriptorPool, flags ); + } + + VkResult vkAllocateDescriptorSets( VkDevice device, + const VkDescriptorSetAllocateInfo * pAllocateInfo, + VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); + } + + VkResult vkFreeDescriptorSets( VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); + } + + void vkUpdateDescriptorSets( VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSets( + device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); + } + + VkResult vkCreateFramebuffer( VkDevice device, + const VkFramebufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); + } + + void vkDestroyFramebuffer( VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); + } + + VkResult vkCreateRenderPass( VkDevice device, + const VkRenderPassCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkDestroyRenderPass( VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyRenderPass( device, renderPass, pAllocator ); + } + + void vkGetRenderAreaGranularity( VkDevice device, + VkRenderPass renderPass, + VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); + } + + VkResult vkCreateCommandPool( VkDevice device, + const VkCommandPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); + } + + void vkDestroyCommandPool( VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCommandPool( device, commandPool, pAllocator ); + } + + VkResult vkResetCommandPool( VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandPool( device, commandPool, flags ); + } + + VkResult vkAllocateCommandBuffers( VkDevice device, + const VkCommandBufferAllocateInfo * pAllocateInfo, + VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); + } + + void vkFreeCommandBuffers( VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeCommandBuffers( device, commandPool, 
commandBufferCount, pCommandBuffers ); + } + + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); + } + + VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEndCommandBuffer( commandBuffer ); + } + + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandBuffer( commandBuffer, flags ); + } + + void vkCmdBindPipeline( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); + } + + void vkCmdSetViewport( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); + } + + void vkCmdSetScissor( VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); + } + + void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); + } + + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, + const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); + } + + void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, + float minDepthBounds, + float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); + } + + void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); + } + + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); + } + + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); + } + + void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets( commandBuffer, + pipelineBindPoint, + layout, + firstSet, + descriptorSetCount, + pDescriptorSets, + dynamicOffsetCount, + pDynamicOffsets ); + } + + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType 
indexType ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); + } + + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); + } + + void vkCmdDraw( VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } + + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDispatch( VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); + } + + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); + } + + void vkCmdCopyImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdBlitImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit * pRegions, + VkFilter filter ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + } + + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer 
dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); + } + + void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); + } + + void vkCmdFillBuffer( VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); + } + + void vkCmdClearColorImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue * pColor, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); + } + + void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); + } + + void vkCmdClearAttachments( VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment * pAttachments, + uint32_t rectCount, + const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); + } + + void vkCmdResolveImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdSetEvent( VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent( commandBuffer, event, stageMask ); + } + + void vkCmdResetEvent( VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents( commandBuffer, + eventCount, + pEvents, + srcStageMask, + dstStageMask, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + 
uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier( commandBuffer, + srcStageMask, + dstStageMask, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdBeginQuery( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); + } + + void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQuery( commandBuffer, queryPool, query ); + } + + void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); + } + + void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); + } + + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyQueryPoolResults( + commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); + } + + void vkCmdPushConstants( VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); + } + + void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); + } + + void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass( commandBuffer, contents ); + } + + void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass( commandBuffer ); + } + + void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); + } + + //=== VK_VERSION_1_1 === + + VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceVersion( pApiVersion ); + } + + VkResult vkBindBufferMemory2( VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2( VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2( device, bindInfoCount, 
pBindInfos ); + } + + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeatures( + device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } + + void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); + } + + void vkCmdDispatchBase( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBase( + commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + void vkGetImageMemoryRequirements2( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } + + void vkGetBufferMemoryRequirements2( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements2( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2( + device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2( + physicalDevice, pQueueFamilyPropertyCount, 
pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2( + physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + void vkTrimCommandPool( VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPool( device, commandPool, flags ); + } + + void vkGetDeviceQueue2( VkDevice device, + const VkDeviceQueueInfo2 * pQueueInfo, + VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); + } + + VkResult vkCreateSamplerYcbcrConversion( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } + + void vkDestroySamplerYcbcrConversion( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); + } + + VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } + + void vkDestroyDescriptorUpdateTemplate( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); + } + + void vkUpdateDescriptorSetWithTemplate( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalBufferProperties( + physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } + + void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFenceProperties( + physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } + + void vkGetPhysicalDeviceExternalSemaphoreProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, 
+ VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalSemaphoreProperties( + physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } + + void vkGetDescriptorSetLayoutSupport( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); + } + + //=== VK_VERSION_1_2 === + + void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCount( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCount( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + VkResult vkCreateRenderPass2( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } + + void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); + } + + void vkResetQueryPool( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); + } + + VkResult + vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); + } + + VkResult vkWaitSemaphores( VkDevice device, + const VkSemaphoreWaitInfo * pWaitInfo, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphores( device, pWaitInfo, timeout ); + } + + VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphore( device, pSignalInfo ); + } + + VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddress( device, pInfo ); + } + + uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); + } + + uint64_t 
vkGetDeviceMemoryOpaqueCaptureAddress( + VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); + } + + //=== VK_KHR_surface === + + void vkDestroySurfaceKHR( VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); + } + + VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); + } + + VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); + } + + VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); + } + + //=== VK_KHR_swapchain === + + VkResult vkCreateSwapchainKHR( VkDevice device, + const VkSwapchainCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); + } + + void vkDestroySwapchainKHR( VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); + } + + VkResult vkGetSwapchainImagesKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); + } + + VkResult vkAcquireNextImageKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); + } + + VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueuePresentKHR( queue, pPresentInfo ); + } + + VkResult vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); + } + + VkResult vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); + } + + 
VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pRectCount, + VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); + } + + VkResult vkAcquireNextImage2KHR( VkDevice device, + const VkAcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); + } + + //=== VK_KHR_display === + + VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlanePropertiesKHR * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t * pDisplayCount, + VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); + } + + VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); + } + + VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); + } + + VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + //=== VK_KHR_display_swapchain === + + VkResult vkCreateSharedSwapchainsKHR( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); + } + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + + //=== VK_KHR_xlib_surface === + + VkResult vkCreateXlibSurfaceKHR( VkInstance instance, + const VkXlibSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( 
VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); + } +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + + //=== VK_KHR_xcb_surface === + + VkResult vkCreateXcbSurfaceKHR( VkInstance instance, + const VkXcbSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); + } +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + + //=== VK_KHR_wayland_surface === + + VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); + } +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + + //=== VK_KHR_android_surface === + + VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + + //=== VK_KHR_win32_surface === + + VkResult vkCreateWin32SurfaceKHR( VkInstance instance, + const VkWin32SurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); + } + + void vkDestroyDebugReportCallbackEXT( VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); + } + + void vkDebugReportMessageEXT( VkInstance instance, + VkDebugReportFlagsEXT 
flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugReportMessageEXT( + instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); + } + + //=== VK_EXT_debug_marker === + + VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, + const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); + } + + VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, + const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); + } + + void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); + } + + void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerEndEXT( commandBuffer ); + } + + void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); + } + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_KHR_video_queue === + + VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, + const VkVideoProfileKHR * pVideoProfile, + VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities ); + } + + VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( + physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties ); + } + + VkResult vkCreateVideoSessionKHR( VkDevice device, + const VkVideoSessionCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); + } + + void vkDestroyVideoSessionKHR( VkDevice device, + VkVideoSessionKHR videoSession, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); + } + + VkResult vkGetVideoSessionMemoryRequirementsKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t * pVideoSessionMemoryRequirementsCount, + VkVideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetVideoSessionMemoryRequirementsKHR( + device, videoSession, pVideoSessionMemoryRequirementsCount, pVideoSessionMemoryRequirements ); + } + + VkResult + vkBindVideoSessionMemoryKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t videoSessionBindMemoryCount, + const VkVideoBindMemoryKHR * pVideoSessionBindMemories ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindVideoSessionMemoryKHR( + device, videoSession, videoSessionBindMemoryCount, pVideoSessionBindMemories ); + } + + VkResult vkCreateVideoSessionParametersKHR( VkDevice device, + const 
VkVideoSessionParametersCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionParametersKHR * pVideoSessionParameters ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); + } + + VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo ); + } + + void vkDestroyVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); + } + + void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, + const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); + } + + void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, + const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); + } + + void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, + const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_KHR_video_decode_queue === + + void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, + const VkVideoDecodeInfoKHR * pFrameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecodeVideoKHR( commandBuffer, pFrameInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback === + + void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindTransformFeedbackBuffersEXT( + commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); + } + + void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginTransformFeedbackEXT( + commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndTransformFeedbackEXT( + commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); + } + + void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + uint32_t index ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); + } + + void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectByteCountEXT( + commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); + } + + //=== VK_NVX_binary_import === + + VkResult vkCreateCuModuleNVX( VkDevice device, + const VkCuModuleCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); + } + + VkResult vkCreateCuFunctionNVX( VkDevice device, + const VkCuFunctionCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); + } + + void vkDestroyCuModuleNVX( VkDevice device, + VkCuModuleNVX module, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuModuleNVX( device, module, pAllocator ); + } + + void vkDestroyCuFunctionNVX( VkDevice device, + VkCuFunctionNVX function, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); + } + + void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, + const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); + } + + //=== VK_NVX_image_view_handle === + + uint32_t vkGetImageViewHandleNVX( VkDevice device, + const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewHandleNVX( device, pInfo ); + } + + VkResult vkGetImageViewAddressNVX( VkDevice device, + VkImageView imageView, + VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); + } + + //=== VK_AMD_draw_indirect_count === + + void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountAMD( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountAMD( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_AMD_shader_info === + + VkResult vkGetShaderInfoAMD( VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); + } + +# if defined( VK_USE_PLATFORM_GGP ) + + //=== VK_GGP_stream_descriptor_surface === + + VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, + const 
VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + + //=== VK_NV_external_memory_win32 === + + VkResult vkGetMemoryWin32HandleNV( VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + + void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( + physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + //=== VK_KHR_device_group === + + void + vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, + uint32_t heapIndex, + 
uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( + device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } + + void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); + } + + void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBaseKHR( + commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + +# if defined( VK_USE_PLATFORM_VI_NN ) + + //=== VK_NN_vi_surface === + + VkResult vkCreateViSurfaceNN( VkInstance instance, + const VkViSurfaceCreateInfoNN * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + void vkTrimCommandPoolKHR( VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPoolKHR( device, commandPool, flags ); + } + + //=== VK_KHR_device_group_creation === + + VkResult vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroupsKHR( + instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + //=== VK_KHR_external_memory_capabilities === + + void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( + physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + + //=== VK_KHR_external_memory_win32 === + + VkResult vkGetMemoryWin32HandleKHR( VkDevice device, + const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } + + VkResult vkGetMemoryWin32HandlePropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + VkResult + vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); + } + + VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); + } + + //=== VK_KHR_external_semaphore_capabilities 
=== + + void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + + //=== VK_KHR_external_semaphore_win32 === + + VkResult vkImportSemaphoreWin32HandleKHR( + VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); + } + + VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + VkResult + vkImportSemaphoreFdKHR( VkDevice device, + const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); + } + + VkResult vkGetSemaphoreFdKHR( VkDevice device, + const VkSemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); + } + + //=== VK_KHR_push_descriptor === + + void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetKHR( + commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); + } + + void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); + } + + //=== VK_EXT_conditional_rendering === + + void vkCmdBeginConditionalRenderingEXT( + VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); + } + + void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); + } + + //=== VK_KHR_descriptor_update_template === + + VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } + + void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); + } + + void 
vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + //=== VK_NV_clip_space_w_scaling === + + void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); + } + + //=== VK_EXT_direct_mode_display === + + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseDisplayEXT( physicalDevice, display ); + } + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + + //=== VK_EXT_acquire_xlib_display === + + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, + Display * dpy, + VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); + } + + VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, + Display * dpy, + RROutput rrOutput, + VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); + } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); + } + + //=== VK_EXT_display_control === + + VkResult vkDisplayPowerControlEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); + } + + VkResult vkRegisterDeviceEventEXT( VkDevice device, + const VkDeviceEventInfoEXT * pDeviceEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); + } + + VkResult vkRegisterDisplayEventEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT * pDisplayEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); + } + + VkResult vkGetSwapchainCounterEXT( VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); + } + + //=== VK_GOOGLE_display_timing === + + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); + } + + VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VkPastPresentationTimingGOOGLE * pPresentationTimings ) const + VULKAN_HPP_NOEXCEPT + { + return 
::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); + } + + //=== VK_EXT_discard_rectangles === + + void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEXT( + commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); + } + + //=== VK_EXT_hdr_metadata === + + void vkSetHdrMetadataEXT( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR * pSwapchains, + const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); + } + + //=== VK_KHR_create_renderpass2 === + + VkResult vkCreateRenderPass2KHR( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } + + void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); + } + + //=== VK_KHR_shared_presentable_image === + + VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainStatusKHR( device, swapchain ); + } + + //=== VK_KHR_external_fence_capabilities === + + void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( + physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + + //=== VK_KHR_external_fence_win32 === + + VkResult vkImportFenceWin32HandleKHR( + VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); + } + + VkResult vkGetFenceWin32HandleKHR( VkDevice device, + const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + VkResult vkImportFenceFdKHR( VkDevice device, + const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); + } + + VkResult + vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceFdKHR( 
device, pGetFdInfo, pFd ); + } + + //=== VK_KHR_performance_query === + + VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VkPerformanceCounterKHR * pCounters, + VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); + } + + void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); + } + + VkResult vkAcquireProfilingLockKHR( VkDevice device, + const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireProfilingLockKHR( device, pInfo ); + } + + void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseProfilingLockKHR( device ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( + physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); + } + + //=== VK_KHR_get_display_properties2 === + + VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlaneProperties2KHR * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult + vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); + } + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + + //=== VK_MVK_ios_surface === + + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, + const VkIOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + 
{ + return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + + //=== VK_MVK_macos_surface === + + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, + const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); + } + + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, + const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); + } + + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueEndDebugUtilsLabelEXT( queue ); + } + + void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); + } + + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); + } + + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); + } + + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); + } + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, + const struct AHardwareBuffer * buffer, + VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); + } + + VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice 
device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_sample_locations === + + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); + } + + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT * pMultisampleProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); + } + + //=== VK_KHR_get_memory_requirements2 === + + void vkGetImageMemoryRequirements2KHR( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetBufferMemoryRequirements2KHR( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2KHR( + device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_KHR_acceleration_structure === + + VkResult + vkCreateAccelerationStructureKHR( VkDevice device, + const VkAccelerationStructureCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureKHR( VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); + } + + void vkCmdBuildAccelerationStructuresKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); + } + + void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkDeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresIndirectKHR( + commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); + } + + VkResult vkBuildAccelerationStructuresKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + 
const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); + } + + VkResult + vkCopyAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); + } + + VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); + } + + VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteAccelerationStructuresPropertiesKHR( + device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); + } + + void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); + } + + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( + VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); + } + + void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesKHR( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + void vkGetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, + const VkAccelerationStructureVersionInfoKHR * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); + } + + void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const 
VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); + } + + //=== VK_KHR_sampler_ycbcr_conversion === + + VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } + + void vkDestroySamplerYcbcrConversionKHR( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); + } + + //=== VK_KHR_bind_memory2 === + + VkResult vkBindBufferMemory2KHR( VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2KHR( VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + //=== VK_EXT_image_drm_format_modifier === + + VkResult vkGetImageDrmFormatModifierPropertiesEXT( + VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); + } + + //=== VK_EXT_validation_cache === + + VkResult vkCreateValidationCacheEXT( VkDevice device, + const VkValidationCacheCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); + } + + void vkDestroyValidationCacheEXT( VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); + } + + VkResult vkMergeValidationCachesEXT( VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkGetValidationCacheDataEXT( VkDevice device, + VkValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); + } + + //=== VK_NV_shading_rate_image === + + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); + } + + void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV * pShadingRatePalettes ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportShadingRatePaletteNV( + commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); + } + + 
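// Note: every member of DispatchLoaderStatic forwards directly to the Vulkan C entry point
+    // of the same name that is resolved at link time, so an instance of this class can be
+    // passed wherever vulkan.hpp expects a Dispatch argument. A minimal usage sketch,
+    // assuming a valid VkDevice `device` and VkSwapchainKHR `swapchain` created elsewhere:
+    //
+    //   VULKAN_HPP_NAMESPACE::DispatchLoaderStatic dispatcher;
+    //   VkResult status = dispatcher.vkGetSwapchainStatusKHR( device, swapchain );
+    //
+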
void + vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoarseSampleOrderNV( + commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + } + + //=== VK_NV_ray_tracing === + + VkResult + vkCreateAccelerationStructureNV( VkDevice device, + const VkAccelerationStructureCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); + } + + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2KHR * pMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); + } + + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV * pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureNV( + commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + } + + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); + } + + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysNV( commandBuffer, + raygenShaderBindingTableBuffer, + raygenShaderBindingOffset, + missShaderBindingTableBuffer, + missShaderBindingOffset, + missShaderBindingStride, + hitShaderBindingTableBuffer, + hitShaderBindingOffset, + hitShaderBindingStride, + callableShaderBindingTableBuffer, + callableShaderBindingOffset, + callableShaderBindingStride, + width, + height, + depth ); + } + + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + 
const VkRayTracingPipelineCreateInfoNV * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesNV( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); + } + + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesNV( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCompileDeferredNV( device, pipeline, shader ); + } + + //=== VK_KHR_maintenance3 === + + void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); + } + + //=== VK_KHR_draw_indirect_count === + + void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountKHR( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountKHR( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_EXT_external_memory_host === + + VkResult vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); + } + + //=== VK_AMD_buffer_marker === + + void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + 
VkTimeDomainEXT * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); + } + + VkResult vkGetCalibratedTimestampsEXT( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoEXT * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } + + //=== VK_NV_mesh_shader === + + void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, + uint32_t taskCount, + uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); + } + + void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountNV( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_NV_scissor_exclusive === + + void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorNV( + commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); + } + + //=== VK_NV_device_diagnostic_checkpoints === + + void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); + } + + void vkGetQueueCheckpointDataNV( VkQueue queue, + uint32_t * pCheckpointDataCount, + VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); + } + + //=== VK_KHR_timeline_semaphore === + + VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, + VkSemaphore semaphore, + uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); + } + + VkResult vkWaitSemaphoresKHR( VkDevice device, + const VkSemaphoreWaitInfo * pWaitInfo, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); + } + + VkResult vkSignalSemaphoreKHR( VkDevice device, + const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphoreKHR( device, pSignalInfo ); + } + + //=== VK_INTEL_performance_query === + + VkResult vkInitializePerformanceApiINTEL( + VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); + } + + void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUninitializePerformanceApiINTEL( device ); + } + + VkResult + vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult vkCmdSetPerformanceStreamMarkerINTEL( + VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult + vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); + } + + VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VkPerformanceConfigurationINTEL * pConfiguration ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); + } + + VkResult + vkReleasePerformanceConfigurationINTEL( VkDevice device, + VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); + } + + VkResult + vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, + VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); + } + + VkResult vkGetPerformanceParameterINTEL( VkDevice device, + VkPerformanceParameterTypeINTEL parameter, + VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); + } + + //=== VK_AMD_display_native_hdr === + + void vkSetLocalDimmingAMD( VkDevice device, + VkSwapchainKHR swapChain, + VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + + //=== VK_FUCHSIA_imagepipe_surface === + + VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + + //=== VK_EXT_metal_surface === + + VkResult vkCreateMetalSurfaceEXT( VkInstance instance, + const VkMetalSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t * pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( + physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); + } + + void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, + const VkExtent2D * pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); + } + + //=== VK_EXT_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, + const 
VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressEXT( device, pInfo ); + } + + //=== VK_EXT_tooling_info === + + VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolPropertiesEXT * pToolProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); + } + + //=== VK_NV_cooperative_matrix === + + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesNV * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + + //=== VK_NV_coverage_reduction_mode === + + VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, + uint32_t * pCombinationCount, + VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + physicalDevice, pCombinationCount, pCombinations ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + + //=== VK_EXT_full_screen_exclusive === + + VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( + physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); + } + + VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult + vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } + + //=== VK_KHR_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressKHR( device, pInfo ); + } + + uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); + } + + uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( + VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); + } + + //=== VK_EXT_line_rasterization === + + void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, + 
uint32_t lineStippleFactor, + uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + //=== VK_EXT_host_query_reset === + + void vkResetQueryPoolEXT( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); + } + + //=== VK_EXT_extended_dynamic_state === + + void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); + } + + void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); + } + + void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); + } + + void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); + } + + void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); + } + + void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2EXT( + commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } + + void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); + } + + void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); + } + + void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); + } + + void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); + } + + void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); + } + + void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } + + //=== VK_KHR_deferred_host_operations === + + VkResult vkCreateDeferredOperationKHR( VkDevice device, + const VkAllocationCallbacks * pAllocator, + VkDeferredOperationKHR * pDeferredOperation ) 
const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); + } + + void vkDestroyDeferredOperationKHR( VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); + } + + uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, + VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); + } + + VkResult vkGetDeferredOperationResultKHR( VkDevice device, + VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationResultKHR( device, operation ); + } + + VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeferredOperationJoinKHR( device, operation ); + } + + //=== VK_KHR_pipeline_executable_properties === + + VkResult + vkGetPipelineExecutablePropertiesKHR( VkDevice device, + const VkPipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); + } + + VkResult + vkGetPipelineExecutableStatisticsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); + } + + VkResult vkGetPipelineExecutableInternalRepresentationsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableInternalRepresentationsKHR( + device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); + } + + //=== VK_NV_device_generated_commands === + + void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); + } + + void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } + + void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); + } + + VkResult + vkCreateIndirectCommandsLayoutNV( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutNV * 
pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } + + void vkDestroyIndirectCommandsLayoutNV( VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); + } + + //=== VK_EXT_private_data === + + VkResult vkCreatePrivateDataSlotEXT( VkDevice device, + const VkPrivateDataSlotCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlotEXT * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } + + void vkDestroyPrivateDataSlotEXT( VkDevice device, + VkPrivateDataSlotEXT privateDataSlot, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); + } + + VkResult vkSetPrivateDataEXT( VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); + } + + void vkGetPrivateDataEXT( VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); + } + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_KHR_video_encode_queue === + + void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, + const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + + void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfoKHR * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); + } + + void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2KHR stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfoKHR * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } + + void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, + const VkDependencyInfoKHR * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); + } + + void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, + VkPipelineStageFlags2KHR stage, + VkQueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); + } + + VkResult vkQueueSubmit2KHR( VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2KHR * pSubmits, + VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); + } + + void vkCmdWriteBufferMarker2AMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlags2KHR stage, + VkBuffer dstBuffer, 
+ VkDeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); + } + + void vkGetQueueCheckpointData2NV( VkQueue queue, + uint32_t * pCheckpointDataCount, + VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); + } + + //=== VK_NV_fragment_shading_rate_enums === + + void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); + } + + //=== VK_KHR_copy_commands2 === + + void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2KHR * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); + } + + void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, + const VkCopyImageInfo2KHR * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); + } + + void + vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2KHR * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); + } + + void + vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2KHR * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); + } + + void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, + const VkBlitImageInfo2KHR * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); + } + + void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, + const VkResolveImageInfo2KHR * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + + //=== VK_NV_acquire_winrt_display === + + VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); + } + + VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, + uint32_t deviceRelativeId, + VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + + //=== VK_EXT_directfb_surface === + + VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); + } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_KHR_ray_tracing_pipeline === + + void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR 
* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysKHR( commandBuffer, + pRaygenShaderBindingTable, + pMissShaderBindingTable, + pHitShaderBindingTable, + pCallableShaderBindingTable, + width, + height, + depth ); + } + + VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesKHR( + device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirectKHR( commandBuffer, + pRaygenShaderBindingTable, + pMissShaderBindingTable, + pHitShaderBindingTable, + pCallableShaderBindingTable, + indirectDeviceAddress ); + } + + VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); + } + + void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, + uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); + } + + //=== VK_EXT_vertex_input_dynamic_state === + + void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetVertexInputEXT( commandBuffer, + vertexBindingDescriptionCount, + pVertexBindingDescriptions, + vertexAttributeDescriptionCount, + pVertexAttributeDescriptions ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + + //=== VK_FUCHSIA_external_memory === + + VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + 
zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } + + VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( + device, handleType, zirconHandle, pMemoryZirconHandleProperties ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + + //=== VK_FUCHSIA_external_semaphore === + + VkResult vkImportSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); + } + + VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_extended_dynamic_state2 === + + void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, + uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); + } + + void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); + } + + void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); + } + + void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); + } + + void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + + //=== VK_QNX_screen_surface === + + VkResult vkCreateScreenSurfaceQNX( VkInstance instance, + const VkScreenSurfaceCreateInfoQNX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); + } + }; +#endif + + class DispatchLoaderDynamic; +#if !defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC ) +# if defined( VK_NO_PROTOTYPES ) +# define 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 +# else +# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0 +# endif +#endif + +#if !defined( VULKAN_HPP_STORAGE_API ) +# if defined( VULKAN_HPP_STORAGE_SHARED ) +# if defined( _MSC_VER ) +# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT ) +# define VULKAN_HPP_STORAGE_API __declspec( dllexport ) +# else +# define VULKAN_HPP_STORAGE_API __declspec( dllimport ) +# endif +# elif defined( __clang__ ) || defined( __GNUC__ ) +# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT ) +# define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) ) +# else +# define VULKAN_HPP_STORAGE_API +# endif +# else +# define VULKAN_HPP_STORAGE_API +# pragma warning Unknown import / export semantics +# endif +# else +# define VULKAN_HPP_STORAGE_API +# endif +#endif + +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \ + namespace VULKAN_HPP_NAMESPACE \ + { \ + VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; \ + } + extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic() +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE +# endif +#endif + +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic +# endif +#endif + +#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER ) +# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT +# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT +# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT +#else +# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {} +# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr +# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER +#endif + + struct AllocationCallbacks; + + template + class ObjectDestroy + { + public: + ObjectDestroy() = default; + + ObjectDestroy( OwnerType owner, + Optional allocationCallbacks + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + {} + + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } + + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); + } + + private: + OwnerType m_owner = {}; + Optional m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; + }; + + class NoParent; + + template + class ObjectDestroy + { + public: + ObjectDestroy() = default; + + ObjectDestroy( Optional allocationCallbacks, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + {} + + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } + + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
m_dispatch ); + t.destroy( m_allocationCallbacks, *m_dispatch ); + } + + private: + Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; + }; + + template <typename OwnerType, typename Dispatch> + class ObjectFree + { + public: + ObjectFree() = default; + + ObjectFree( OwnerType owner, + Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + {} + + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + + Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } + + protected: + template <typename T> + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.free( t, m_allocationCallbacks, *m_dispatch ); + } + + private: + OwnerType m_owner = {}; + Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; + }; + + template <typename OwnerType, typename Dispatch> + class ObjectRelease + { + public: + ObjectRelease() = default; + + ObjectRelease( OwnerType owner, Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_dispatch( &dispatch ) + {} + + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + + protected: + template <typename T> + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.release( t, *m_dispatch ); + } + + private: + OwnerType m_owner = {}; + Dispatch const * m_dispatch = nullptr; + }; + + template <typename OwnerType, typename PoolType, typename Dispatch> + class PoolFree + { + public: + PoolFree() = default; + + PoolFree( OwnerType owner, + PoolType pool, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_pool( pool ) + , m_dispatch( &dispatch ) + {} + + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + PoolType getPool() const VULKAN_HPP_NOEXCEPT + { + return m_pool; + } + + protected: + template <typename T> + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + m_owner.free( m_pool, t, *m_dispatch ); + } + + private: + OwnerType m_owner = OwnerType(); + PoolType m_pool = PoolType(); + Dispatch const * m_dispatch = nullptr; + }; + + using Bool32 = uint32_t; + using DeviceAddress = uint64_t; + using DeviceSize = uint64_t; + using SampleMask = uint32_t; + + template <typename EnumType, EnumType value> + struct CppType + {}; + + template <typename Type> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false; + }; + + VULKAN_HPP_INLINE std::string toHexString( uint32_t value ) + { + std::stringstream stream; + stream << std::hex << value; + return stream.str(); + } + + //============= + //=== ENUMs === + //============= + + //=== VK_VERSION_1_0 === + + enum class Result + { + eSuccess = VK_SUCCESS, + eNotReady = VK_NOT_READY, + eTimeout = VK_TIMEOUT, + eEventSet = VK_EVENT_SET, + eEventReset = VK_EVENT_RESET, + eIncomplete = VK_INCOMPLETE, + eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY, + eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY, + eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED, + eErrorDeviceLost = VK_ERROR_DEVICE_LOST, + eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED, + eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT, + eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT, + eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT, + eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER, + eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS, + eErrorFormatNotSupported = 
VK_ERROR_FORMAT_NOT_SUPPORTED, + eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL, + eErrorUnknown = VK_ERROR_UNKNOWN, + eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY, + eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE, + eErrorFragmentation = VK_ERROR_FRAGMENTATION, + eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, + eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, + eSuboptimalKHR = VK_SUBOPTIMAL_KHR, + eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR, + eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, + eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, + eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, + eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, + eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eThreadIdleKHR = VK_THREAD_IDLE_KHR, + eThreadDoneKHR = VK_THREAD_DONE_KHR, + eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR, + eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR, + ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, + eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, + eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, + eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, + eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, + eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, + eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( Result value ) + { + switch ( value ) + { + case Result::eSuccess: return "Success"; + case Result::eNotReady: return "NotReady"; + case Result::eTimeout: return "Timeout"; + case Result::eEventSet: return "EventSet"; + case Result::eEventReset: return "EventReset"; + case Result::eIncomplete: return "Incomplete"; + case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory"; + case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory"; + case Result::eErrorInitializationFailed: return "ErrorInitializationFailed"; + case Result::eErrorDeviceLost: return "ErrorDeviceLost"; + case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed"; + case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent"; + case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent"; + case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent"; + case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver"; + case Result::eErrorTooManyObjects: return "ErrorTooManyObjects"; + case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported"; + case Result::eErrorFragmentedPool: return "ErrorFragmentedPool"; + case Result::eErrorUnknown: return "ErrorUnknown"; + case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory"; + case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle"; + case Result::eErrorFragmentation: return "ErrorFragmentation"; + case Result::eErrorInvalidOpaqueCaptureAddress: return "ErrorInvalidOpaqueCaptureAddress"; + case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR"; + case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR"; + case Result::eSuboptimalKHR: return "SuboptimalKHR"; + case 
Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR"; + case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR"; + case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT"; + case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV"; + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; + case Result::eErrorNotPermittedEXT: return "ErrorNotPermittedEXT"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case Result::eErrorFullScreenExclusiveModeLostEXT: return "ErrorFullScreenExclusiveModeLostEXT"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case Result::eThreadIdleKHR: return "ThreadIdleKHR"; + case Result::eThreadDoneKHR: return "ThreadDoneKHR"; + case Result::eOperationDeferredKHR: return "OperationDeferredKHR"; + case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR"; + case Result::ePipelineCompileRequiredEXT: return "PipelineCompileRequiredEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class StructureType + { + eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, + eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, + eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, + eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, + eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, + eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, + eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, + eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, + eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, + eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, + eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, + eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, + eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, + eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, + eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, + ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, + ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, + ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, + ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, + ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, + ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, + ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, + ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, + ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, + ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, + ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, + eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, + eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, + ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, + eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, + 
eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, + eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, + eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, + eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, + eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, + eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, + eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, + eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, + eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, + eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, + eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, + eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, + eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, + eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, + eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, + eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, + eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + 
ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + ePipelineTessellationDomainOriginStateCreateInfo = + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, + ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, + ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, + eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, + eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES, + ePhysicalDeviceVulkan11Properties = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES, + ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES, + ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES, + eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, + ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, + ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, + ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, + eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, + ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, + ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, + eDescriptorSetVariableDescriptorCountAllocateInfo = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, + eDescriptorSetVariableDescriptorCountLayoutSupport = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, + eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, + eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, + ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, + eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, + ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + ePhysicalDeviceUniformBufferStandardLayoutFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, + ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, + ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, + eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, + eAttachmentDescriptionStencilLayout = 
VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, + ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, + ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, + eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, + eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, + eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, + eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, + eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, + eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, + eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, + ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, + eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, + eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, + eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, + eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, + eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, + eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, + eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, + eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, + eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + ePipelineRasterizationStateRasterizationOrderAMD = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, + eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, + eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, + eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoProfileKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR, + eVideoCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR, + eVideoPictureResourceKHR = 
VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR, + eVideoGetMemoryPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR, + eVideoBindMemoryKHR = VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR, + eVideoSessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR, + eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR, + eVideoBeginCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR, + eVideoEndCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR, + eVideoCodingControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR, + eVideoReferenceSlotKHR = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR, + eVideoQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR, + eVideoProfilesKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR, + ePhysicalDeviceVideoFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR, + eVideoFormatPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR, + eVideoDecodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, + eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, + eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, + ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, + ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, + ePipelineRasterizationStateStreamCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, + eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX, + eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX, + eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX, + eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, + eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT, + eVideoEncodeH264SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT, + eVideoEncodeH264SessionParametersCreateInfoEXT = + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT, + eVideoEncodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT, + eVideoEncodeH264VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT, + eVideoEncodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT, + eVideoEncodeH264NaluSliceEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT, + eVideoEncodeH264EmitPictureParametersEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT, + eVideoEncodeH264ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT, + eVideoDecodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT, + eVideoDecodeH264SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT, + eVideoDecodeH264PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT, + eVideoDecodeH264MvcEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT, + 
eVideoDecodeH264ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT, + eVideoDecodeH264SessionParametersCreateInfoEXT = + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT, + eVideoDecodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT, + eVideoDecodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, +#if defined( VK_USE_PLATFORM_GGP ) + eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, +#endif /*VK_USE_PLATFORM_GGP*/ + ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV, + eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, + eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT, +#if defined( VK_USE_PLATFORM_VI_NN ) + eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, +#endif /*VK_USE_PLATFORM_VI_NN*/ + ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, + eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, + ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, + eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, + eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, + eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, + eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, + eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, + eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, + ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, + eCommandBufferInheritanceConditionalRenderingInfoEXT = + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT, + ePhysicalDeviceConditionalRenderingFeaturesEXT = + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, + eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, + ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, + ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV, + eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, + eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, + eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, + eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, + ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, + ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, + ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, + ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, + ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceConservativeRasterizationPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, + ePipelineRasterizationConservativeStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT, + ePipelineRasterizationDepthClipStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT, + eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT, + eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, + eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, + ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR, + ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR, + eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR, + ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR, + eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR, + ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR, + ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR, + ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, + eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, + eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, + eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR, + eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR, + eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR, + eDisplayPlaneInfo2KHR 
= VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR, + eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR, +#if defined( VK_USE_PLATFORM_IOS_MVK ) + eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, + eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, + eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, + eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, + eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID, + eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, + eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID, + eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, + ePhysicalDeviceInlineUniformBlockPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, + eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT, + eDescriptorPoolInlineUniformBlockCreateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT, + eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, + eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, + ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, + ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT, + eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, + ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, + ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, + ePipelineColorBlendAdvancedStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, + ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, + eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR, + eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR, + eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR, + eAccelerationStructureGeometryInstancesDataKHR = + 
VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR, + eAccelerationStructureGeometryTrianglesDataKHR = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR, + eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR, + eAccelerationStructureVersionInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR, + eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR, + eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR, + eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR, + ePhysicalDeviceAccelerationStructureFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR, + ePhysicalDeviceAccelerationStructurePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR, + eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR, + eAccelerationStructureBuildSizesInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR, + ePhysicalDeviceRayTracingPipelineFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR, + ePhysicalDeviceRayTracingPipelinePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR, + eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR, + eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR, + eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR, + ePhysicalDeviceRayQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR, + ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV, + ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, + ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV, + eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT, + ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, + eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT, + eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT, + eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, + eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, + eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR, + ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePipelineViewportShadingRateImageStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, + ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, + ePhysicalDeviceShadingRateImagePropertiesNV = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV, + ePipelineViewportCoarseSampleOrderStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV, + eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, + eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, + eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, + eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, + eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, + eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, + eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, + eAccelerationStructureMemoryRequirementsInfoNV = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, + eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, + eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, + ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, + ePipelineRepresentativeFragmentTestStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, + ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT, + eFilterCubicImageViewImageFormatPropertiesEXT = + VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT, + eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, + eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, + ePhysicalDeviceExternalMemoryHostPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, + ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, + ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, + eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, + ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT, + eVideoDecodeH265SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT, + eVideoDecodeH265SessionParametersCreateInfoEXT = + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT, + eVideoDecodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT, + eVideoDecodeH265ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT, + eVideoDecodeH265PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT, + eVideoDecodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, + ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, + 
ePipelineVertexInputDivisorStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, + ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_GGP ) + ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, +#endif /*VK_USE_PLATFORM_GGP*/ + ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT, + ePhysicalDeviceComputeShaderDerivativesFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, + ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, + ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV, + ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, + ePhysicalDeviceShaderImageFootprintFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, + ePipelineViewportExclusiveScissorStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV, + ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, + eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, + eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, + ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, + eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, + eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, + ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, + ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, + ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL, + ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL, + ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT, + eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD, + eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMapPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, + eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, + ePhysicalDeviceSubgroupSizeControlPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT, + ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = + 
VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, + ePhysicalDeviceSubgroupSizeControlFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, + eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, + ePipelineFragmentShadingRateStateCreateInfoKHR = + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR, + ePhysicalDeviceFragmentShadingRatePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR, + ePhysicalDeviceFragmentShadingRateFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR, + ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR, + ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, + ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, + ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT, + ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT, + ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, + eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT, + eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR, + ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, + ePhysicalDeviceBufferDeviceAddressFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, + eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, + ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT, + eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, + ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV, + eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCoverageReductionModeFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV, + ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV, + eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV, + ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, + ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT, + ePhysicalDeviceProvokingVertexFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT, + ePipelineRasterizationProvokingVertexStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT, + ePhysicalDeviceProvokingVertexPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, + eSurfaceCapabilitiesFullScreenExclusiveEXT = 
VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT, + eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, + ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, + ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, + ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT, + ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + ePhysicalDeviceExtendedDynamicStateFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT, + ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, + ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, + ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, + ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, + ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, + ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, + ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, + ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV, + eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV, + eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV, + eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV, + eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV, + eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV, + eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV, + ePhysicalDeviceInheritedViewportScissorFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV, + eCommandBufferInheritanceViewportScissorInfoNV = + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV, + ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, + ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, + eCommandBufferInheritanceRenderPassTransformInfoQCOM = + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM, + eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM, + ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT, + eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT, + 
eDeviceMemoryReportCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT, + ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT, + ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT, + eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT, + ePhysicalDeviceCustomBorderColorPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT, + ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT, + ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR, + ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT, + eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT, + ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT, + ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR, + eVideoEncodeRateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV, + eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, + eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR, + eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR, + eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR, + eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR, + eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR, + eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR, + eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR, + ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR, + eQueueFamilyCheckpointProperties2Nv = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, + eCheckpointData2Nv = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, + ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR, + ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV, + ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV, + ePipelineFragmentShadingRateEnumStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV, + ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2FeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2PropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT, + eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM, + ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT, + ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR, + eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR, + eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR, + eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR, + eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR, + eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR, + eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR, + eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR, + eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR, + eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR, + eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR, + eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR, + ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT, +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE, + eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE, + ePhysicalDeviceVertexInputDynamicStateFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT, + eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT, + eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eImportMemoryZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA, + eMemoryZirconHandlePropertiesFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA, + eMemoryGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA, + eImportSemaphoreZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA, + eSemaphoreGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + ePhysicalDeviceExtendedDynamicState2FeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + eScreenSurfaceCreateInfoQNX = VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT, + ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT, + eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, + eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR, + eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, + eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR, + eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, + eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, + eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, + eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, + eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, + eBufferDeviceAddressInfoEXT = 
VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, + eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, + eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, + eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, + eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, + eDescriptorSetLayoutBindingFlagsCreateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, + eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, + eDescriptorSetVariableDescriptorCountAllocateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, + eDescriptorSetVariableDescriptorCountLayoutSupportEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, + eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, + eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, + eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, + eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, + eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, + eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, + eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, + eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, + eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, + eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, + eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, + eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, + eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, + eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, + eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, + eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, + eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, + eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, + eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, + eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, + eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, + eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, + eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, + eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, + eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, + eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, + 
eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR, + eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, + ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, + ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, + ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT, + ePhysicalDeviceBufferDeviceAddressFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, + ePhysicalDeviceDepthStencilResolvePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, + ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, + ePhysicalDeviceDescriptorIndexingPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, + ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, + ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, + ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, + ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, + ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, + ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, + ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, + ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, + ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, + ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, + ePhysicalDeviceImagelessFramebufferFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, + ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, + ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, + ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, + ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, + ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, + ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, + ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, + ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, + ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, + ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, + ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, + ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, + 
ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, + ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, + ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, + ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, + ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR, + ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, + ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, + ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, + ePipelineTessellationDomainOriginStateCreateInfoKHR = + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, + eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, + eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, + eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, + eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, + eRenderPassInputAttachmentAspectCreateInfoKHR = + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, + eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, + eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, + eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, + eSamplerYcbcrConversionImageFormatPropertiesKHR = + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, + eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, + eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR, + eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR, + eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, + eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, + eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, + eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, + eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, + eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, + eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR, + eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, + eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( StructureType value ) + { + switch ( value ) + { + case StructureType::eApplicationInfo: return "ApplicationInfo"; + case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo"; + case 
StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo"; + case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo"; + case StructureType::eSubmitInfo: return "SubmitInfo"; + case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo"; + case StructureType::eMappedMemoryRange: return "MappedMemoryRange"; + case StructureType::eBindSparseInfo: return "BindSparseInfo"; + case StructureType::eFenceCreateInfo: return "FenceCreateInfo"; + case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo"; + case StructureType::eEventCreateInfo: return "EventCreateInfo"; + case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo"; + case StructureType::eBufferCreateInfo: return "BufferCreateInfo"; + case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo"; + case StructureType::eImageCreateInfo: return "ImageCreateInfo"; + case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo"; + case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo"; + case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo"; + case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo"; + case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo"; + case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo"; + case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo"; + case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo"; + case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo"; + case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo"; + case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo"; + case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo"; + case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo"; + case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo"; + case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo"; + case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo"; + case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo"; + case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo"; + case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo"; + case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo"; + case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet"; + case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet"; + case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo"; + case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo"; + case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo"; + case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo"; + case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo"; + case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo"; + case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo"; + case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier"; + case 
StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier"; + case StructureType::eMemoryBarrier: return "MemoryBarrier"; + case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo"; + case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo"; + case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties"; + case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo"; + case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo"; + case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures"; + case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements"; + case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo"; + case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo"; + case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo"; + case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo"; + case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo"; + case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo"; + case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo"; + case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo"; + case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties"; + case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo"; + case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2"; + case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2"; + case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2"; + case StructureType::eMemoryRequirements2: return "MemoryRequirements2"; + case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2"; + case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2"; + case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2"; + case StructureType::eFormatProperties2: return "FormatProperties2"; + case StructureType::eImageFormatProperties2: return "ImageFormatProperties2"; + case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2"; + case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2"; + case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2"; + case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2"; + case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2"; + case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties"; + case StructureType::eRenderPassInputAttachmentAspectCreateInfo: + return "RenderPassInputAttachmentAspectCreateInfo"; + case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo"; + case StructureType::ePipelineTessellationDomainOriginStateCreateInfo: + return "PipelineTessellationDomainOriginStateCreateInfo"; + case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo"; + case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures"; + case 
StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties"; + case StructureType::ePhysicalDeviceVariablePointersFeatures: return "PhysicalDeviceVariablePointersFeatures"; + case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo"; + case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures"; + case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties"; + case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2"; + case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo"; + case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo"; + case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo"; + case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo"; + case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures: + return "PhysicalDeviceSamplerYcbcrConversionFeatures"; + case StructureType::eSamplerYcbcrConversionImageFormatProperties: + return "SamplerYcbcrConversionImageFormatProperties"; + case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo"; + case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo"; + case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties"; + case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo"; + case StructureType::eExternalBufferProperties: return "ExternalBufferProperties"; + case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties"; + case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo"; + case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo"; + case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo"; + case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo"; + case StructureType::eExternalFenceProperties: return "ExternalFenceProperties"; + case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo"; + case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo"; + case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo"; + case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties"; + case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties"; + case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport"; + case StructureType::ePhysicalDeviceShaderDrawParametersFeatures: + return "PhysicalDeviceShaderDrawParametersFeatures"; + case StructureType::ePhysicalDeviceVulkan11Features: return "PhysicalDeviceVulkan11Features"; + case StructureType::ePhysicalDeviceVulkan11Properties: return "PhysicalDeviceVulkan11Properties"; + case StructureType::ePhysicalDeviceVulkan12Features: return "PhysicalDeviceVulkan12Features"; + case StructureType::ePhysicalDeviceVulkan12Properties: return "PhysicalDeviceVulkan12Properties"; + case StructureType::eImageFormatListCreateInfo: return "ImageFormatListCreateInfo"; + case StructureType::eAttachmentDescription2: return "AttachmentDescription2"; + case StructureType::eAttachmentReference2: return "AttachmentReference2"; + case 
StructureType::eSubpassDescription2: return "SubpassDescription2"; + case StructureType::eSubpassDependency2: return "SubpassDependency2"; + case StructureType::eRenderPassCreateInfo2: return "RenderPassCreateInfo2"; + case StructureType::eSubpassBeginInfo: return "SubpassBeginInfo"; + case StructureType::eSubpassEndInfo: return "SubpassEndInfo"; + case StructureType::ePhysicalDevice8BitStorageFeatures: return "PhysicalDevice8BitStorageFeatures"; + case StructureType::ePhysicalDeviceDriverProperties: return "PhysicalDeviceDriverProperties"; + case StructureType::ePhysicalDeviceShaderAtomicInt64Features: return "PhysicalDeviceShaderAtomicInt64Features"; + case StructureType::ePhysicalDeviceShaderFloat16Int8Features: return "PhysicalDeviceShaderFloat16Int8Features"; + case StructureType::ePhysicalDeviceFloatControlsProperties: return "PhysicalDeviceFloatControlsProperties"; + case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo: + return "DescriptorSetLayoutBindingFlagsCreateInfo"; + case StructureType::ePhysicalDeviceDescriptorIndexingFeatures: return "PhysicalDeviceDescriptorIndexingFeatures"; + case StructureType::ePhysicalDeviceDescriptorIndexingProperties: + return "PhysicalDeviceDescriptorIndexingProperties"; + case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo: + return "DescriptorSetVariableDescriptorCountAllocateInfo"; + case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport: + return "DescriptorSetVariableDescriptorCountLayoutSupport"; + case StructureType::ePhysicalDeviceDepthStencilResolveProperties: + return "PhysicalDeviceDepthStencilResolveProperties"; + case StructureType::eSubpassDescriptionDepthStencilResolve: return "SubpassDescriptionDepthStencilResolve"; + case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures: return "PhysicalDeviceScalarBlockLayoutFeatures"; + case StructureType::eImageStencilUsageCreateInfo: return "ImageStencilUsageCreateInfo"; + case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties: + return "PhysicalDeviceSamplerFilterMinmaxProperties"; + case StructureType::eSamplerReductionModeCreateInfo: return "SamplerReductionModeCreateInfo"; + case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures: return "PhysicalDeviceVulkanMemoryModelFeatures"; + case StructureType::ePhysicalDeviceImagelessFramebufferFeatures: + return "PhysicalDeviceImagelessFramebufferFeatures"; + case StructureType::eFramebufferAttachmentsCreateInfo: return "FramebufferAttachmentsCreateInfo"; + case StructureType::eFramebufferAttachmentImageInfo: return "FramebufferAttachmentImageInfo"; + case StructureType::eRenderPassAttachmentBeginInfo: return "RenderPassAttachmentBeginInfo"; + case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures: + return "PhysicalDeviceUniformBufferStandardLayoutFeatures"; + case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures: + return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures"; + case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures: + return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures"; + case StructureType::eAttachmentReferenceStencilLayout: return "AttachmentReferenceStencilLayout"; + case StructureType::eAttachmentDescriptionStencilLayout: return "AttachmentDescriptionStencilLayout"; + case StructureType::ePhysicalDeviceHostQueryResetFeatures: return "PhysicalDeviceHostQueryResetFeatures"; + case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures: return "PhysicalDeviceTimelineSemaphoreFeatures"; + case 
StructureType::ePhysicalDeviceTimelineSemaphoreProperties: + return "PhysicalDeviceTimelineSemaphoreProperties"; + case StructureType::eSemaphoreTypeCreateInfo: return "SemaphoreTypeCreateInfo"; + case StructureType::eTimelineSemaphoreSubmitInfo: return "TimelineSemaphoreSubmitInfo"; + case StructureType::eSemaphoreWaitInfo: return "SemaphoreWaitInfo"; + case StructureType::eSemaphoreSignalInfo: return "SemaphoreSignalInfo"; + case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures: + return "PhysicalDeviceBufferDeviceAddressFeatures"; + case StructureType::eBufferDeviceAddressInfo: return "BufferDeviceAddressInfo"; + case StructureType::eBufferOpaqueCaptureAddressCreateInfo: return "BufferOpaqueCaptureAddressCreateInfo"; + case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo: return "MemoryOpaqueCaptureAddressAllocateInfo"; + case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo: return "DeviceMemoryOpaqueCaptureAddressInfo"; + case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR"; + case StructureType::ePresentInfoKHR: return "PresentInfoKHR"; + case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR"; + case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR"; + case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR"; + case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR"; + case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR"; + case StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR"; + case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR"; + case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR"; + case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR"; +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT"; + case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: + return "PipelineRasterizationStateRasterizationOrderAMD"; + case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT"; + case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT"; + case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eVideoProfileKHR: return "VideoProfileKHR"; + case StructureType::eVideoCapabilitiesKHR: return "VideoCapabilitiesKHR"; + case StructureType::eVideoPictureResourceKHR: return "VideoPictureResourceKHR"; + case 
StructureType::eVideoGetMemoryPropertiesKHR: return "VideoGetMemoryPropertiesKHR"; + case StructureType::eVideoBindMemoryKHR: return "VideoBindMemoryKHR"; + case StructureType::eVideoSessionCreateInfoKHR: return "VideoSessionCreateInfoKHR"; + case StructureType::eVideoSessionParametersCreateInfoKHR: return "VideoSessionParametersCreateInfoKHR"; + case StructureType::eVideoSessionParametersUpdateInfoKHR: return "VideoSessionParametersUpdateInfoKHR"; + case StructureType::eVideoBeginCodingInfoKHR: return "VideoBeginCodingInfoKHR"; + case StructureType::eVideoEndCodingInfoKHR: return "VideoEndCodingInfoKHR"; + case StructureType::eVideoCodingControlInfoKHR: return "VideoCodingControlInfoKHR"; + case StructureType::eVideoReferenceSlotKHR: return "VideoReferenceSlotKHR"; + case StructureType::eVideoQueueFamilyProperties2KHR: return "VideoQueueFamilyProperties2KHR"; + case StructureType::eVideoProfilesKHR: return "VideoProfilesKHR"; + case StructureType::ePhysicalDeviceVideoFormatInfoKHR: return "PhysicalDeviceVideoFormatInfoKHR"; + case StructureType::eVideoFormatPropertiesKHR: return "VideoFormatPropertiesKHR"; + case StructureType::eVideoDecodeInfoKHR: return "VideoDecodeInfoKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV"; + case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV"; + case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV"; + case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT: + return "PhysicalDeviceTransformFeedbackFeaturesEXT"; + case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT: + return "PhysicalDeviceTransformFeedbackPropertiesEXT"; + case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT: + return "PipelineRasterizationStateStreamCreateInfoEXT"; + case StructureType::eCuModuleCreateInfoNVX: return "CuModuleCreateInfoNVX"; + case StructureType::eCuFunctionCreateInfoNVX: return "CuFunctionCreateInfoNVX"; + case StructureType::eCuLaunchInfoNVX: return "CuLaunchInfoNVX"; + case StructureType::eImageViewHandleInfoNVX: return "ImageViewHandleInfoNVX"; + case StructureType::eImageViewAddressPropertiesNVX: return "ImageViewAddressPropertiesNVX"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eVideoEncodeH264CapabilitiesEXT: return "VideoEncodeH264CapabilitiesEXT"; + case StructureType::eVideoEncodeH264SessionCreateInfoEXT: return "VideoEncodeH264SessionCreateInfoEXT"; + case StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT: + return "VideoEncodeH264SessionParametersCreateInfoEXT"; + case StructureType::eVideoEncodeH264SessionParametersAddInfoEXT: + return "VideoEncodeH264SessionParametersAddInfoEXT"; + case StructureType::eVideoEncodeH264VclFrameInfoEXT: return "VideoEncodeH264VclFrameInfoEXT"; + case StructureType::eVideoEncodeH264DpbSlotInfoEXT: return "VideoEncodeH264DpbSlotInfoEXT"; + case StructureType::eVideoEncodeH264NaluSliceEXT: return "VideoEncodeH264NaluSliceEXT"; + case StructureType::eVideoEncodeH264EmitPictureParametersEXT: return "VideoEncodeH264EmitPictureParametersEXT"; + case StructureType::eVideoEncodeH264ProfileEXT: return "VideoEncodeH264ProfileEXT"; + case StructureType::eVideoDecodeH264CapabilitiesEXT: return "VideoDecodeH264CapabilitiesEXT"; + case StructureType::eVideoDecodeH264SessionCreateInfoEXT: return "VideoDecodeH264SessionCreateInfoEXT"; + case 
StructureType::eVideoDecodeH264PictureInfoEXT: return "VideoDecodeH264PictureInfoEXT"; + case StructureType::eVideoDecodeH264MvcEXT: return "VideoDecodeH264MvcEXT"; + case StructureType::eVideoDecodeH264ProfileEXT: return "VideoDecodeH264ProfileEXT"; + case StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT: + return "VideoDecodeH264SessionParametersCreateInfoEXT"; + case StructureType::eVideoDecodeH264SessionParametersAddInfoEXT: + return "VideoDecodeH264SessionParametersAddInfoEXT"; + case StructureType::eVideoDecodeH264DpbSlotInfoEXT: return "VideoDecodeH264DpbSlotInfoEXT"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD"; +#if defined( VK_USE_PLATFORM_GGP ) + case StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP"; +#endif /*VK_USE_PLATFORM_GGP*/ + case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV: + return "PhysicalDeviceCornerSampledImageFeaturesNV"; + case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV"; + case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV"; + case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT"; +#if defined( VK_USE_PLATFORM_VI_NN ) + case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN"; +#endif /*VK_USE_PLATFORM_VI_NN*/ + case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT: + return "PhysicalDeviceTextureCompressionAstcHdrFeaturesEXT"; + case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT"; + case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR"; + case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR"; + case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR"; + case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR"; + case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR"; + case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR"; + case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR"; + case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR"; + case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR"; + case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR"; + case StructureType::eSemaphoreGetFdInfoKHR: return 
"SemaphoreGetFdInfoKHR"; + case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: + return "PhysicalDevicePushDescriptorPropertiesKHR"; + case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT: + return "CommandBufferInheritanceConditionalRenderingInfoEXT"; + case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT: + return "PhysicalDeviceConditionalRenderingFeaturesEXT"; + case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT"; + case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR"; + case StructureType::ePipelineViewportWScalingStateCreateInfoNV: + return "PipelineViewportWScalingStateCreateInfoNV"; + case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT"; + case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT"; + case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT"; + case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT"; + case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT"; + case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE"; + case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: + return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; + case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV"; + case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: + return "PhysicalDeviceDiscardRectanglePropertiesEXT"; + case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: + return "PipelineDiscardRectangleStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT: + return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; + case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT: + return "PipelineRasterizationConservativeStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT: return "PhysicalDeviceDepthClipEnableFeaturesEXT"; + case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT: + return "PipelineRasterizationDepthClipStateCreateInfoEXT"; + case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT"; + case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR"; + case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR"; + case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR"; + case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR"; + case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR: + return "PhysicalDevicePerformanceQueryFeaturesKHR"; + case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR: + return "PhysicalDevicePerformanceQueryPropertiesKHR"; + case StructureType::eQueryPoolPerformanceCreateInfoKHR: return "QueryPoolPerformanceCreateInfoKHR"; + case StructureType::ePerformanceQuerySubmitInfoKHR: return "PerformanceQuerySubmitInfoKHR"; + case StructureType::eAcquireProfilingLockInfoKHR: return "AcquireProfilingLockInfoKHR"; + case StructureType::ePerformanceCounterKHR: return "PerformanceCounterKHR"; + case StructureType::ePerformanceCounterDescriptionKHR: 
return "PerformanceCounterDescriptionKHR"; + case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR"; + case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR"; + case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR"; + case StructureType::eDisplayProperties2KHR: return "DisplayProperties2KHR"; + case StructureType::eDisplayPlaneProperties2KHR: return "DisplayPlaneProperties2KHR"; + case StructureType::eDisplayModeProperties2KHR: return "DisplayModeProperties2KHR"; + case StructureType::eDisplayPlaneInfo2KHR: return "DisplayPlaneInfo2KHR"; + case StructureType::eDisplayPlaneCapabilities2KHR: return "DisplayPlaneCapabilities2KHR"; +#if defined( VK_USE_PLATFORM_IOS_MVK ) + case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK"; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK"; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT"; + case StructureType::eDebugUtilsObjectTagInfoEXT: return "DebugUtilsObjectTagInfoEXT"; + case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT"; + case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT"; + case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID"; + case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID"; + case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID: + return "AndroidHardwareBufferFormatPropertiesANDROID"; + case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID"; + case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID: + return "MemoryGetAndroidHardwareBufferInfoANDROID"; + case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT: + return "PhysicalDeviceInlineUniformBlockFeaturesEXT"; + case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT: + return "PhysicalDeviceInlineUniformBlockPropertiesEXT"; + case StructureType::eWriteDescriptorSetInlineUniformBlockEXT: return "WriteDescriptorSetInlineUniformBlockEXT"; + case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT: + return "DescriptorPoolInlineUniformBlockCreateInfoEXT"; + case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT"; + case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT"; + case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: + return "PipelineSampleLocationsStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT: + return "PhysicalDeviceSampleLocationsPropertiesEXT"; + case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT: + return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT: + return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; + case 
StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT: + return "PipelineColorBlendAdvancedStateCreateInfoEXT"; + case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV"; + case StructureType::eWriteDescriptorSetAccelerationStructureKHR: + return "WriteDescriptorSetAccelerationStructureKHR"; + case StructureType::eAccelerationStructureBuildGeometryInfoKHR: + return "AccelerationStructureBuildGeometryInfoKHR"; + case StructureType::eAccelerationStructureDeviceAddressInfoKHR: + return "AccelerationStructureDeviceAddressInfoKHR"; + case StructureType::eAccelerationStructureGeometryAabbsDataKHR: + return "AccelerationStructureGeometryAabbsDataKHR"; + case StructureType::eAccelerationStructureGeometryInstancesDataKHR: + return "AccelerationStructureGeometryInstancesDataKHR"; + case StructureType::eAccelerationStructureGeometryTrianglesDataKHR: + return "AccelerationStructureGeometryTrianglesDataKHR"; + case StructureType::eAccelerationStructureGeometryKHR: return "AccelerationStructureGeometryKHR"; + case StructureType::eAccelerationStructureVersionInfoKHR: return "AccelerationStructureVersionInfoKHR"; + case StructureType::eCopyAccelerationStructureInfoKHR: return "CopyAccelerationStructureInfoKHR"; + case StructureType::eCopyAccelerationStructureToMemoryInfoKHR: return "CopyAccelerationStructureToMemoryInfoKHR"; + case StructureType::eCopyMemoryToAccelerationStructureInfoKHR: return "CopyMemoryToAccelerationStructureInfoKHR"; + case StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR: + return "PhysicalDeviceAccelerationStructureFeaturesKHR"; + case StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR: + return "PhysicalDeviceAccelerationStructurePropertiesKHR"; + case StructureType::eAccelerationStructureCreateInfoKHR: return "AccelerationStructureCreateInfoKHR"; + case StructureType::eAccelerationStructureBuildSizesInfoKHR: return "AccelerationStructureBuildSizesInfoKHR"; + case StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR: + return "PhysicalDeviceRayTracingPipelineFeaturesKHR"; + case StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR: + return "PhysicalDeviceRayTracingPipelinePropertiesKHR"; + case StructureType::eRayTracingPipelineCreateInfoKHR: return "RayTracingPipelineCreateInfoKHR"; + case StructureType::eRayTracingShaderGroupCreateInfoKHR: return "RayTracingShaderGroupCreateInfoKHR"; + case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR: return "RayTracingPipelineInterfaceCreateInfoKHR"; + case StructureType::ePhysicalDeviceRayQueryFeaturesKHR: return "PhysicalDeviceRayQueryFeaturesKHR"; + case StructureType::ePipelineCoverageModulationStateCreateInfoNV: + return "PipelineCoverageModulationStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV: return "PhysicalDeviceShaderSmBuiltinsFeaturesNV"; + case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV: + return "PhysicalDeviceShaderSmBuiltinsPropertiesNV"; + case StructureType::eDrmFormatModifierPropertiesListEXT: return "DrmFormatModifierPropertiesListEXT"; + case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT: + return "PhysicalDeviceImageDrmFormatModifierInfoEXT"; + case StructureType::eImageDrmFormatModifierListCreateInfoEXT: return "ImageDrmFormatModifierListCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT: + return "ImageDrmFormatModifierExplicitCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierPropertiesEXT: 
return "ImageDrmFormatModifierPropertiesEXT"; + case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT"; + case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR: + return "PhysicalDevicePortabilitySubsetFeaturesKHR"; + case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR: + return "PhysicalDevicePortabilitySubsetPropertiesKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV: + return "PipelineViewportShadingRateImageStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV: return "PhysicalDeviceShadingRateImageFeaturesNV"; + case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV: + return "PhysicalDeviceShadingRateImagePropertiesNV"; + case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV: + return "PipelineViewportCoarseSampleOrderStateCreateInfoNV"; + case StructureType::eRayTracingPipelineCreateInfoNV: return "RayTracingPipelineCreateInfoNV"; + case StructureType::eAccelerationStructureCreateInfoNV: return "AccelerationStructureCreateInfoNV"; + case StructureType::eGeometryNV: return "GeometryNV"; + case StructureType::eGeometryTrianglesNV: return "GeometryTrianglesNV"; + case StructureType::eGeometryAabbNV: return "GeometryAabbNV"; + case StructureType::eBindAccelerationStructureMemoryInfoNV: return "BindAccelerationStructureMemoryInfoNV"; + case StructureType::eWriteDescriptorSetAccelerationStructureNV: + return "WriteDescriptorSetAccelerationStructureNV"; + case StructureType::eAccelerationStructureMemoryRequirementsInfoNV: + return "AccelerationStructureMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceRayTracingPropertiesNV: return "PhysicalDeviceRayTracingPropertiesNV"; + case StructureType::eRayTracingShaderGroupCreateInfoNV: return "RayTracingShaderGroupCreateInfoNV"; + case StructureType::eAccelerationStructureInfoNV: return "AccelerationStructureInfoNV"; + case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV: + return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV"; + case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV: + return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; + case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT: + return "PhysicalDeviceImageViewImageFormatInfoEXT"; + case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT: + return "FilterCubicImageViewImageFormatPropertiesEXT"; + case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT: return "DeviceQueueGlobalPriorityCreateInfoEXT"; + case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT"; + case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT"; + case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: + return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; + case StructureType::ePhysicalDeviceShaderClockFeaturesKHR: return "PhysicalDeviceShaderClockFeaturesKHR"; + case StructureType::ePipelineCompilerControlCreateInfoAMD: return "PipelineCompilerControlCreateInfoAMD"; + case StructureType::eCalibratedTimestampInfoEXT: return "CalibratedTimestampInfoEXT"; + case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD"; +#if defined( VK_ENABLE_BETA_EXTENSIONS 
) + case StructureType::eVideoDecodeH265CapabilitiesEXT: return "VideoDecodeH265CapabilitiesEXT"; + case StructureType::eVideoDecodeH265SessionCreateInfoEXT: return "VideoDecodeH265SessionCreateInfoEXT"; + case StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT: + return "VideoDecodeH265SessionParametersCreateInfoEXT"; + case StructureType::eVideoDecodeH265SessionParametersAddInfoEXT: + return "VideoDecodeH265SessionParametersAddInfoEXT"; + case StructureType::eVideoDecodeH265ProfileEXT: return "VideoDecodeH265ProfileEXT"; + case StructureType::eVideoDecodeH265PictureInfoEXT: return "VideoDecodeH265PictureInfoEXT"; + case StructureType::eVideoDecodeH265DpbSlotInfoEXT: return "VideoDecodeH265DpbSlotInfoEXT"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: + return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; + case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT: + return "PipelineVertexInputDivisorStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT: + return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT"; +#if defined( VK_USE_PLATFORM_GGP ) + case StructureType::ePresentFrameTokenGGP: return "PresentFrameTokenGGP"; +#endif /*VK_USE_PLATFORM_GGP*/ + case StructureType::ePipelineCreationFeedbackCreateInfoEXT: return "PipelineCreationFeedbackCreateInfoEXT"; + case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV: + return "PhysicalDeviceComputeShaderDerivativesFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV"; + case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV: + return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV"; + case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV: + return "PhysicalDeviceShaderImageFootprintFeaturesNV"; + case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV: + return "PipelineViewportExclusiveScissorStateCreateInfoNV"; + case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV"; + case StructureType::eCheckpointDataNV: return "CheckpointDataNV"; + case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV"; + case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL: + return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; + case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL"; + case StructureType::eInitializePerformanceApiInfoINTEL: return "InitializePerformanceApiInfoINTEL"; + case StructureType::ePerformanceMarkerInfoINTEL: return "PerformanceMarkerInfoINTEL"; + case StructureType::ePerformanceStreamMarkerInfoINTEL: return "PerformanceStreamMarkerInfoINTEL"; + case StructureType::ePerformanceOverrideInfoINTEL: return "PerformanceOverrideInfoINTEL"; + case StructureType::ePerformanceConfigurationAcquireInfoINTEL: return "PerformanceConfigurationAcquireInfoINTEL"; + case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT: return "PhysicalDevicePciBusInfoPropertiesEXT"; + case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD: return "DisplayNativeHdrSurfaceCapabilitiesAMD"; + case 
StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD: return "SwapchainDisplayNativeHdrCreateInfoAMD"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA: return "ImagepipeSurfaceCreateInfoFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + case StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR: + return "PhysicalDeviceShaderTerminateInvocationFeaturesKHR"; +#if defined( VK_USE_PLATFORM_METAL_EXT ) + case StructureType::eMetalSurfaceCreateInfoEXT: return "MetalSurfaceCreateInfoEXT"; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT: + return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT: + return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; + case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT: + return "RenderPassFragmentDensityMapCreateInfoEXT"; + case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT: + return "PhysicalDeviceSubgroupSizeControlPropertiesEXT"; + case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT: + return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT"; + case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT: + return "PhysicalDeviceSubgroupSizeControlFeaturesEXT"; + case StructureType::eFragmentShadingRateAttachmentInfoKHR: return "FragmentShadingRateAttachmentInfoKHR"; + case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR: + return "PipelineFragmentShadingRateStateCreateInfoKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR: + return "PhysicalDeviceFragmentShadingRatePropertiesKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR: + return "PhysicalDeviceFragmentShadingRateFeaturesKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRateKHR: return "PhysicalDeviceFragmentShadingRateKHR"; + case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD"; + case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD"; + case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT: + return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT"; + case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT: return "PhysicalDeviceMemoryBudgetPropertiesEXT"; + case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT: return "PhysicalDeviceMemoryPriorityFeaturesEXT"; + case StructureType::eMemoryPriorityAllocateInfoEXT: return "MemoryPriorityAllocateInfoEXT"; + case StructureType::eSurfaceProtectedCapabilitiesKHR: return "SurfaceProtectedCapabilitiesKHR"; + case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV: + return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV"; + case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT: + return "PhysicalDeviceBufferDeviceAddressFeaturesEXT"; + case StructureType::eBufferDeviceAddressCreateInfoEXT: return "BufferDeviceAddressCreateInfoEXT"; + case StructureType::ePhysicalDeviceToolPropertiesEXT: return "PhysicalDeviceToolPropertiesEXT"; + case StructureType::eValidationFeaturesEXT: return "ValidationFeaturesEXT"; + case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV: + return "PhysicalDeviceCooperativeMatrixFeaturesNV"; + case StructureType::eCooperativeMatrixPropertiesNV: return "CooperativeMatrixPropertiesNV"; + case 
StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV: + return "PhysicalDeviceCooperativeMatrixPropertiesNV"; + case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV: + return "PhysicalDeviceCoverageReductionModeFeaturesNV"; + case StructureType::ePipelineCoverageReductionStateCreateInfoNV: + return "PipelineCoverageReductionStateCreateInfoNV"; + case StructureType::eFramebufferMixedSamplesCombinationNV: return "FramebufferMixedSamplesCombinationNV"; + case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT: + return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT"; + case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT: + return "PhysicalDeviceYcbcrImageArraysFeaturesEXT"; + case StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT: return "PhysicalDeviceProvokingVertexFeaturesEXT"; + case StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT: + return "PipelineRasterizationProvokingVertexStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT: + return "PhysicalDeviceProvokingVertexPropertiesEXT"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eSurfaceFullScreenExclusiveInfoEXT: return "SurfaceFullScreenExclusiveInfoEXT"; + case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT: + return "SurfaceCapabilitiesFullScreenExclusiveEXT"; + case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT: return "SurfaceFullScreenExclusiveWin32InfoEXT"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eHeadlessSurfaceCreateInfoEXT: return "HeadlessSurfaceCreateInfoEXT"; + case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT: + return "PhysicalDeviceLineRasterizationFeaturesEXT"; + case StructureType::ePipelineRasterizationLineStateCreateInfoEXT: + return "PipelineRasterizationLineStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT: + return "PhysicalDeviceLineRasterizationPropertiesEXT"; + case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT: + return "PhysicalDeviceShaderAtomicFloatFeaturesEXT"; + case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT: return "PhysicalDeviceIndexTypeUint8FeaturesEXT"; + case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT: + return "PhysicalDeviceExtendedDynamicStateFeaturesEXT"; + case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR: + return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; + case StructureType::ePipelineInfoKHR: return "PipelineInfoKHR"; + case StructureType::ePipelineExecutablePropertiesKHR: return "PipelineExecutablePropertiesKHR"; + case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR"; + case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR"; + case StructureType::ePipelineExecutableInternalRepresentationKHR: + return "PipelineExecutableInternalRepresentationKHR"; + case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT: + return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV: + return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV"; + case StructureType::eGraphicsShaderGroupCreateInfoNV: return "GraphicsShaderGroupCreateInfoNV"; + case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV: return "GraphicsPipelineShaderGroupsCreateInfoNV"; + case StructureType::eIndirectCommandsLayoutTokenNV: return 
"IndirectCommandsLayoutTokenNV"; + case StructureType::eIndirectCommandsLayoutCreateInfoNV: return "IndirectCommandsLayoutCreateInfoNV"; + case StructureType::eGeneratedCommandsInfoNV: return "GeneratedCommandsInfoNV"; + case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV: + return "GeneratedCommandsMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV: + return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV"; + case StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV: + return "PhysicalDeviceInheritedViewportScissorFeaturesNV"; + case StructureType::eCommandBufferInheritanceViewportScissorInfoNV: + return "CommandBufferInheritanceViewportScissorInfoNV"; + case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT: + return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT"; + case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT: + return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT"; + case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM: + return "CommandBufferInheritanceRenderPassTransformInfoQCOM"; + case StructureType::eRenderPassTransformBeginInfoQCOM: return "RenderPassTransformBeginInfoQCOM"; + case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT: + return "PhysicalDeviceDeviceMemoryReportFeaturesEXT"; + case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT: return "DeviceDeviceMemoryReportCreateInfoEXT"; + case StructureType::eDeviceMemoryReportCallbackDataEXT: return "DeviceMemoryReportCallbackDataEXT"; + case StructureType::ePhysicalDeviceRobustness2FeaturesEXT: return "PhysicalDeviceRobustness2FeaturesEXT"; + case StructureType::ePhysicalDeviceRobustness2PropertiesEXT: return "PhysicalDeviceRobustness2PropertiesEXT"; + case StructureType::eSamplerCustomBorderColorCreateInfoEXT: return "SamplerCustomBorderColorCreateInfoEXT"; + case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT: + return "PhysicalDeviceCustomBorderColorPropertiesEXT"; + case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT: + return "PhysicalDeviceCustomBorderColorFeaturesEXT"; + case StructureType::ePipelineLibraryCreateInfoKHR: return "PipelineLibraryCreateInfoKHR"; + case StructureType::ePhysicalDevicePrivateDataFeaturesEXT: return "PhysicalDevicePrivateDataFeaturesEXT"; + case StructureType::eDevicePrivateDataCreateInfoEXT: return "DevicePrivateDataCreateInfoEXT"; + case StructureType::ePrivateDataSlotCreateInfoEXT: return "PrivateDataSlotCreateInfoEXT"; + case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT: + return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eVideoEncodeInfoKHR: return "VideoEncodeInfoKHR"; + case StructureType::eVideoEncodeRateControlInfoKHR: return "VideoEncodeRateControlInfoKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV: + return "PhysicalDeviceDiagnosticsConfigFeaturesNV"; + case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV"; + case StructureType::eMemoryBarrier2KHR: return "MemoryBarrier2KHR"; + case StructureType::eBufferMemoryBarrier2KHR: return "BufferMemoryBarrier2KHR"; + case StructureType::eImageMemoryBarrier2KHR: return "ImageMemoryBarrier2KHR"; + case StructureType::eDependencyInfoKHR: return "DependencyInfoKHR"; + case StructureType::eSubmitInfo2KHR: return "SubmitInfo2KHR"; + case 
StructureType::eSemaphoreSubmitInfoKHR: return "SemaphoreSubmitInfoKHR"; + case StructureType::eCommandBufferSubmitInfoKHR: return "CommandBufferSubmitInfoKHR"; + case StructureType::ePhysicalDeviceSynchronization2FeaturesKHR: + return "PhysicalDeviceSynchronization2FeaturesKHR"; + case StructureType::eQueueFamilyCheckpointProperties2Nv: return "QueueFamilyCheckpointProperties2Nv"; + case StructureType::eCheckpointData2Nv: return "CheckpointData2Nv"; + case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR: + return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV: + return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV"; + case StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV: + return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV"; + case StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV: + return "PipelineFragmentShadingRateEnumStateCreateInfoNV"; + case StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT: + return "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT: + return "PhysicalDeviceFragmentDensityMap2FeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT: + return "PhysicalDeviceFragmentDensityMap2PropertiesEXT"; + case StructureType::eCopyCommandTransformInfoQCOM: return "CopyCommandTransformInfoQCOM"; + case StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT: return "PhysicalDeviceImageRobustnessFeaturesEXT"; + case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR: + return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR"; + case StructureType::eCopyBufferInfo2KHR: return "CopyBufferInfo2KHR"; + case StructureType::eCopyImageInfo2KHR: return "CopyImageInfo2KHR"; + case StructureType::eCopyBufferToImageInfo2KHR: return "CopyBufferToImageInfo2KHR"; + case StructureType::eCopyImageToBufferInfo2KHR: return "CopyImageToBufferInfo2KHR"; + case StructureType::eBlitImageInfo2KHR: return "BlitImageInfo2KHR"; + case StructureType::eResolveImageInfo2KHR: return "ResolveImageInfo2KHR"; + case StructureType::eBufferCopy2KHR: return "BufferCopy2KHR"; + case StructureType::eImageCopy2KHR: return "ImageCopy2KHR"; + case StructureType::eImageBlit2KHR: return "ImageBlit2KHR"; + case StructureType::eBufferImageCopy2KHR: return "BufferImageCopy2KHR"; + case StructureType::eImageResolve2KHR: return "ImageResolve2KHR"; + case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT"; +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + case StructureType::eDirectfbSurfaceCreateInfoEXT: return "DirectfbSurfaceCreateInfoEXT"; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE: + return "PhysicalDeviceMutableDescriptorTypeFeaturesVALVE"; + case StructureType::eMutableDescriptorTypeCreateInfoVALVE: return "MutableDescriptorTypeCreateInfoVALVE"; + case StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT: + return "PhysicalDeviceVertexInputDynamicStateFeaturesEXT"; + case StructureType::eVertexInputBindingDescription2EXT: return "VertexInputBindingDescription2EXT"; + case StructureType::eVertexInputAttributeDescription2EXT: return "VertexInputAttributeDescription2EXT"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case StructureType::eImportMemoryZirconHandleInfoFUCHSIA: return "ImportMemoryZirconHandleInfoFUCHSIA"; + 
case StructureType::eMemoryZirconHandlePropertiesFUCHSIA: return "MemoryZirconHandlePropertiesFUCHSIA"; + case StructureType::eMemoryGetZirconHandleInfoFUCHSIA: return "MemoryGetZirconHandleInfoFUCHSIA"; + case StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA: return "ImportSemaphoreZirconHandleInfoFUCHSIA"; + case StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA: return "SemaphoreGetZirconHandleInfoFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + case StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT: + return "PhysicalDeviceExtendedDynamicState2FeaturesEXT"; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + case StructureType::eScreenSurfaceCreateInfoQNX: return "ScreenSurfaceCreateInfoQNX"; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + case StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT: + return "PhysicalDeviceColorWriteEnableFeaturesEXT"; + case StructureType::ePipelineColorWriteCreateInfoEXT: return "PipelineColorWriteCreateInfoEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ObjectType + { + eUnknown = VK_OBJECT_TYPE_UNKNOWN, + eInstance = VK_OBJECT_TYPE_INSTANCE, + ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, + eDevice = VK_OBJECT_TYPE_DEVICE, + eQueue = VK_OBJECT_TYPE_QUEUE, + eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, + eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, + eFence = VK_OBJECT_TYPE_FENCE, + eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, + eBuffer = VK_OBJECT_TYPE_BUFFER, + eImage = VK_OBJECT_TYPE_IMAGE, + eEvent = VK_OBJECT_TYPE_EVENT, + eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, + eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW, + eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, + eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, + ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, + ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, + eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, + ePipeline = VK_OBJECT_TYPE_PIPELINE, + eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + eSampler = VK_OBJECT_TYPE_SAMPLER, + eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, + eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, + eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, + eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, + eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, + eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, + eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, + eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, + eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoSessionKHR = VK_OBJECT_TYPE_VIDEO_SESSION_KHR, + eVideoSessionParametersKHR = VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eCuModuleNVX = VK_OBJECT_TYPE_CU_MODULE_NVX, + eCuFunctionNVX = VK_OBJECT_TYPE_CU_FUNCTION_NVX, + eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, + eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, + eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, + eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, + ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, + eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR, + eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV, + ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT, + eDescriptorUpdateTemplateKHR = 
VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, + eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ObjectType value ) + { + switch ( value ) + { + case ObjectType::eUnknown: return "Unknown"; + case ObjectType::eInstance: return "Instance"; + case ObjectType::ePhysicalDevice: return "PhysicalDevice"; + case ObjectType::eDevice: return "Device"; + case ObjectType::eQueue: return "Queue"; + case ObjectType::eSemaphore: return "Semaphore"; + case ObjectType::eCommandBuffer: return "CommandBuffer"; + case ObjectType::eFence: return "Fence"; + case ObjectType::eDeviceMemory: return "DeviceMemory"; + case ObjectType::eBuffer: return "Buffer"; + case ObjectType::eImage: return "Image"; + case ObjectType::eEvent: return "Event"; + case ObjectType::eQueryPool: return "QueryPool"; + case ObjectType::eBufferView: return "BufferView"; + case ObjectType::eImageView: return "ImageView"; + case ObjectType::eShaderModule: return "ShaderModule"; + case ObjectType::ePipelineCache: return "PipelineCache"; + case ObjectType::ePipelineLayout: return "PipelineLayout"; + case ObjectType::eRenderPass: return "RenderPass"; + case ObjectType::ePipeline: return "Pipeline"; + case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout"; + case ObjectType::eSampler: return "Sampler"; + case ObjectType::eDescriptorPool: return "DescriptorPool"; + case ObjectType::eDescriptorSet: return "DescriptorSet"; + case ObjectType::eFramebuffer: return "Framebuffer"; + case ObjectType::eCommandPool: return "CommandPool"; + case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case ObjectType::eSurfaceKHR: return "SurfaceKHR"; + case ObjectType::eSwapchainKHR: return "SwapchainKHR"; + case ObjectType::eDisplayKHR: return "DisplayKHR"; + case ObjectType::eDisplayModeKHR: return "DisplayModeKHR"; + case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ObjectType::eVideoSessionKHR: return "VideoSessionKHR"; + case ObjectType::eVideoSessionParametersKHR: return "VideoSessionParametersKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case ObjectType::eCuModuleNVX: return "CuModuleNVX"; + case ObjectType::eCuFunctionNVX: return "CuFunctionNVX"; + case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT"; + case ObjectType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT"; + case ObjectType::eAccelerationStructureNV: return "AccelerationStructureNV"; + case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL"; + case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR"; + case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV"; + case ObjectType::ePrivateDataSlotEXT: return "PrivateDataSlotEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VendorId + { + eVIV = VK_VENDOR_ID_VIV, + eVSI = VK_VENDOR_ID_VSI, + eKazan = VK_VENDOR_ID_KAZAN, + eCodeplay = VK_VENDOR_ID_CODEPLAY, + eMESA = VK_VENDOR_ID_MESA, + ePocl = VK_VENDOR_ID_POCL + }; + + VULKAN_HPP_INLINE std::string to_string( VendorId value ) + { + switch ( value ) + { + case VendorId::eVIV: return "VIV"; + case VendorId::eVSI: return "VSI"; + case VendorId::eKazan: return "Kazan"; + case 
VendorId::eCodeplay: return "Codeplay"; + case VendorId::eMESA: return "MESA"; + case VendorId::ePocl: return "Pocl"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineCacheHeaderVersion + { + eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value ) + { + switch ( value ) + { + case PipelineCacheHeaderVersion::eOne: return "One"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class Format + { + eUndefined = VK_FORMAT_UNDEFINED, + eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8, + eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16, + eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16, + eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16, + eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16, + eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16, + eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16, + eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16, + eR8Unorm = VK_FORMAT_R8_UNORM, + eR8Snorm = VK_FORMAT_R8_SNORM, + eR8Uscaled = VK_FORMAT_R8_USCALED, + eR8Sscaled = VK_FORMAT_R8_SSCALED, + eR8Uint = VK_FORMAT_R8_UINT, + eR8Sint = VK_FORMAT_R8_SINT, + eR8Srgb = VK_FORMAT_R8_SRGB, + eR8G8Unorm = VK_FORMAT_R8G8_UNORM, + eR8G8Snorm = VK_FORMAT_R8G8_SNORM, + eR8G8Uscaled = VK_FORMAT_R8G8_USCALED, + eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED, + eR8G8Uint = VK_FORMAT_R8G8_UINT, + eR8G8Sint = VK_FORMAT_R8G8_SINT, + eR8G8Srgb = VK_FORMAT_R8G8_SRGB, + eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM, + eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM, + eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED, + eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED, + eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT, + eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT, + eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB, + eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM, + eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM, + eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED, + eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED, + eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT, + eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT, + eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB, + eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM, + eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM, + eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED, + eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED, + eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT, + eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT, + eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB, + eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM, + eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM, + eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED, + eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED, + eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT, + eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT, + eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB, + eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32, + eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32, + eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32, + eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32, + eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32, + eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32, + eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32, + eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32, + eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32, + eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32, + eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32, + eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32, + 
eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32, + eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32, + eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32, + eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32, + eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32, + eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32, + eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32, + eR16Unorm = VK_FORMAT_R16_UNORM, + eR16Snorm = VK_FORMAT_R16_SNORM, + eR16Uscaled = VK_FORMAT_R16_USCALED, + eR16Sscaled = VK_FORMAT_R16_SSCALED, + eR16Uint = VK_FORMAT_R16_UINT, + eR16Sint = VK_FORMAT_R16_SINT, + eR16Sfloat = VK_FORMAT_R16_SFLOAT, + eR16G16Unorm = VK_FORMAT_R16G16_UNORM, + eR16G16Snorm = VK_FORMAT_R16G16_SNORM, + eR16G16Uscaled = VK_FORMAT_R16G16_USCALED, + eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED, + eR16G16Uint = VK_FORMAT_R16G16_UINT, + eR16G16Sint = VK_FORMAT_R16G16_SINT, + eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT, + eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM, + eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM, + eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED, + eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED, + eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT, + eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT, + eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT, + eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM, + eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM, + eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED, + eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED, + eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT, + eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT, + eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT, + eR32Uint = VK_FORMAT_R32_UINT, + eR32Sint = VK_FORMAT_R32_SINT, + eR32Sfloat = VK_FORMAT_R32_SFLOAT, + eR32G32Uint = VK_FORMAT_R32G32_UINT, + eR32G32Sint = VK_FORMAT_R32G32_SINT, + eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT, + eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT, + eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT, + eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT, + eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT, + eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT, + eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT, + eR64Uint = VK_FORMAT_R64_UINT, + eR64Sint = VK_FORMAT_R64_SINT, + eR64Sfloat = VK_FORMAT_R64_SFLOAT, + eR64G64Uint = VK_FORMAT_R64G64_UINT, + eR64G64Sint = VK_FORMAT_R64G64_SINT, + eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT, + eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT, + eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT, + eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT, + eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT, + eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT, + eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT, + eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32, + eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, + eD16Unorm = VK_FORMAT_D16_UNORM, + eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32, + eD32Sfloat = VK_FORMAT_D32_SFLOAT, + eS8Uint = VK_FORMAT_S8_UINT, + eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT, + eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT, + eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT, + eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK, + eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK, + eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK, + eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK, + eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK, + eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK, + eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK, + eBc3SrgbBlock = 
VK_FORMAT_BC3_SRGB_BLOCK, + eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK, + eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK, + eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK, + eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK, + eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK, + eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK, + eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK, + eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK, + eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, + eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, + eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, + eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, + eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, + eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, + eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK, + eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK, + eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK, + eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK, + eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK, + eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK, + eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK, + eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK, + eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK, + eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK, + eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK, + eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK, + eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK, + eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK, + eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK, + eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK, + eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK, + eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK, + eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK, + eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK, + eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK, + eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK, + eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK, + eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK, + eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK, + eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK, + eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK, + eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK, + eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK, + eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK, + eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK, + eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM, + eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM, + eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16, + eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + 
eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16, + eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM, + eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM, + eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, + ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, + ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, + ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, + ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, + ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, + ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, + ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, + eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, + eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, + eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, + eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, + eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, + eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, + eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, + eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, + eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, + eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, + eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, + eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, + eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, + eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, + eG8B8R82Plane444UnormEXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT, + eG10X6B10X6R10X62Plane444Unorm3Pack16EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT, + eG12X4B12X4R12X42Plane444Unorm3Pack16EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT, + eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT, + eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT, + eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT, + 
eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, + eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, + eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, + eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, + eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, + eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, + eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, + eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, + eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, + eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, + eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, + eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, + eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, + eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR, + eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, + eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, + eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, + eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, + eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR, + eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, + eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, + eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, + eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, + eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, + eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, + eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, + eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, + eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR, + eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, + eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, + eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( Format value ) + { + switch ( value ) + { + case Format::eUndefined: return "Undefined"; + case Format::eR4G4UnormPack8: return "R4G4UnormPack8"; + case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16"; + case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16"; + case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16"; + case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16"; + case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16"; + case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16"; + case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16"; + case Format::eR8Unorm: return "R8Unorm"; + case 
Format::eR8Snorm: return "R8Snorm"; + case Format::eR8Uscaled: return "R8Uscaled"; + case Format::eR8Sscaled: return "R8Sscaled"; + case Format::eR8Uint: return "R8Uint"; + case Format::eR8Sint: return "R8Sint"; + case Format::eR8Srgb: return "R8Srgb"; + case Format::eR8G8Unorm: return "R8G8Unorm"; + case Format::eR8G8Snorm: return "R8G8Snorm"; + case Format::eR8G8Uscaled: return "R8G8Uscaled"; + case Format::eR8G8Sscaled: return "R8G8Sscaled"; + case Format::eR8G8Uint: return "R8G8Uint"; + case Format::eR8G8Sint: return "R8G8Sint"; + case Format::eR8G8Srgb: return "R8G8Srgb"; + case Format::eR8G8B8Unorm: return "R8G8B8Unorm"; + case Format::eR8G8B8Snorm: return "R8G8B8Snorm"; + case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled"; + case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled"; + case Format::eR8G8B8Uint: return "R8G8B8Uint"; + case Format::eR8G8B8Sint: return "R8G8B8Sint"; + case Format::eR8G8B8Srgb: return "R8G8B8Srgb"; + case Format::eB8G8R8Unorm: return "B8G8R8Unorm"; + case Format::eB8G8R8Snorm: return "B8G8R8Snorm"; + case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled"; + case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled"; + case Format::eB8G8R8Uint: return "B8G8R8Uint"; + case Format::eB8G8R8Sint: return "B8G8R8Sint"; + case Format::eB8G8R8Srgb: return "B8G8R8Srgb"; + case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm"; + case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm"; + case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled"; + case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled"; + case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint"; + case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint"; + case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb"; + case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm"; + case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm"; + case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled"; + case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled"; + case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint"; + case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint"; + case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb"; + case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32"; + case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32"; + case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32"; + case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32"; + case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32"; + case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32"; + case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32"; + case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32"; + case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32"; + case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32"; + case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32"; + case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32"; + case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32"; + case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32"; + case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32"; + case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32"; + case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32"; + case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32"; + case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32"; + case Format::eR16Unorm: return "R16Unorm"; + case Format::eR16Snorm: return "R16Snorm"; + case Format::eR16Uscaled: 
return "R16Uscaled"; + case Format::eR16Sscaled: return "R16Sscaled"; + case Format::eR16Uint: return "R16Uint"; + case Format::eR16Sint: return "R16Sint"; + case Format::eR16Sfloat: return "R16Sfloat"; + case Format::eR16G16Unorm: return "R16G16Unorm"; + case Format::eR16G16Snorm: return "R16G16Snorm"; + case Format::eR16G16Uscaled: return "R16G16Uscaled"; + case Format::eR16G16Sscaled: return "R16G16Sscaled"; + case Format::eR16G16Uint: return "R16G16Uint"; + case Format::eR16G16Sint: return "R16G16Sint"; + case Format::eR16G16Sfloat: return "R16G16Sfloat"; + case Format::eR16G16B16Unorm: return "R16G16B16Unorm"; + case Format::eR16G16B16Snorm: return "R16G16B16Snorm"; + case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled"; + case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled"; + case Format::eR16G16B16Uint: return "R16G16B16Uint"; + case Format::eR16G16B16Sint: return "R16G16B16Sint"; + case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat"; + case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm"; + case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm"; + case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled"; + case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled"; + case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint"; + case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint"; + case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat"; + case Format::eR32Uint: return "R32Uint"; + case Format::eR32Sint: return "R32Sint"; + case Format::eR32Sfloat: return "R32Sfloat"; + case Format::eR32G32Uint: return "R32G32Uint"; + case Format::eR32G32Sint: return "R32G32Sint"; + case Format::eR32G32Sfloat: return "R32G32Sfloat"; + case Format::eR32G32B32Uint: return "R32G32B32Uint"; + case Format::eR32G32B32Sint: return "R32G32B32Sint"; + case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat"; + case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint"; + case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint"; + case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat"; + case Format::eR64Uint: return "R64Uint"; + case Format::eR64Sint: return "R64Sint"; + case Format::eR64Sfloat: return "R64Sfloat"; + case Format::eR64G64Uint: return "R64G64Uint"; + case Format::eR64G64Sint: return "R64G64Sint"; + case Format::eR64G64Sfloat: return "R64G64Sfloat"; + case Format::eR64G64B64Uint: return "R64G64B64Uint"; + case Format::eR64G64B64Sint: return "R64G64B64Sint"; + case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat"; + case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint"; + case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint"; + case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat"; + case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32"; + case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32"; + case Format::eD16Unorm: return "D16Unorm"; + case Format::eX8D24UnormPack32: return "X8D24UnormPack32"; + case Format::eD32Sfloat: return "D32Sfloat"; + case Format::eS8Uint: return "S8Uint"; + case Format::eD16UnormS8Uint: return "D16UnormS8Uint"; + case Format::eD24UnormS8Uint: return "D24UnormS8Uint"; + case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint"; + case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock"; + case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock"; + case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock"; + case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock"; + case Format::eBc2UnormBlock: return "Bc2UnormBlock"; + case Format::eBc2SrgbBlock: return 
"Bc2SrgbBlock"; + case Format::eBc3UnormBlock: return "Bc3UnormBlock"; + case Format::eBc3SrgbBlock: return "Bc3SrgbBlock"; + case Format::eBc4UnormBlock: return "Bc4UnormBlock"; + case Format::eBc4SnormBlock: return "Bc4SnormBlock"; + case Format::eBc5UnormBlock: return "Bc5UnormBlock"; + case Format::eBc5SnormBlock: return "Bc5SnormBlock"; + case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock"; + case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock"; + case Format::eBc7UnormBlock: return "Bc7UnormBlock"; + case Format::eBc7SrgbBlock: return "Bc7SrgbBlock"; + case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock"; + case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock"; + case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock"; + case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock"; + case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock"; + case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock"; + case Format::eEacR11UnormBlock: return "EacR11UnormBlock"; + case Format::eEacR11SnormBlock: return "EacR11SnormBlock"; + case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock"; + case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock"; + case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock"; + case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock"; + case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock"; + case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock"; + case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock"; + case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock"; + case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock"; + case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock"; + case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock"; + case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock"; + case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock"; + case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock"; + case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock"; + case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock"; + case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock"; + case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock"; + case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock"; + case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock"; + case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock"; + case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock"; + case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock"; + case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock"; + case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock"; + case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock"; + case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock"; + case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock"; + case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock"; + case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock"; + case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm"; + case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm"; + case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm"; + case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm"; + case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm"; + case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm"; + case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm"; + case Format::eR10X6UnormPack16: return "R10X6UnormPack16"; + 
case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16"; + case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16"; + case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; + case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; + case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16"; + case Format::eR12X4UnormPack16: return "R12X4UnormPack16"; + case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16"; + case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16"; + case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; + case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; + case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16"; + case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm"; + case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm"; + case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm"; + case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm"; + case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm"; + case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm"; + case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm"; + case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG"; + case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG"; + case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG"; + case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG"; + case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG"; + case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG"; + case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG"; + case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG"; + case Format::eAstc4x4SfloatBlockEXT: return "Astc4x4SfloatBlockEXT"; + case Format::eAstc5x4SfloatBlockEXT: return "Astc5x4SfloatBlockEXT"; + case Format::eAstc5x5SfloatBlockEXT: return "Astc5x5SfloatBlockEXT"; + case Format::eAstc6x5SfloatBlockEXT: return "Astc6x5SfloatBlockEXT"; + case Format::eAstc6x6SfloatBlockEXT: return "Astc6x6SfloatBlockEXT"; + case Format::eAstc8x5SfloatBlockEXT: return "Astc8x5SfloatBlockEXT"; + case Format::eAstc8x6SfloatBlockEXT: return "Astc8x6SfloatBlockEXT"; + case Format::eAstc8x8SfloatBlockEXT: return "Astc8x8SfloatBlockEXT"; + case Format::eAstc10x5SfloatBlockEXT: return "Astc10x5SfloatBlockEXT"; + case 
Format::eAstc10x6SfloatBlockEXT: return "Astc10x6SfloatBlockEXT"; + case Format::eAstc10x8SfloatBlockEXT: return "Astc10x8SfloatBlockEXT"; + case Format::eAstc10x10SfloatBlockEXT: return "Astc10x10SfloatBlockEXT"; + case Format::eAstc12x10SfloatBlockEXT: return "Astc12x10SfloatBlockEXT"; + case Format::eAstc12x12SfloatBlockEXT: return "Astc12x12SfloatBlockEXT"; + case Format::eG8B8R82Plane444UnormEXT: return "G8B8R82Plane444UnormEXT"; + case Format::eG10X6B10X6R10X62Plane444Unorm3Pack16EXT: return "G10X6B10X6R10X62Plane444Unorm3Pack16EXT"; + case Format::eG12X4B12X4R12X42Plane444Unorm3Pack16EXT: return "G12X4B12X4R12X42Plane444Unorm3Pack16EXT"; + case Format::eG16B16R162Plane444UnormEXT: return "G16B16R162Plane444UnormEXT"; + case Format::eA4R4G4B4UnormPack16EXT: return "A4R4G4B4UnormPack16EXT"; + case Format::eA4B4G4R4UnormPack16EXT: return "A4B4G4R4UnormPack16EXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FormatFeatureFlagBits : VkFormatFeatureFlags + { + eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, + eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, + eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, + eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, + eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, + eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, + eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, + eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, + eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, + eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, + eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, + eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + eSampledImageYcbcrConversionSeparateReconstructionFilter = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicit = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT, + eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, + eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR, + eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, + eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR, + eVideoEncodeDpbKHR 
= VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, + eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, + eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR, + eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, + eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, + eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR, + eSampledImageYcbcrConversionLinearFilterKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, + eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, + eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, + eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value ) + { + switch ( value ) + { + case FormatFeatureFlagBits::eSampledImage: return "SampledImage"; + case FormatFeatureFlagBits::eStorageImage: return "StorageImage"; + case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic"; + case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic"; + case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer"; + case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment"; + case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend"; + case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc"; + case FormatFeatureFlagBits::eBlitDst: return "BlitDst"; + case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear"; + case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc"; + case FormatFeatureFlagBits::eTransferDst: return "TransferDst"; + case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter: + return "SampledImageYcbcrConversionLinearFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter: + return "SampledImageYcbcrConversionSeparateReconstructionFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit: + return "SampledImageYcbcrConversionChromaReconstructionExplicit"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable: + return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable"; + case FormatFeatureFlagBits::eDisjoint: return "Disjoint"; + case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples"; + case FormatFeatureFlagBits::eSampledImageFilterMinmax: return "SampledImageFilterMinmax"; + case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case FormatFeatureFlagBits::eVideoDecodeOutputKHR: return 
"VideoDecodeOutputKHR"; + case FormatFeatureFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR"; + case FormatFeatureFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; + case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case FormatFeatureFlagBits::eVideoEncodeInputKHR: return "VideoEncodeInputKHR"; + case FormatFeatureFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageCreateFlagBits : VkImageCreateFlags + { + eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, + eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, + eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, + eAlias = VK_IMAGE_CREATE_ALIAS_BIT, + eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + eProtected = VK_IMAGE_CREATE_PROTECTED_BIT, + eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT, + eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, + eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT, + eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, + e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, + eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR, + eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, + eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR, + eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, + eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value ) + { + switch ( value ) + { + case ImageCreateFlagBits::eSparseBinding: return "SparseBinding"; + case ImageCreateFlagBits::eSparseResidency: return "SparseResidency"; + case ImageCreateFlagBits::eSparseAliased: return "SparseAliased"; + case ImageCreateFlagBits::eMutableFormat: return "MutableFormat"; + case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible"; + case ImageCreateFlagBits::eAlias: return "Alias"; + case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible"; + case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible"; + case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage"; + case ImageCreateFlagBits::eProtected: return "Protected"; + case ImageCreateFlagBits::eDisjoint: return "Disjoint"; + case ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV"; + case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT"; + case ImageCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageTiling + { + eOptimal = VK_IMAGE_TILING_OPTIMAL, + eLinear = 
VK_IMAGE_TILING_LINEAR, + eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ImageTiling value ) + { + switch ( value ) + { + case ImageTiling::eOptimal: return "Optimal"; + case ImageTiling::eLinear: return "Linear"; + case ImageTiling::eDrmFormatModifierEXT: return "DrmFormatModifierEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageType + { + e1D = VK_IMAGE_TYPE_1D, + e2D = VK_IMAGE_TYPE_2D, + e3D = VK_IMAGE_TYPE_3D + }; + + VULKAN_HPP_INLINE std::string to_string( ImageType value ) + { + switch ( value ) + { + case ImageType::e1D: return "1D"; + case ImageType::e2D: return "2D"; + case ImageType::e3D: return "3D"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageUsageFlagBits : VkImageUsageFlags + { + eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, + eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, + eStorage = VK_IMAGE_USAGE_STORAGE_BIT, + eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, + eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, + eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, + eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, + eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, + eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, + eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value ) + { + switch ( value ) + { + case ImageUsageFlagBits::eTransferSrc: return "TransferSrc"; + case ImageUsageFlagBits::eTransferDst: return "TransferDst"; + case ImageUsageFlagBits::eSampled: return "Sampled"; + case ImageUsageFlagBits::eStorage: return "Storage"; + case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment"; + case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment"; + case ImageUsageFlagBits::eInputAttachment: return "InputAttachment"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ImageUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; + case ImageUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case ImageUsageFlagBits::eShadingRateImageNV: return "ShadingRateImageNV"; + case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; + case ImageUsageFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; +#endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class InstanceCreateFlagBits + { + }; + + VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits ) + { + return "(void)"; + } + + enum class InternalAllocationType + { + eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + }; + + VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value ) + { + switch ( value ) + { + case InternalAllocationType::eExecutable: return "Executable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class MemoryHeapFlagBits : VkMemoryHeapFlags + { + eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT, + eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value ) + { + switch ( value ) + { + case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal"; + case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags + { + eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, + eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, + eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT, + eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT, + eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT, + eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD, + eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD + }; + + VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value ) + { + switch ( value ) + { + case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal"; + case MemoryPropertyFlagBits::eHostVisible: return "HostVisible"; + case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent"; + case MemoryPropertyFlagBits::eHostCached: return "HostCached"; + case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated"; + case MemoryPropertyFlagBits::eProtected: return "Protected"; + case MemoryPropertyFlagBits::eDeviceCoherentAMD: return "DeviceCoherentAMD"; + case MemoryPropertyFlagBits::eDeviceUncachedAMD: return "DeviceUncachedAMD"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PhysicalDeviceType + { + eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER, + eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU, + eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU, + eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU, + eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU + }; + + VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value ) + { + switch ( value ) + { + case PhysicalDeviceType::eOther: return "Other"; + case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu"; + case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu"; + case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu"; + case PhysicalDeviceType::eCpu: return "Cpu"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueueFlagBits : VkQueueFlags + { + eGraphics = VK_QUEUE_GRAPHICS_BIT, + eCompute = VK_QUEUE_COMPUTE_BIT, + eTransfer = VK_QUEUE_TRANSFER_BIT, + eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT, + eProtected = VK_QUEUE_PROTECTED_BIT, 
+#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeKHR = VK_QUEUE_VIDEO_DECODE_BIT_KHR, + eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value ) + { + switch ( value ) + { + case QueueFlagBits::eGraphics: return "Graphics"; + case QueueFlagBits::eCompute: return "Compute"; + case QueueFlagBits::eTransfer: return "Transfer"; + case QueueFlagBits::eSparseBinding: return "SparseBinding"; + case QueueFlagBits::eProtected: return "Protected"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueueFlagBits::eVideoDecodeKHR: return "VideoDecodeKHR"; + case QueueFlagBits::eVideoEncodeKHR: return "VideoEncodeKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SampleCountFlagBits : VkSampleCountFlags + { + e1 = VK_SAMPLE_COUNT_1_BIT, + e2 = VK_SAMPLE_COUNT_2_BIT, + e4 = VK_SAMPLE_COUNT_4_BIT, + e8 = VK_SAMPLE_COUNT_8_BIT, + e16 = VK_SAMPLE_COUNT_16_BIT, + e32 = VK_SAMPLE_COUNT_32_BIT, + e64 = VK_SAMPLE_COUNT_64_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value ) + { + switch ( value ) + { + case SampleCountFlagBits::e1: return "1"; + case SampleCountFlagBits::e2: return "2"; + case SampleCountFlagBits::e4: return "4"; + case SampleCountFlagBits::e8: return "8"; + case SampleCountFlagBits::e16: return "16"; + case SampleCountFlagBits::e32: return "32"; + case SampleCountFlagBits::e64: return "64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SystemAllocationScope + { + eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, + eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE, + eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, + eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE + }; + + VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value ) + { + switch ( value ) + { + case SystemAllocationScope::eCommand: return "Command"; + case SystemAllocationScope::eObject: return "Object"; + case SystemAllocationScope::eCache: return "Cache"; + case SystemAllocationScope::eDevice: return "Device"; + case SystemAllocationScope::eInstance: return "Instance"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DeviceCreateFlagBits + { + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits ) + { + return "(void)"; + } + + enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags + { + eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value ) + { + switch ( value ) + { + case DeviceQueueCreateFlagBits::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineStageFlagBits : VkPipelineStageFlags + { + eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, + eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, + eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, + eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, + eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, + eFragmentShader = 
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, + eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, + eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, + eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, + eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT, + eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, + eHost = VK_PIPELINE_STAGE_HOST_BIT, + eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, + eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, + eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT, + eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, + eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, + eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, + eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, + eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, + eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, + eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, + eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value ) + { + switch ( value ) + { + case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe"; + case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect"; + case PipelineStageFlagBits::eVertexInput: return "VertexInput"; + case PipelineStageFlagBits::eVertexShader: return "VertexShader"; + case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader"; + case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader"; + case PipelineStageFlagBits::eGeometryShader: return "GeometryShader"; + case PipelineStageFlagBits::eFragmentShader: return "FragmentShader"; + case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests"; + case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests"; + case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput"; + case PipelineStageFlagBits::eComputeShader: return "ComputeShader"; + case PipelineStageFlagBits::eTransfer: return "Transfer"; + case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe"; + case PipelineStageFlagBits::eHost: return "Host"; + case PipelineStageFlagBits::eAllGraphics: return "AllGraphics"; + case PipelineStageFlagBits::eAllCommands: return "AllCommands"; + case PipelineStageFlagBits::eTransformFeedbackEXT: return "TransformFeedbackEXT"; + case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case PipelineStageFlagBits::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR"; + case PipelineStageFlagBits::eRayTracingShaderKHR: return "RayTracingShaderKHR"; + case PipelineStageFlagBits::eShadingRateImageNV: return "ShadingRateImageNV"; + case PipelineStageFlagBits::eTaskShaderNV: return "TaskShaderNV"; + case PipelineStageFlagBits::eMeshShaderNV: return "MeshShaderNV"; + case PipelineStageFlagBits::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT"; + case PipelineStageFlagBits::eCommandPreprocessNV: return 
"CommandPreprocessNV"; + case PipelineStageFlagBits::eNoneKHR: return "NoneKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageAspectFlagBits : VkImageAspectFlags + { + eColor = VK_IMAGE_ASPECT_COLOR_BIT, + eDepth = VK_IMAGE_ASPECT_DEPTH_BIT, + eStencil = VK_IMAGE_ASPECT_STENCIL_BIT, + eMetadata = VK_IMAGE_ASPECT_METADATA_BIT, + ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT, + ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT, + ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT, + eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT, + eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT, + eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT, + eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT, + ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, + ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, + ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value ) + { + switch ( value ) + { + case ImageAspectFlagBits::eColor: return "Color"; + case ImageAspectFlagBits::eDepth: return "Depth"; + case ImageAspectFlagBits::eStencil: return "Stencil"; + case ImageAspectFlagBits::eMetadata: return "Metadata"; + case ImageAspectFlagBits::ePlane0: return "Plane0"; + case ImageAspectFlagBits::ePlane1: return "Plane1"; + case ImageAspectFlagBits::ePlane2: return "Plane2"; + case ImageAspectFlagBits::eMemoryPlane0EXT: return "MemoryPlane0EXT"; + case ImageAspectFlagBits::eMemoryPlane1EXT: return "MemoryPlane1EXT"; + case ImageAspectFlagBits::eMemoryPlane2EXT: return "MemoryPlane2EXT"; + case ImageAspectFlagBits::eMemoryPlane3EXT: return "MemoryPlane3EXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags + { + eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT, + eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT, + eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value ) + { + switch ( value ) + { + case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail"; + case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize"; + case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags + { + eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value ) + { + switch ( value ) + { + case SparseMemoryBindFlagBits::eMetadata: return "Metadata"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FenceCreateFlagBits : VkFenceCreateFlags + { + eSignaled = VK_FENCE_CREATE_SIGNALED_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value ) + { + switch ( value ) + { + case FenceCreateFlagBits::eSignaled: return "Signaled"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class EventCreateFlagBits : VkEventCreateFlags + { + eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits value ) + { + switch ( value ) + { + case 
EventCreateFlagBits::eDeviceOnlyKHR: return "DeviceOnlyKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags + { + eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, + eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT, + eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT, + eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT, + eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT, + eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT, + eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT, + eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT, + eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT, + eTessellationEvaluationShaderInvocations = + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, + eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value ) + { + switch ( value ) + { + case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices"; + case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives"; + case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives"; + case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations"; + case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives"; + case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations"; + case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches"; + case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: + return "TessellationEvaluationShaderInvocations"; + case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryPoolCreateFlagBits + { + }; + + VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits ) + { + return "(void)"; + } + + enum class QueryResultFlagBits : VkQueryResultFlags + { + e64 = VK_QUERY_RESULT_64_BIT, + eWait = VK_QUERY_RESULT_WAIT_BIT, + eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, + ePartial = VK_QUERY_RESULT_PARTIAL_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eWithStatusKHR = VK_QUERY_RESULT_WITH_STATUS_BIT_KHR +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value ) + { + switch ( value ) + { + case QueryResultFlagBits::e64: return "64"; + case QueryResultFlagBits::eWait: return "Wait"; + case QueryResultFlagBits::eWithAvailability: return "WithAvailability"; + case QueryResultFlagBits::ePartial: return "Partial"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueryResultFlagBits::eWithStatusKHR: return 
"WithStatusKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryType + { + eOcclusion = VK_QUERY_TYPE_OCCLUSION, + ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS, + eTimestamp = VK_QUERY_TYPE_TIMESTAMP, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eResultStatusOnlyKHR = VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, + ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, + eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, + eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, + eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV, + ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeBitstreamBufferRangeKHR = VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + VULKAN_HPP_INLINE std::string to_string( QueryType value ) + { + switch ( value ) + { + case QueryType::eOcclusion: return "Occlusion"; + case QueryType::ePipelineStatistics: return "PipelineStatistics"; + case QueryType::eTimestamp: return "Timestamp"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueryType::eResultStatusOnlyKHR: return "ResultStatusOnlyKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT"; + case QueryType::ePerformanceQueryKHR: return "PerformanceQueryKHR"; + case QueryType::eAccelerationStructureCompactedSizeKHR: return "AccelerationStructureCompactedSizeKHR"; + case QueryType::eAccelerationStructureSerializationSizeKHR: return "AccelerationStructureSerializationSizeKHR"; + case QueryType::eAccelerationStructureCompactedSizeNV: return "AccelerationStructureCompactedSizeNV"; + case QueryType::ePerformanceQueryINTEL: return "PerformanceQueryINTEL"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueryType::eVideoEncodeBitstreamBufferRangeKHR: return "VideoEncodeBitstreamBufferRangeKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BufferCreateFlagBits : VkBufferCreateFlags + { + eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, + eProtected = VK_BUFFER_CREATE_PROTECTED_BIT, + eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT, + eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value ) + { + switch ( value ) + { + case BufferCreateFlagBits::eSparseBinding: return "SparseBinding"; + case BufferCreateFlagBits::eSparseResidency: return "SparseResidency"; + case BufferCreateFlagBits::eSparseAliased: return "SparseAliased"; + case BufferCreateFlagBits::eProtected: return "Protected"; + case BufferCreateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class 
BufferUsageFlagBits : VkBufferUsageFlags + { + eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, + eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, + eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, + eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, + eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, + eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, + eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, + eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value ) + { + switch ( value ) + { + case BufferUsageFlagBits::eTransferSrc: return "TransferSrc"; + case BufferUsageFlagBits::eTransferDst: return "TransferDst"; + case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer"; + case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer"; + case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer"; + case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer"; + case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer"; + case BufferUsageFlagBits::eShaderDeviceAddress: return "ShaderDeviceAddress"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case BufferUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; + case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; + case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR: + return "AccelerationStructureBuildInputReadOnlyKHR"; + case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR"; + case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case 
BufferUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SharingMode + { + eExclusive = VK_SHARING_MODE_EXCLUSIVE, + eConcurrent = VK_SHARING_MODE_CONCURRENT + }; + + VULKAN_HPP_INLINE std::string to_string( SharingMode value ) + { + switch ( value ) + { + case SharingMode::eExclusive: return "Exclusive"; + case SharingMode::eConcurrent: return "Concurrent"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageLayout + { + eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, + eGeneral = VK_IMAGE_LAYOUT_GENERAL, + eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, + eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, + eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, + eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, + eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, + eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, + eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, + ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR, + eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR, + eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, + eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, + eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR, + eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR, + eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR, + eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR, + eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, + eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR, + eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, + eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, + eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ImageLayout value ) + { + switch ( value ) + { + case ImageLayout::eUndefined: return "Undefined"; + case ImageLayout::eGeneral: return "General"; + case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal"; + case 
ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal"; + case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal"; + case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal"; + case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal"; + case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal"; + case ImageLayout::ePreinitialized: return "Preinitialized"; + case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal"; + case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal"; + case ImageLayout::eDepthAttachmentOptimal: return "DepthAttachmentOptimal"; + case ImageLayout::eDepthReadOnlyOptimal: return "DepthReadOnlyOptimal"; + case ImageLayout::eStencilAttachmentOptimal: return "StencilAttachmentOptimal"; + case ImageLayout::eStencilReadOnlyOptimal: return "StencilReadOnlyOptimal"; + case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ImageLayout::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; + case ImageLayout::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case ImageLayout::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR"; + case ImageLayout::eShadingRateOptimalNV: return "ShadingRateOptimalNV"; + case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ImageLayout::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; + case ImageLayout::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case ImageLayout::eReadOnlyOptimalKHR: return "ReadOnlyOptimalKHR"; + case ImageLayout::eAttachmentOptimalKHR: return "AttachmentOptimalKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ComponentSwizzle + { + eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY, + eZero = VK_COMPONENT_SWIZZLE_ZERO, + eOne = VK_COMPONENT_SWIZZLE_ONE, + eR = VK_COMPONENT_SWIZZLE_R, + eG = VK_COMPONENT_SWIZZLE_G, + eB = VK_COMPONENT_SWIZZLE_B, + eA = VK_COMPONENT_SWIZZLE_A + }; + + VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value ) + { + switch ( value ) + { + case ComponentSwizzle::eIdentity: return "Identity"; + case ComponentSwizzle::eZero: return "Zero"; + case ComponentSwizzle::eOne: return "One"; + case ComponentSwizzle::eR: return "R"; + case ComponentSwizzle::eG: return "G"; + case ComponentSwizzle::eB: return "B"; + case ComponentSwizzle::eA: return "A"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageViewCreateFlagBits : VkImageViewCreateFlags + { + eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT, + eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value ) + { + switch ( value ) + { + case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT: return "FragmentDensityMapDynamicEXT"; + case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT: return "FragmentDensityMapDeferredEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + 
} + } + + enum class ImageViewType + { + e1D = VK_IMAGE_VIEW_TYPE_1D, + e2D = VK_IMAGE_VIEW_TYPE_2D, + e3D = VK_IMAGE_VIEW_TYPE_3D, + eCube = VK_IMAGE_VIEW_TYPE_CUBE, + e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY, + e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY, + eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY + }; + + VULKAN_HPP_INLINE std::string to_string( ImageViewType value ) + { + switch ( value ) + { + case ImageViewType::e1D: return "1D"; + case ImageViewType::e2D: return "2D"; + case ImageViewType::e3D: return "3D"; + case ImageViewType::eCube: return "Cube"; + case ImageViewType::e1DArray: return "1DArray"; + case ImageViewType::e2DArray: return "2DArray"; + case ImageViewType::eCubeArray: return "CubeArray"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags + { + eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCacheCreateFlagBits::eExternallySynchronizedEXT: return "ExternallySynchronizedEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BlendFactor + { + eZero = VK_BLEND_FACTOR_ZERO, + eOne = VK_BLEND_FACTOR_ONE, + eSrcColor = VK_BLEND_FACTOR_SRC_COLOR, + eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, + eDstColor = VK_BLEND_FACTOR_DST_COLOR, + eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, + eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA, + eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, + eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA, + eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, + eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR, + eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, + eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA, + eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, + eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, + eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR, + eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, + eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA, + eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA + }; + + VULKAN_HPP_INLINE std::string to_string( BlendFactor value ) + { + switch ( value ) + { + case BlendFactor::eZero: return "Zero"; + case BlendFactor::eOne: return "One"; + case BlendFactor::eSrcColor: return "SrcColor"; + case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor"; + case BlendFactor::eDstColor: return "DstColor"; + case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor"; + case BlendFactor::eSrcAlpha: return "SrcAlpha"; + case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha"; + case BlendFactor::eDstAlpha: return "DstAlpha"; + case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha"; + case BlendFactor::eConstantColor: return "ConstantColor"; + case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor"; + case BlendFactor::eConstantAlpha: return "ConstantAlpha"; + case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha"; + case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate"; + case BlendFactor::eSrc1Color: return "Src1Color"; + case BlendFactor::eOneMinusSrc1Color: 
return "OneMinusSrc1Color"; + case BlendFactor::eSrc1Alpha: return "Src1Alpha"; + case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BlendOp + { + eAdd = VK_BLEND_OP_ADD, + eSubtract = VK_BLEND_OP_SUBTRACT, + eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT, + eMin = VK_BLEND_OP_MIN, + eMax = VK_BLEND_OP_MAX, + eZeroEXT = VK_BLEND_OP_ZERO_EXT, + eSrcEXT = VK_BLEND_OP_SRC_EXT, + eDstEXT = VK_BLEND_OP_DST_EXT, + eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT, + eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT, + eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT, + eDstInEXT = VK_BLEND_OP_DST_IN_EXT, + eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT, + eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT, + eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT, + eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT, + eXorEXT = VK_BLEND_OP_XOR_EXT, + eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT, + eScreenEXT = VK_BLEND_OP_SCREEN_EXT, + eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT, + eDarkenEXT = VK_BLEND_OP_DARKEN_EXT, + eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT, + eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT, + eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT, + eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT, + eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT, + eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT, + eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT, + eInvertEXT = VK_BLEND_OP_INVERT_EXT, + eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT, + eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT, + eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT, + eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT, + eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT, + ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT, + eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT, + eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT, + eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT, + eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT, + eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT, + ePlusEXT = VK_BLEND_OP_PLUS_EXT, + ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT, + ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, + ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT, + eMinusEXT = VK_BLEND_OP_MINUS_EXT, + eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT, + eContrastEXT = VK_BLEND_OP_CONTRAST_EXT, + eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT, + eRedEXT = VK_BLEND_OP_RED_EXT, + eGreenEXT = VK_BLEND_OP_GREEN_EXT, + eBlueEXT = VK_BLEND_OP_BLUE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BlendOp value ) + { + switch ( value ) + { + case BlendOp::eAdd: return "Add"; + case BlendOp::eSubtract: return "Subtract"; + case BlendOp::eReverseSubtract: return "ReverseSubtract"; + case BlendOp::eMin: return "Min"; + case BlendOp::eMax: return "Max"; + case BlendOp::eZeroEXT: return "ZeroEXT"; + case BlendOp::eSrcEXT: return "SrcEXT"; + case BlendOp::eDstEXT: return "DstEXT"; + case BlendOp::eSrcOverEXT: return "SrcOverEXT"; + case BlendOp::eDstOverEXT: return "DstOverEXT"; + case BlendOp::eSrcInEXT: return "SrcInEXT"; + case BlendOp::eDstInEXT: return "DstInEXT"; + case BlendOp::eSrcOutEXT: return "SrcOutEXT"; + case BlendOp::eDstOutEXT: return "DstOutEXT"; + case BlendOp::eSrcAtopEXT: return "SrcAtopEXT"; + case BlendOp::eDstAtopEXT: return "DstAtopEXT"; + case BlendOp::eXorEXT: return "XorEXT"; + case BlendOp::eMultiplyEXT: return "MultiplyEXT"; + case BlendOp::eScreenEXT: return "ScreenEXT"; + case BlendOp::eOverlayEXT: return "OverlayEXT"; + case BlendOp::eDarkenEXT: return "DarkenEXT"; + case BlendOp::eLightenEXT: return "LightenEXT"; + case 
BlendOp::eColordodgeEXT: return "ColordodgeEXT"; + case BlendOp::eColorburnEXT: return "ColorburnEXT"; + case BlendOp::eHardlightEXT: return "HardlightEXT"; + case BlendOp::eSoftlightEXT: return "SoftlightEXT"; + case BlendOp::eDifferenceEXT: return "DifferenceEXT"; + case BlendOp::eExclusionEXT: return "ExclusionEXT"; + case BlendOp::eInvertEXT: return "InvertEXT"; + case BlendOp::eInvertRgbEXT: return "InvertRgbEXT"; + case BlendOp::eLineardodgeEXT: return "LineardodgeEXT"; + case BlendOp::eLinearburnEXT: return "LinearburnEXT"; + case BlendOp::eVividlightEXT: return "VividlightEXT"; + case BlendOp::eLinearlightEXT: return "LinearlightEXT"; + case BlendOp::ePinlightEXT: return "PinlightEXT"; + case BlendOp::eHardmixEXT: return "HardmixEXT"; + case BlendOp::eHslHueEXT: return "HslHueEXT"; + case BlendOp::eHslSaturationEXT: return "HslSaturationEXT"; + case BlendOp::eHslColorEXT: return "HslColorEXT"; + case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT"; + case BlendOp::ePlusEXT: return "PlusEXT"; + case BlendOp::ePlusClampedEXT: return "PlusClampedEXT"; + case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT"; + case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT"; + case BlendOp::eMinusEXT: return "MinusEXT"; + case BlendOp::eMinusClampedEXT: return "MinusClampedEXT"; + case BlendOp::eContrastEXT: return "ContrastEXT"; + case BlendOp::eInvertOvgEXT: return "InvertOvgEXT"; + case BlendOp::eRedEXT: return "RedEXT"; + case BlendOp::eGreenEXT: return "GreenEXT"; + case BlendOp::eBlueEXT: return "BlueEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ColorComponentFlagBits : VkColorComponentFlags + { + eR = VK_COLOR_COMPONENT_R_BIT, + eG = VK_COLOR_COMPONENT_G_BIT, + eB = VK_COLOR_COMPONENT_B_BIT, + eA = VK_COLOR_COMPONENT_A_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value ) + { + switch ( value ) + { + case ColorComponentFlagBits::eR: return "R"; + case ColorComponentFlagBits::eG: return "G"; + case ColorComponentFlagBits::eB: return "B"; + case ColorComponentFlagBits::eA: return "A"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CompareOp + { + eNever = VK_COMPARE_OP_NEVER, + eLess = VK_COMPARE_OP_LESS, + eEqual = VK_COMPARE_OP_EQUAL, + eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL, + eGreater = VK_COMPARE_OP_GREATER, + eNotEqual = VK_COMPARE_OP_NOT_EQUAL, + eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL, + eAlways = VK_COMPARE_OP_ALWAYS + }; + + VULKAN_HPP_INLINE std::string to_string( CompareOp value ) + { + switch ( value ) + { + case CompareOp::eNever: return "Never"; + case CompareOp::eLess: return "Less"; + case CompareOp::eEqual: return "Equal"; + case CompareOp::eLessOrEqual: return "LessOrEqual"; + case CompareOp::eGreater: return "Greater"; + case CompareOp::eNotEqual: return "NotEqual"; + case CompareOp::eGreaterOrEqual: return "GreaterOrEqual"; + case CompareOp::eAlways: return "Always"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CullModeFlagBits : VkCullModeFlags + { + eNone = VK_CULL_MODE_NONE, + eFront = VK_CULL_MODE_FRONT_BIT, + eBack = VK_CULL_MODE_BACK_BIT, + eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK + }; + + VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value ) + { + switch ( value ) + { + case CullModeFlagBits::eNone: return "None"; + case CullModeFlagBits::eFront: return 
"Front"; + case CullModeFlagBits::eBack: return "Back"; + case CullModeFlagBits::eFrontAndBack: return "FrontAndBack"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DynamicState + { + eViewport = VK_DYNAMIC_STATE_VIEWPORT, + eScissor = VK_DYNAMIC_STATE_SCISSOR, + eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, + eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, + eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, + eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, + eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, + eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, + eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, + eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, + eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, + eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR, + eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, + eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, + eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, + eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR, + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, + eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT, + eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT, + ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT, + eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT, + eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, + eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT, + eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT, + eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT, + eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT, + eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT, + eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, + eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT, + eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT, + ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT, + eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT, + eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT, + eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT, + ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT, + eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DynamicState value ) + { + switch ( value ) + { + case DynamicState::eViewport: return "Viewport"; + case DynamicState::eScissor: return "Scissor"; + case DynamicState::eLineWidth: return "LineWidth"; + case DynamicState::eDepthBias: return "DepthBias"; + case DynamicState::eBlendConstants: return "BlendConstants"; + case DynamicState::eDepthBounds: return "DepthBounds"; + case DynamicState::eStencilCompareMask: return "StencilCompareMask"; + case DynamicState::eStencilWriteMask: return "StencilWriteMask"; + case DynamicState::eStencilReference: return "StencilReference"; + case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV"; + case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT"; + case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT"; + case DynamicState::eRayTracingPipelineStackSizeKHR: return "RayTracingPipelineStackSizeKHR"; + case 
DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV"; + case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV"; + case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV"; + case DynamicState::eFragmentShadingRateKHR: return "FragmentShadingRateKHR"; + case DynamicState::eLineStippleEXT: return "LineStippleEXT"; + case DynamicState::eCullModeEXT: return "CullModeEXT"; + case DynamicState::eFrontFaceEXT: return "FrontFaceEXT"; + case DynamicState::ePrimitiveTopologyEXT: return "PrimitiveTopologyEXT"; + case DynamicState::eViewportWithCountEXT: return "ViewportWithCountEXT"; + case DynamicState::eScissorWithCountEXT: return "ScissorWithCountEXT"; + case DynamicState::eVertexInputBindingStrideEXT: return "VertexInputBindingStrideEXT"; + case DynamicState::eDepthTestEnableEXT: return "DepthTestEnableEXT"; + case DynamicState::eDepthWriteEnableEXT: return "DepthWriteEnableEXT"; + case DynamicState::eDepthCompareOpEXT: return "DepthCompareOpEXT"; + case DynamicState::eDepthBoundsTestEnableEXT: return "DepthBoundsTestEnableEXT"; + case DynamicState::eStencilTestEnableEXT: return "StencilTestEnableEXT"; + case DynamicState::eStencilOpEXT: return "StencilOpEXT"; + case DynamicState::eVertexInputEXT: return "VertexInputEXT"; + case DynamicState::ePatchControlPointsEXT: return "PatchControlPointsEXT"; + case DynamicState::eRasterizerDiscardEnableEXT: return "RasterizerDiscardEnableEXT"; + case DynamicState::eDepthBiasEnableEXT: return "DepthBiasEnableEXT"; + case DynamicState::eLogicOpEXT: return "LogicOpEXT"; + case DynamicState::ePrimitiveRestartEnableEXT: return "PrimitiveRestartEnableEXT"; + case DynamicState::eColorWriteEnableEXT: return "ColorWriteEnableEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FrontFace + { + eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE, + eClockwise = VK_FRONT_FACE_CLOCKWISE + }; + + VULKAN_HPP_INLINE std::string to_string( FrontFace value ) + { + switch ( value ) + { + case FrontFace::eCounterClockwise: return "CounterClockwise"; + case FrontFace::eClockwise: return "Clockwise"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class LogicOp + { + eClear = VK_LOGIC_OP_CLEAR, + eAnd = VK_LOGIC_OP_AND, + eAndReverse = VK_LOGIC_OP_AND_REVERSE, + eCopy = VK_LOGIC_OP_COPY, + eAndInverted = VK_LOGIC_OP_AND_INVERTED, + eNoOp = VK_LOGIC_OP_NO_OP, + eXor = VK_LOGIC_OP_XOR, + eOr = VK_LOGIC_OP_OR, + eNor = VK_LOGIC_OP_NOR, + eEquivalent = VK_LOGIC_OP_EQUIVALENT, + eInvert = VK_LOGIC_OP_INVERT, + eOrReverse = VK_LOGIC_OP_OR_REVERSE, + eCopyInverted = VK_LOGIC_OP_COPY_INVERTED, + eOrInverted = VK_LOGIC_OP_OR_INVERTED, + eNand = VK_LOGIC_OP_NAND, + eSet = VK_LOGIC_OP_SET + }; + + VULKAN_HPP_INLINE std::string to_string( LogicOp value ) + { + switch ( value ) + { + case LogicOp::eClear: return "Clear"; + case LogicOp::eAnd: return "And"; + case LogicOp::eAndReverse: return "AndReverse"; + case LogicOp::eCopy: return "Copy"; + case LogicOp::eAndInverted: return "AndInverted"; + case LogicOp::eNoOp: return "NoOp"; + case LogicOp::eXor: return "Xor"; + case LogicOp::eOr: return "Or"; + case LogicOp::eNor: return "Nor"; + case LogicOp::eEquivalent: return "Equivalent"; + case LogicOp::eInvert: return "Invert"; + case LogicOp::eOrReverse: return "OrReverse"; + case LogicOp::eCopyInverted: return "CopyInverted"; + case LogicOp::eOrInverted: return "OrInverted"; + case 
LogicOp::eNand: return "Nand"; + case LogicOp::eSet: return "Set"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineCreateFlagBits : VkPipelineCreateFlags + { + eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, + eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, + eRayTracingShaderGroupHandleCaptureReplayKHR = + VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, + eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, + eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, + eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, + eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, + eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, + eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR, + eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits::eDerivative: return "Derivative"; + case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase"; + case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR: + return "RayTracingNoNullIntersectionShadersKHR"; + case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR"; + case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; + case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR: + return "RayTracingShaderGroupHandleCaptureReplayKHR"; + case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; + case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; + case PipelineCreateFlagBits::eIndirectBindableNV: 
return "IndirectBindableNV"; + case PipelineCreateFlagBits::eLibraryKHR: return "LibraryKHR"; + case PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT: return "FailOnPipelineCompileRequiredEXT"; + case PipelineCreateFlagBits::eEarlyReturnOnFailureEXT: return "EarlyReturnOnFailureEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags + { + eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, + eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value ) + { + switch ( value ) + { + case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT: return "AllowVaryingSubgroupSizeEXT"; + case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT: return "RequireFullSubgroupsEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PolygonMode + { + eFill = VK_POLYGON_MODE_FILL, + eLine = VK_POLYGON_MODE_LINE, + ePoint = VK_POLYGON_MODE_POINT, + eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV + }; + + VULKAN_HPP_INLINE std::string to_string( PolygonMode value ) + { + switch ( value ) + { + case PolygonMode::eFill: return "Fill"; + case PolygonMode::eLine: return "Line"; + case PolygonMode::ePoint: return "Point"; + case PolygonMode::eFillRectangleNV: return "FillRectangleNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PrimitiveTopology + { + ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST, + eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, + eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, + eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, + eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, + eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, + eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, + eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, + eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY, + ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST + }; + + VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value ) + { + switch ( value ) + { + case PrimitiveTopology::ePointList: return "PointList"; + case PrimitiveTopology::eLineList: return "LineList"; + case PrimitiveTopology::eLineStrip: return "LineStrip"; + case PrimitiveTopology::eTriangleList: return "TriangleList"; + case PrimitiveTopology::eTriangleStrip: return "TriangleStrip"; + case PrimitiveTopology::eTriangleFan: return "TriangleFan"; + case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency"; + case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency"; + case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency"; + case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency"; + case PrimitiveTopology::ePatchList: return "PatchList"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ShaderStageFlagBits : VkShaderStageFlags + { + eVertex = VK_SHADER_STAGE_VERTEX_BIT, + eTessellationControl = 
VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, + eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, + eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT, + eFragment = VK_SHADER_STAGE_FRAGMENT_BIT, + eCompute = VK_SHADER_STAGE_COMPUTE_BIT, + eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, + eAll = VK_SHADER_STAGE_ALL, + eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR, + eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, + eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV, + eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, + eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, + eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, + eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, + eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, + eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value ) + { + switch ( value ) + { + case ShaderStageFlagBits::eVertex: return "Vertex"; + case ShaderStageFlagBits::eTessellationControl: return "TessellationControl"; + case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation"; + case ShaderStageFlagBits::eGeometry: return "Geometry"; + case ShaderStageFlagBits::eFragment: return "Fragment"; + case ShaderStageFlagBits::eCompute: return "Compute"; + case ShaderStageFlagBits::eAllGraphics: return "AllGraphics"; + case ShaderStageFlagBits::eAll: return "All"; + case ShaderStageFlagBits::eRaygenKHR: return "RaygenKHR"; + case ShaderStageFlagBits::eAnyHitKHR: return "AnyHitKHR"; + case ShaderStageFlagBits::eClosestHitKHR: return "ClosestHitKHR"; + case ShaderStageFlagBits::eMissKHR: return "MissKHR"; + case ShaderStageFlagBits::eIntersectionKHR: return "IntersectionKHR"; + case ShaderStageFlagBits::eCallableKHR: return "CallableKHR"; + case ShaderStageFlagBits::eTaskNV: return "TaskNV"; + case ShaderStageFlagBits::eMeshNV: return "MeshNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class StencilOp + { + eKeep = VK_STENCIL_OP_KEEP, + eZero = VK_STENCIL_OP_ZERO, + eReplace = VK_STENCIL_OP_REPLACE, + eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP, + eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP, + eInvert = VK_STENCIL_OP_INVERT, + eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP, + eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP + }; + + VULKAN_HPP_INLINE std::string to_string( StencilOp value ) + { + switch ( value ) + { + case StencilOp::eKeep: return "Keep"; + case StencilOp::eZero: return "Zero"; + case StencilOp::eReplace: return "Replace"; + case StencilOp::eIncrementAndClamp: return "IncrementAndClamp"; + case StencilOp::eDecrementAndClamp: return "DecrementAndClamp"; + case StencilOp::eInvert: return "Invert"; + case StencilOp::eIncrementAndWrap: return "IncrementAndWrap"; + case StencilOp::eDecrementAndWrap: return "DecrementAndWrap"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VertexInputRate + { + eVertex = VK_VERTEX_INPUT_RATE_VERTEX, + eInstance = VK_VERTEX_INPUT_RATE_INSTANCE + }; + + VULKAN_HPP_INLINE std::string to_string( VertexInputRate value ) + { + switch ( value ) + { + case VertexInputRate::eVertex: return "Vertex"; + case VertexInputRate::eInstance: return "Instance"; + 
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BorderColor + { + eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, + eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, + eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, + eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, + eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, + eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE, + eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT, + eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BorderColor value ) + { + switch ( value ) + { + case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack"; + case BorderColor::eIntTransparentBlack: return "IntTransparentBlack"; + case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack"; + case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack"; + case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite"; + case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite"; + case BorderColor::eFloatCustomEXT: return "FloatCustomEXT"; + case BorderColor::eIntCustomEXT: return "IntCustomEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class Filter + { + eNearest = VK_FILTER_NEAREST, + eLinear = VK_FILTER_LINEAR, + eCubicIMG = VK_FILTER_CUBIC_IMG, + eCubicEXT = VK_FILTER_CUBIC_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( Filter value ) + { + switch ( value ) + { + case Filter::eNearest: return "Nearest"; + case Filter::eLinear: return "Linear"; + case Filter::eCubicIMG: return "CubicIMG"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerAddressMode + { + eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT, + eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, + eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, + eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, + eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, + eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value ) + { + switch ( value ) + { + case SamplerAddressMode::eRepeat: return "Repeat"; + case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat"; + case SamplerAddressMode::eClampToEdge: return "ClampToEdge"; + case SamplerAddressMode::eClampToBorder: return "ClampToBorder"; + case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerCreateFlagBits : VkSamplerCreateFlags + { + eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, + eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value ) + { + switch ( value ) + { + case SamplerCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; + case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT: return "SubsampledCoarseReconstructionEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerMipmapMode + { + eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST, + eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR + }; + + VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode 
value ) + { + switch ( value ) + { + case SamplerMipmapMode::eNearest: return "Nearest"; + case SamplerMipmapMode::eLinear: return "Linear"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags + { + eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, + eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, + eHostOnlyVALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE, + eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet"; + case DescriptorPoolCreateFlagBits::eUpdateAfterBind: return "UpdateAfterBind"; + case DescriptorPoolCreateFlagBits::eHostOnlyVALVE: return "HostOnlyVALVE"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags + { + eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, + eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE, + eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool"; + case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR"; + case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE: return "HostOnlyPoolVALVE"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorType + { + eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, + eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, + eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, + eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, + eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, + eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, + eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, + eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, + eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, + eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, + eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, + eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorType value ) + { + switch ( value ) + { + case DescriptorType::eSampler: return "Sampler"; + case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler"; + case DescriptorType::eSampledImage: return "SampledImage"; + case DescriptorType::eStorageImage: return "StorageImage"; + case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer"; + case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer"; + case DescriptorType::eUniformBuffer: return "UniformBuffer"; + case 
DescriptorType::eStorageBuffer: return "StorageBuffer"; + case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic"; + case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic"; + case DescriptorType::eInputAttachment: return "InputAttachment"; + case DescriptorType::eInlineUniformBlockEXT: return "InlineUniformBlockEXT"; + case DescriptorType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV"; + case DescriptorType::eMutableVALVE: return "MutableVALVE"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccessFlagBits : VkAccessFlags + { + eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = VK_ACCESS_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_HOST_READ_BIT, + eHostWrite = VK_ACCESS_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, + eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, + eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, + eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, + eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, + eNoneKHR = VK_ACCESS_NONE_KHR, + eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, + eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, + eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value ) + { + switch ( value ) + { + case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead"; + case AccessFlagBits::eIndexRead: return "IndexRead"; + case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead"; + case AccessFlagBits::eUniformRead: return "UniformRead"; + case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead"; + case AccessFlagBits::eShaderRead: return "ShaderRead"; + case AccessFlagBits::eShaderWrite: return "ShaderWrite"; + case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead"; + case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite"; + case 
AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead"; + case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite"; + case AccessFlagBits::eTransferRead: return "TransferRead"; + case AccessFlagBits::eTransferWrite: return "TransferWrite"; + case AccessFlagBits::eHostRead: return "HostRead"; + case AccessFlagBits::eHostWrite: return "HostWrite"; + case AccessFlagBits::eMemoryRead: return "MemoryRead"; + case AccessFlagBits::eMemoryWrite: return "MemoryWrite"; + case AccessFlagBits::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT"; + case AccessFlagBits::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT"; + case AccessFlagBits::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT"; + case AccessFlagBits::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT"; + case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR"; + case AccessFlagBits::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR"; + case AccessFlagBits::eShadingRateImageReadNV: return "ShadingRateImageReadNV"; + case AccessFlagBits::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT"; + case AccessFlagBits::eCommandPreprocessReadNV: return "CommandPreprocessReadNV"; + case AccessFlagBits::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV"; + case AccessFlagBits::eNoneKHR: return "NoneKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags + { + eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value ) + { + switch ( value ) + { + case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AttachmentLoadOp + { + eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, + eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, + eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value ) + { + switch ( value ) + { + case AttachmentLoadOp::eLoad: return "Load"; + case AttachmentLoadOp::eClear: return "Clear"; + case AttachmentLoadOp::eDontCare: return "DontCare"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AttachmentStoreOp + { + eStore = VK_ATTACHMENT_STORE_OP_STORE, + eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE, + eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value ) + { + switch ( value ) + { + case AttachmentStoreOp::eStore: return "Store"; + case AttachmentStoreOp::eDontCare: return "DontCare"; + case AttachmentStoreOp::eNoneQCOM: return "NoneQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DependencyFlagBits : VkDependencyFlags + { + eByRegion = VK_DEPENDENCY_BY_REGION_BIT, + eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, + eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR, + eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value ) + 
{ + switch ( value ) + { + case DependencyFlagBits::eByRegion: return "ByRegion"; + case DependencyFlagBits::eDeviceGroup: return "DeviceGroup"; + case DependencyFlagBits::eViewLocal: return "ViewLocal"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags + { + eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value ) + { + switch ( value ) + { + case FramebufferCreateFlagBits::eImageless: return "Imageless"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineBindPoint + { + eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, + eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, + eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, + eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value ) + { + switch ( value ) + { + case PipelineBindPoint::eGraphics: return "Graphics"; + case PipelineBindPoint::eCompute: return "Compute"; + case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags + { + eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM + }; + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value ) + { + switch ( value ) + { + case RenderPassCreateFlagBits::eTransformQCOM: return "TransformQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags + { + ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, + ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, + eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, + eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM + }; + + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value ) + { + switch ( value ) + { + case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX"; + case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX"; + case SubpassDescriptionFlagBits::eFragmentRegionQCOM: return "FragmentRegionQCOM"; + case SubpassDescriptionFlagBits::eShaderResolveQCOM: return "ShaderResolveQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags + { + eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, + eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, + eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value ) + { + switch ( value ) + { + case CommandPoolCreateFlagBits::eTransient: return "Transient"; + case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer"; + case CommandPoolCreateFlagBits::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags + { + eReleaseResources = 
VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class CommandBufferLevel
+  {
+    ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+    eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value )
+  {
+    switch ( value )
+    {
+      case CommandBufferLevel::ePrimary: return "Primary";
+      case CommandBufferLevel::eSecondary: return "Secondary";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags
+  {
+    eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags
+  {
+    eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+    eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+    eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit";
+      case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue";
+      case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class QueryControlFlagBits : VkQueryControlFlags
+  {
+    ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryControlFlagBits::ePrecise: return "Precise";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class IndexType
+  {
+    eUint16 = VK_INDEX_TYPE_UINT16,
+    eUint32 = VK_INDEX_TYPE_UINT32,
+    eNoneKHR = VK_INDEX_TYPE_NONE_KHR,
+    eUint8EXT = VK_INDEX_TYPE_UINT8_EXT,
+    eNoneNV = VK_INDEX_TYPE_NONE_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( IndexType value )
+  {
+    switch ( value )
+    {
+      case IndexType::eUint16: return "Uint16";
+      case IndexType::eUint32: return "Uint32";
+      case IndexType::eNoneKHR: return "NoneKHR";
+      case IndexType::eUint8EXT: return "Uint8EXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class StencilFaceFlagBits : VkStencilFaceFlags
+  {
+    eFront = VK_STENCIL_FACE_FRONT_BIT,
+    eBack = VK_STENCIL_FACE_BACK_BIT,
+    eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
+    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value )
+  {
+    switch ( value )
+    {
+      case StencilFaceFlagBits::eFront: return "Front";
+      case StencilFaceFlagBits::eBack: return "Back";
+      case StencilFaceFlagBits::eFrontAndBack: return "FrontAndBack";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+ } + + enum class SubpassContents + { + eInline = VK_SUBPASS_CONTENTS_INLINE, + eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS + }; + + VULKAN_HPP_INLINE std::string to_string( SubpassContents value ) + { + switch ( value ) + { + case SubpassContents::eInline: return "Inline"; + case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_1 === + + enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags + { + eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT, + eVote = VK_SUBGROUP_FEATURE_VOTE_BIT, + eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT, + eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT, + eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT, + eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT, + eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT, + eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT, + ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value ) + { + switch ( value ) + { + case SubgroupFeatureFlagBits::eBasic: return "Basic"; + case SubgroupFeatureFlagBits::eVote: return "Vote"; + case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic"; + case SubgroupFeatureFlagBits::eBallot: return "Ballot"; + case SubgroupFeatureFlagBits::eShuffle: return "Shuffle"; + case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative"; + case SubgroupFeatureFlagBits::eClustered: return "Clustered"; + case SubgroupFeatureFlagBits::eQuad: return "Quad"; + case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags + { + eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT + }; + using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value ) + { + switch ( value ) + { + case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc"; + case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst"; + case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc"; + case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags + { + eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, + eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT + }; + using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits; + + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value ) + { + switch ( value ) + { + case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask"; + case MemoryAllocateFlagBits::eDeviceAddress: return "DeviceAddress"; + case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PointClippingBehavior + { + eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + eUserClipPlanesOnly = 
VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY + }; + using PointClippingBehaviorKHR = PointClippingBehavior; + + VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value ) + { + switch ( value ) + { + case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes"; + case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class TessellationDomainOrigin + { + eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT + }; + using TessellationDomainOriginKHR = TessellationDomainOrigin; + + VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value ) + { + switch ( value ) + { + case TessellationDomainOrigin::eUpperLeft: return "UpperLeft"; + case TessellationDomainOrigin::eLowerLeft: return "LowerLeft"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerYcbcrModelConversion + { + eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 + }; + using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion; + + VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value ) + { + switch ( value ) + { + case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity"; + case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity"; + case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709"; + case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601"; + case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerYcbcrRange + { + eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW + }; + using SamplerYcbcrRangeKHR = SamplerYcbcrRange; + + VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value ) + { + switch ( value ) + { + case SamplerYcbcrRange::eItuFull: return "ItuFull"; + case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ChromaLocation + { + eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN, + eMidpoint = VK_CHROMA_LOCATION_MIDPOINT + }; + using ChromaLocationKHR = ChromaLocation; + + VULKAN_HPP_INLINE std::string to_string( ChromaLocation value ) + { + switch ( value ) + { + case ChromaLocation::eCositedEven: return "CositedEven"; + case ChromaLocation::eMidpoint: return "Midpoint"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorUpdateTemplateType + { + eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR + }; + using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType; + + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value ) + { + switch ( value ) + { + case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet"; + case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return 
"PushDescriptorsKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, + eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eZirconVmoFUCHSIA = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + }; + using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture"; + case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource"; + case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT"; + case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA: return "ZirconVmoFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT + }; + using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class 
ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT + }; + using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags + { + eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT + }; + using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalFenceFeatureFlagBits::eExportable: return "Exportable"; + case ExternalFenceFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FenceImportFlagBits : VkFenceImportFlags + { + eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT + }; + using FenceImportFlagBitsKHR = FenceImportFlagBits; + + VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value ) + { + switch ( value ) + { + case FenceImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags + { + eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT + }; + using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value ) + { + switch ( value ) + { + case SemaphoreImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eZirconEventFUCHSIA = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT + }; + using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case 
ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence"; + case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA: return "ZirconEventFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags + { + eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT + }; + using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable"; + case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_2 === + + enum class DriverId + { + eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY, + eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE, + eMesaRadv = VK_DRIVER_ID_MESA_RADV, + eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, + eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY, + eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY, + eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE, + eMoltenvk = VK_DRIVER_ID_MOLTENVK, + eCoreaviProprietary = VK_DRIVER_ID_COREAVI_PROPRIETARY, + eJuiceProprietary = VK_DRIVER_ID_JUICE_PROPRIETARY, + eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR + }; + using DriverIdKHR = DriverId; + + VULKAN_HPP_INLINE std::string to_string( DriverId value ) + { + switch ( value ) + { + case DriverId::eAmdProprietary: return "AmdProprietary"; + case DriverId::eAmdOpenSource: return "AmdOpenSource"; + case DriverId::eMesaRadv: return "MesaRadv"; + case DriverId::eNvidiaProprietary: return "NvidiaProprietary"; + case DriverId::eIntelProprietaryWindows: return "IntelProprietaryWindows"; + case DriverId::eIntelOpenSourceMESA: return "IntelOpenSourceMESA"; + case DriverId::eImaginationProprietary: return "ImaginationProprietary"; + case DriverId::eQualcommProprietary: return "QualcommProprietary"; + case DriverId::eArmProprietary: return "ArmProprietary"; + case DriverId::eGoogleSwiftshader: return "GoogleSwiftshader"; + case DriverId::eGgpProprietary: return "GgpProprietary"; + case DriverId::eBroadcomProprietary: return "BroadcomProprietary"; + case DriverId::eMesaLlvmpipe: return "MesaLlvmpipe"; + case DriverId::eMoltenvk: return "Moltenvk"; + case DriverId::eCoreaviProprietary: return "CoreaviProprietary"; + case DriverId::eJuiceProprietary: return "JuiceProprietary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ShaderFloatControlsIndependence + { + e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, + eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, + eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE + }; + using 
ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence; + + VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value ) + { + switch ( value ) + { + case ShaderFloatControlsIndependence::e32BitOnly: return "32BitOnly"; + case ShaderFloatControlsIndependence::eAll: return "All"; + case ShaderFloatControlsIndependence::eNone: return "None"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags + { + eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, + eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, + ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, + eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT + }; + using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits; + + VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value ) + { + switch ( value ) + { + case DescriptorBindingFlagBits::eUpdateAfterBind: return "UpdateAfterBind"; + case DescriptorBindingFlagBits::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending"; + case DescriptorBindingFlagBits::ePartiallyBound: return "PartiallyBound"; + case DescriptorBindingFlagBits::eVariableDescriptorCount: return "VariableDescriptorCount"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ResolveModeFlagBits : VkResolveModeFlags + { + eNone = VK_RESOLVE_MODE_NONE, + eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, + eAverage = VK_RESOLVE_MODE_AVERAGE_BIT, + eMin = VK_RESOLVE_MODE_MIN_BIT, + eMax = VK_RESOLVE_MODE_MAX_BIT + }; + using ResolveModeFlagBitsKHR = ResolveModeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value ) + { + switch ( value ) + { + case ResolveModeFlagBits::eNone: return "None"; + case ResolveModeFlagBits::eSampleZero: return "SampleZero"; + case ResolveModeFlagBits::eAverage: return "Average"; + case ResolveModeFlagBits::eMin: return "Min"; + case ResolveModeFlagBits::eMax: return "Max"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerReductionMode + { + eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, + eMin = VK_SAMPLER_REDUCTION_MODE_MIN, + eMax = VK_SAMPLER_REDUCTION_MODE_MAX + }; + using SamplerReductionModeEXT = SamplerReductionMode; + + VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value ) + { + switch ( value ) + { + case SamplerReductionMode::eWeightedAverage: return "WeightedAverage"; + case SamplerReductionMode::eMin: return "Min"; + case SamplerReductionMode::eMax: return "Max"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SemaphoreType + { + eBinary = VK_SEMAPHORE_TYPE_BINARY, + eTimeline = VK_SEMAPHORE_TYPE_TIMELINE + }; + using SemaphoreTypeKHR = SemaphoreType; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreType value ) + { + switch ( value ) + { + case SemaphoreType::eBinary: return "Binary"; + case SemaphoreType::eTimeline: return "Timeline"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags + { + eAny = VK_SEMAPHORE_WAIT_ANY_BIT + }; + using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits; + + VULKAN_HPP_INLINE std::string to_string( 
SemaphoreWaitFlagBits value ) + { + switch ( value ) + { + case SemaphoreWaitFlagBits::eAny: return "Any"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_surface === + + enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR + { + eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, + eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, + eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, + eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR, + eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR, + eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, + eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR, + eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR, + eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value ) + { + switch ( value ) + { + case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity"; + case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90"; + case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180"; + case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270"; + case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PresentModeKHR + { + eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR, + eMailbox = VK_PRESENT_MODE_MAILBOX_KHR, + eFifo = VK_PRESENT_MODE_FIFO_KHR, + eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR, + eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR, + eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value ) + { + switch ( value ) + { + case PresentModeKHR::eImmediate: return "Immediate"; + case PresentModeKHR::eMailbox: return "Mailbox"; + case PresentModeKHR::eFifo: return "Fifo"; + case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed"; + case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh"; + case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ColorSpaceKHR + { + eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, + eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, + eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, + eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, + eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT, + eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, + eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, + eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, + eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT, + eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, + eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, + eAdobergbNonlinearEXT = 
VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, + ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, + eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, + eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD, + eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR, + eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value ) + { + switch ( value ) + { + case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear"; + case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT"; + case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT"; + case ColorSpaceKHR::eDisplayP3LinearEXT: return "DisplayP3LinearEXT"; + case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT"; + case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT"; + case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT"; + case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT"; + case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT"; + case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT"; + case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT"; + case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT"; + case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT"; + case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT"; + case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT"; + case ColorSpaceKHR::eDisplayNativeAMD: return "DisplayNativeAMD"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR + { + eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, + ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, + ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, + eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value ) + { + switch ( value ) + { + case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied"; + case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied"; + case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_swapchain === + + enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR + { + eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, + eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value ) + { + switch ( value ) + { + case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case SwapchainCreateFlagBitsKHR::eProtected: return "Protected"; + case SwapchainCreateFlagBitsKHR::eMutableFormat: return "MutableFormat"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR + { + eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, + eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, + eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, + eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR + }; + + 
VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value ) + { + switch ( value ) + { + case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local"; + case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote"; + case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum"; + case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_display === + + enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR + { + eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, + eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR, + ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR, + ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value ) + { + switch ( value ) + { + case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_debug_report === + + enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT + { + eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT, + eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT, + ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, + eError = VK_DEBUG_REPORT_ERROR_BIT_EXT, + eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value ) + { + switch ( value ) + { + case DebugReportFlagBitsEXT::eInformation: return "Information"; + case DebugReportFlagBitsEXT::eWarning: return "Warning"; + case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning"; + case DebugReportFlagBitsEXT::eError: return "Error"; + case DebugReportFlagBitsEXT::eDebug: return "Debug"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DebugReportObjectTypeEXT + { + eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, + ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, + eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, + eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, + eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, + eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, + eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, + eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, + eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, + eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, + eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, + eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, + eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, + eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, + eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, + ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, + ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, + eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, + ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, + eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, + eSampler = 
VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, + eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, + eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, + eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, + eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, + eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, + eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, + eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, + eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, + eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + eCuModuleNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT, + eCuFunctionNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT, + eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, + eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, + eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, + eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, + eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT, + eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value ) + { + switch ( value ) + { + case DebugReportObjectTypeEXT::eUnknown: return "Unknown"; + case DebugReportObjectTypeEXT::eInstance: return "Instance"; + case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice"; + case DebugReportObjectTypeEXT::eDevice: return "Device"; + case DebugReportObjectTypeEXT::eQueue: return "Queue"; + case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore"; + case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer"; + case DebugReportObjectTypeEXT::eFence: return "Fence"; + case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory"; + case DebugReportObjectTypeEXT::eBuffer: return "Buffer"; + case DebugReportObjectTypeEXT::eImage: return "Image"; + case DebugReportObjectTypeEXT::eEvent: return "Event"; + case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool"; + case DebugReportObjectTypeEXT::eBufferView: return "BufferView"; + case DebugReportObjectTypeEXT::eImageView: return "ImageView"; + case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule"; + case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache"; + case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout"; + case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass"; + case DebugReportObjectTypeEXT::ePipeline: return "Pipeline"; + case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout"; + case DebugReportObjectTypeEXT::eSampler: return "Sampler"; + case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool"; + case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet"; + case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer"; + case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool"; + case DebugReportObjectTypeEXT::eSurfaceKHR: return "SurfaceKHR"; + case DebugReportObjectTypeEXT::eSwapchainKHR: return "SwapchainKHR"; + case 
DebugReportObjectTypeEXT::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; + case DebugReportObjectTypeEXT::eDisplayKHR: return "DisplayKHR"; + case DebugReportObjectTypeEXT::eDisplayModeKHR: return "DisplayModeKHR"; + case DebugReportObjectTypeEXT::eValidationCacheEXT: return "ValidationCacheEXT"; + case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case DebugReportObjectTypeEXT::eCuModuleNVX: return "CuModuleNVX"; + case DebugReportObjectTypeEXT::eCuFunctionNVX: return "CuFunctionNVX"; + case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_rasterization_order === + + enum class RasterizationOrderAMD + { + eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD, + eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD + }; + + VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value ) + { + switch ( value ) + { + case RasterizationOrderAMD::eStrict: return "Strict"; + case RasterizationOrderAMD::eRelaxed: return "Relaxed"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_KHR_video_queue === + + enum class VideoCodecOperationFlagBitsKHR : VkVideoCodecOperationFlagsKHR + { + eInvalid = VK_VIDEO_CODEC_OPERATION_INVALID_BIT_KHR, +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + eEncodeH264EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT, + eDecodeH264EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_EXT, + eDecodeH265EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_EXT +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCodecOperationFlagBitsKHR::eInvalid: return "Invalid"; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + case VideoCodecOperationFlagBitsKHR::eEncodeH264EXT: return "EncodeH264EXT"; + case VideoCodecOperationFlagBitsKHR::eDecodeH264EXT: return "DecodeH264EXT"; + case VideoCodecOperationFlagBitsKHR::eDecodeH265EXT: return "DecodeH265EXT"; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoChromaSubsamplingFlagBitsKHR : VkVideoChromaSubsamplingFlagsKHR + { + eInvalid = VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_BIT_KHR, + eMonochrome = VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR, + e420 = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR, + e422 = VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR, + e444 = VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagBitsKHR value ) + { + switch ( value ) + { + case VideoChromaSubsamplingFlagBitsKHR::eInvalid: return "Invalid"; + case VideoChromaSubsamplingFlagBitsKHR::eMonochrome: return "Monochrome"; + case VideoChromaSubsamplingFlagBitsKHR::e420: return "420"; + case VideoChromaSubsamplingFlagBitsKHR::e422: return "422"; + case VideoChromaSubsamplingFlagBitsKHR::e444: return "444"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoComponentBitDepthFlagBitsKHR : VkVideoComponentBitDepthFlagsKHR + { + eInvalid = 
VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR, + e8 = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR, + e10 = VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR, + e12 = VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagBitsKHR value ) + { + switch ( value ) + { + case VideoComponentBitDepthFlagBitsKHR::eInvalid: return "Invalid"; + case VideoComponentBitDepthFlagBitsKHR::e8: return "8"; + case VideoComponentBitDepthFlagBitsKHR::e10: return "10"; + case VideoComponentBitDepthFlagBitsKHR::e12: return "12"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoCapabilitiesFlagBitsKHR : VkVideoCapabilitiesFlagsKHR + { + eProtectedContent = VK_VIDEO_CAPABILITIES_PROTECTED_CONTENT_BIT_KHR, + eSeparateReferenceImages = VK_VIDEO_CAPABILITIES_SEPARATE_REFERENCE_IMAGES_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoCapabilitiesFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCapabilitiesFlagBitsKHR::eProtectedContent: return "ProtectedContent"; + case VideoCapabilitiesFlagBitsKHR::eSeparateReferenceImages: return "SeparateReferenceImages"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoSessionCreateFlagBitsKHR : VkVideoSessionCreateFlagsKHR + { + eDefault = VK_VIDEO_SESSION_CREATE_DEFAULT_KHR, + eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagBitsKHR value ) + { + switch ( value ) + { + case VideoSessionCreateFlagBitsKHR::eDefault: return "Default"; + case VideoSessionCreateFlagBitsKHR::eProtectedContent: return "ProtectedContent"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoCodingControlFlagBitsKHR : VkVideoCodingControlFlagsKHR + { + eDefault = VK_VIDEO_CODING_CONTROL_DEFAULT_KHR, + eReset = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCodingControlFlagBitsKHR::eDefault: return "Default"; + case VideoCodingControlFlagBitsKHR::eReset: return "Reset"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoCodingQualityPresetFlagBitsKHR : VkVideoCodingQualityPresetFlagsKHR + { + eDefault = VK_VIDEO_CODING_QUALITY_PRESET_DEFAULT_BIT_KHR, + eNormal = VK_VIDEO_CODING_QUALITY_PRESET_NORMAL_BIT_KHR, + ePower = VK_VIDEO_CODING_QUALITY_PRESET_POWER_BIT_KHR, + eQuality = VK_VIDEO_CODING_QUALITY_PRESET_QUALITY_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoCodingQualityPresetFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCodingQualityPresetFlagBitsKHR::eDefault: return "Default"; + case VideoCodingQualityPresetFlagBitsKHR::eNormal: return "Normal"; + case VideoCodingQualityPresetFlagBitsKHR::ePower: return "Power"; + case VideoCodingQualityPresetFlagBitsKHR::eQuality: return "Quality"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryResultStatusKHR + { + eError = VK_QUERY_RESULT_STATUS_ERROR_KHR, + eNotReady = VK_QUERY_RESULT_STATUS_NOT_READY_KHR, + eComplete = VK_QUERY_RESULT_STATUS_COMPLETE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( QueryResultStatusKHR value ) + { + switch ( value ) + { + case 
QueryResultStatusKHR::eError: return "Error"; + case QueryResultStatusKHR::eNotReady: return "NotReady"; + case QueryResultStatusKHR::eComplete: return "Complete"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_KHR_video_decode_queue === + + enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR + { + eDefault = VK_VIDEO_DECODE_DEFAULT_KHR, + eReserved0 = VK_VIDEO_DECODE_RESERVED_0_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoDecodeFlagBitsKHR::eDefault: return "Default"; + case VideoDecodeFlagBitsKHR::eReserved0: return "Reserved0"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_EXT_video_encode_h264 === + + enum class VideoEncodeH264CapabilitiesFlagBitsEXT : VkVideoEncodeH264CapabilitiesFlagsEXT + { + eVkVideoEncodeH264CapabilityCabac = VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT, + eVkVideoEncodeH264CapabilityCavlc = VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT, + eVkVideoEncodeH264CapabilityWeightedBiPredImplicit = + VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BI_PRED_IMPLICIT_BIT_EXT, + eVkVideoEncodeH264CapabilityTransform8X8 = VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT, + eVkVideoEncodeH264CapabilityChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT, + eVkVideoEncodeH264CapabilitySecondChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT, + eVkVideoEncodeH264CapabilityDeblockingFilterDisabled = + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT, + eVkVideoEncodeH264CapabilityDeblockingFilterEnabled = + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT, + eVkVideoEncodeH264CapabilityDeblockingFilterPartial = + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT, + eVkVideoEncodeH264CapabilityMultipleSlicePerFrame = + VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT, + eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize = + VK_VIDEO_ENCODE_H264_CAPABILITY_EVENLY_DISTRIBUTED_SLICE_SIZE_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilitiesFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCabac: + return "VkVideoEncodeH264CapabilityCabac"; + case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCavlc: + return "VkVideoEncodeH264CapabilityCavlc"; + case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityWeightedBiPredImplicit: + return "VkVideoEncodeH264CapabilityWeightedBiPredImplicit"; + case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityTransform8X8: + return "VkVideoEncodeH264CapabilityTransform8X8"; + case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityChromaQpOffset: + return "VkVideoEncodeH264CapabilityChromaQpOffset"; + case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilitySecondChromaQpOffset: + return "VkVideoEncodeH264CapabilitySecondChromaQpOffset"; + case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterDisabled: + return "VkVideoEncodeH264CapabilityDeblockingFilterDisabled"; + case 
VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterEnabled: + return "VkVideoEncodeH264CapabilityDeblockingFilterEnabled"; + case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterPartial: + return "VkVideoEncodeH264CapabilityDeblockingFilterPartial"; + case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityMultipleSlicePerFrame: + return "VkVideoEncodeH264CapabilityMultipleSlicePerFrame"; + case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize: + return "VkVideoEncodeH264CapabilityEvenlyDistributedSliceSize"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeH264InputModeFlagBitsEXT : VkVideoEncodeH264InputModeFlagsEXT + { + eVkVideoEncodeH264InputModeFrame = VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT, + eVkVideoEncodeH264InputModeSlice = VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT, + eVkVideoEncodeH264InputModeNonVcl = VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeFrame: + return "VkVideoEncodeH264InputModeFrame"; + case VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeSlice: + return "VkVideoEncodeH264InputModeSlice"; + case VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeNonVcl: + return "VkVideoEncodeH264InputModeNonVcl"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeH264OutputModeFlagBitsEXT : VkVideoEncodeH264OutputModeFlagsEXT + { + eVkVideoEncodeH264OutputModeFrame = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT, + eVkVideoEncodeH264OutputModeSlice = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT, + eVkVideoEncodeH264OutputModeNonVcl = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeFrame: + return "VkVideoEncodeH264OutputModeFrame"; + case VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeSlice: + return "VkVideoEncodeH264OutputModeSlice"; + case VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeNonVcl: + return "VkVideoEncodeH264OutputModeNonVcl"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeH264CreateFlagBitsEXT : VkVideoEncodeH264CreateFlagsEXT + { + eVkVideoEncodeH264CreateDefault = VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT, + eVkVideoEncodeH264CreateReserved0 = VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CreateFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH264CreateFlagBitsEXT::eVkVideoEncodeH264CreateDefault: return "VkVideoEncodeH264CreateDefault"; + case VideoEncodeH264CreateFlagBitsEXT::eVkVideoEncodeH264CreateReserved0: + return "VkVideoEncodeH264CreateReserved0"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_EXT_video_decode_h264 === + + enum class VideoDecodeH264FieldLayoutFlagBitsEXT : 
VkVideoDecodeH264FieldLayoutFlagsEXT + { + eVkVideoDecodeH264ProgressivePicturesOnly = VK_VIDEO_DECODE_H264_PROGRESSIVE_PICTURES_ONLY_EXT, + eVkVideoDecodeH264FieldLayoutLineInterlacedPlane = VK_VIDEO_DECODE_H264_FIELD_LAYOUT_LINE_INTERLACED_PLANE_BIT_EXT, + eVkVideoDecodeH264FieldLayoutSeparateInterlacedPlane = + VK_VIDEO_DECODE_H264_FIELD_LAYOUT_SEPARATE_INTERLACED_PLANE_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264FieldLayoutFlagBitsEXT value ) + { + switch ( value ) + { + case VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264ProgressivePicturesOnly: + return "VkVideoDecodeH264ProgressivePicturesOnly"; + case VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutLineInterlacedPlane: + return "VkVideoDecodeH264FieldLayoutLineInterlacedPlane"; + case VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutSeparateInterlacedPlane: + return "VkVideoDecodeH264FieldLayoutSeparateInterlacedPlane"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_shader_info === + + enum class ShaderInfoTypeAMD + { + eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD, + eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD, + eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD + }; + + VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value ) + { + switch ( value ) + { + case ShaderInfoTypeAMD::eStatistics: return "Statistics"; + case ShaderInfoTypeAMD::eBinary: return "Binary"; + case ShaderInfoTypeAMD::eDisassembly: return "Disassembly"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_external_memory_capabilities === + + enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV + { + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV, + eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV, + eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value ) + { + switch ( value ) + { + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value ) + { + switch ( value ) + { + case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_validation_flags === + + enum class ValidationCheckEXT + { + eAll = VK_VALIDATION_CHECK_ALL_EXT, + eShaders = 
VK_VALIDATION_CHECK_SHADERS_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value ) + { + switch ( value ) + { + case ValidationCheckEXT::eAll: return "All"; + case ValidationCheckEXT::eShaders: return "Shaders"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_conditional_rendering === + + enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT + { + eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value ) + { + switch ( value ) + { + case ConditionalRenderingFlagBitsEXT::eInverted: return "Inverted"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_display_surface_counter === + + enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT + { + eVblank = VK_SURFACE_COUNTER_VBLANK_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value ) + { + switch ( value ) + { + case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_display_control === + + enum class DisplayPowerStateEXT + { + eOff = VK_DISPLAY_POWER_STATE_OFF_EXT, + eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT, + eOn = VK_DISPLAY_POWER_STATE_ON_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value ) + { + switch ( value ) + { + case DisplayPowerStateEXT::eOff: return "Off"; + case DisplayPowerStateEXT::eSuspend: return "Suspend"; + case DisplayPowerStateEXT::eOn: return "On"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DeviceEventTypeEXT + { + eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value ) + { + switch ( value ) + { + case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DisplayEventTypeEXT + { + eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value ) + { + switch ( value ) + { + case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_viewport_swizzle === + + enum class ViewportCoordinateSwizzleNV + { + ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, + eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV, + ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV, + eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV, + ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, + eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV, + ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV, + eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ViewportCoordinateSwizzleNV value ) + { + switch ( value ) + { + case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX"; + case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX"; + case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY"; + case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY"; + 
case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ"; + case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ"; + case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW"; + case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_discard_rectangles === + + enum class DiscardRectangleModeEXT + { + eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, + eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value ) + { + switch ( value ) + { + case DiscardRectangleModeEXT::eInclusive: return "Inclusive"; + case DiscardRectangleModeEXT::eExclusive: return "Exclusive"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_conservative_rasterization === + + enum class ConservativeRasterizationModeEXT + { + eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, + eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value ) + { + switch ( value ) + { + case ConservativeRasterizationModeEXT::eDisabled: return "Disabled"; + case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate"; + case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_performance_query === + + enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR + { + ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value ) + { + switch ( value ) + { + case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting: return "PerformanceImpacting"; + case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted: return "ConcurrentlyImpacted"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceCounterScopeKHR + { + eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, + eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, + eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR, + eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value ) + { + switch ( value ) + { + case PerformanceCounterScopeKHR::eCommandBuffer: return "CommandBuffer"; + case PerformanceCounterScopeKHR::eRenderPass: return "RenderPass"; + case PerformanceCounterScopeKHR::eCommand: return "Command"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceCounterStorageKHR + { + eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR, + eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR, + eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR, + eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR, + eFloat32 = 
VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR, + eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value ) + { + switch ( value ) + { + case PerformanceCounterStorageKHR::eInt32: return "Int32"; + case PerformanceCounterStorageKHR::eInt64: return "Int64"; + case PerformanceCounterStorageKHR::eUint32: return "Uint32"; + case PerformanceCounterStorageKHR::eUint64: return "Uint64"; + case PerformanceCounterStorageKHR::eFloat32: return "Float32"; + case PerformanceCounterStorageKHR::eFloat64: return "Float64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceCounterUnitKHR + { + eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR, + ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR, + eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR, + eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR, + eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR, + eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR, + eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR, + eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR, + eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR, + eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR, + eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value ) + { + switch ( value ) + { + case PerformanceCounterUnitKHR::eGeneric: return "Generic"; + case PerformanceCounterUnitKHR::ePercentage: return "Percentage"; + case PerformanceCounterUnitKHR::eNanoseconds: return "Nanoseconds"; + case PerformanceCounterUnitKHR::eBytes: return "Bytes"; + case PerformanceCounterUnitKHR::eBytesPerSecond: return "BytesPerSecond"; + case PerformanceCounterUnitKHR::eKelvin: return "Kelvin"; + case PerformanceCounterUnitKHR::eWatts: return "Watts"; + case PerformanceCounterUnitKHR::eVolts: return "Volts"; + case PerformanceCounterUnitKHR::eAmps: return "Amps"; + case PerformanceCounterUnitKHR::eHertz: return "Hertz"; + case PerformanceCounterUnitKHR::eCycles: return "Cycles"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR + { + }; + + VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR ) + { + return "(void)"; + } + + //=== VK_EXT_debug_utils === + + enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT + { + eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, + eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, + eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT, + eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value ) + { + switch ( value ) + { + case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose"; + case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info"; + case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning"; + case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT + { + eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, + eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + ePerformance = 
VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value ) + { + switch ( value ) + { + case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General"; + case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation"; + case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_blend_operation_advanced === + + enum class BlendOverlapEXT + { + eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT, + eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT, + eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value ) + { + switch ( value ) + { + case BlendOverlapEXT::eUncorrelated: return "Uncorrelated"; + case BlendOverlapEXT::eDisjoint: return "Disjoint"; + case BlendOverlapEXT::eConjoint: return "Conjoint"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_acceleration_structure === + + enum class AccelerationStructureTypeKHR + { + eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + eGeneric = VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR + }; + using AccelerationStructureTypeNV = AccelerationStructureTypeKHR; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value ) + { + switch ( value ) + { + case AccelerationStructureTypeKHR::eTopLevel: return "TopLevel"; + case AccelerationStructureTypeKHR::eBottomLevel: return "BottomLevel"; + case AccelerationStructureTypeKHR::eGeneric: return "Generic"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccelerationStructureBuildTypeKHR + { + eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR, + eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, + eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value ) + { + switch ( value ) + { + case AccelerationStructureBuildTypeKHR::eHost: return "Host"; + case AccelerationStructureBuildTypeKHR::eDevice: return "Device"; + case AccelerationStructureBuildTypeKHR::eHostOrDevice: return "HostOrDevice"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR + { + eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR, + eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR + }; + using GeometryFlagBitsNV = GeometryFlagBitsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value ) + { + switch ( value ) + { + case GeometryFlagBitsKHR::eOpaque: return "Opaque"; + case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation: return "NoDuplicateAnyHitInvocation"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR + { + eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + eTriangleCullDisable 
= VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV + }; + using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value ) + { + switch ( value ) + { + case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable: return "TriangleFacingCullDisable"; + case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise: return "TriangleFrontCounterclockwise"; + case GeometryInstanceFlagBitsKHR::eForceOpaque: return "ForceOpaque"; + case GeometryInstanceFlagBitsKHR::eForceNoOpaque: return "ForceNoOpaque"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR + { + eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR + }; + using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR; + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value ) + { + switch ( value ) + { + case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate: return "AllowUpdate"; + case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction: return "AllowCompaction"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace: return "PreferFastTrace"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild: return "PreferFastBuild"; + case BuildAccelerationStructureFlagBitsKHR::eLowMemory: return "LowMemory"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CopyAccelerationStructureModeKHR + { + eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR, + eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR + }; + using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR; + + VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value ) + { + switch ( value ) + { + case CopyAccelerationStructureModeKHR::eClone: return "Clone"; + case CopyAccelerationStructureModeKHR::eCompact: return "Compact"; + case CopyAccelerationStructureModeKHR::eSerialize: return "Serialize"; + case CopyAccelerationStructureModeKHR::eDeserialize: return "Deserialize"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class GeometryTypeKHR + { + eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR, + eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR + }; + using GeometryTypeNV = GeometryTypeKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value ) + { + switch ( value ) + { + case GeometryTypeKHR::eTriangles: return "Triangles"; + case GeometryTypeKHR::eAabbs: return "Aabbs"; + case GeometryTypeKHR::eInstances: return "Instances"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccelerationStructureCompatibilityKHR + { + eCompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR, + eIncompatible = 
VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCompatibilityKHR value ) + { + switch ( value ) + { + case AccelerationStructureCompatibilityKHR::eCompatible: return "Compatible"; + case AccelerationStructureCompatibilityKHR::eIncompatible: return "Incompatible"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccelerationStructureCreateFlagBitsKHR : VkAccelerationStructureCreateFlagsKHR + { + eDeviceAddressCaptureReplay = VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagBitsKHR value ) + { + switch ( value ) + { + case AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BuildAccelerationStructureModeKHR + { + eBuild = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR, + eUpdate = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureModeKHR value ) + { + switch ( value ) + { + case BuildAccelerationStructureModeKHR::eBuild: return "Build"; + case BuildAccelerationStructureModeKHR::eUpdate: return "Update"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_framebuffer_mixed_samples === + + enum class CoverageModulationModeNV + { + eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV, + eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV, + eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV, + eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV + }; + + VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value ) + { + switch ( value ) + { + case CoverageModulationModeNV::eNone: return "None"; + case CoverageModulationModeNV::eRgb: return "Rgb"; + case CoverageModulationModeNV::eAlpha: return "Alpha"; + case CoverageModulationModeNV::eRgba: return "Rgba"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_validation_cache === + + enum class ValidationCacheHeaderVersionEXT + { + eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value ) + { + switch ( value ) + { + case ValidationCacheHeaderVersionEXT::eOne: return "One"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_shading_rate_image === + + enum class ShadingRatePaletteEntryNV + { + eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, + e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV, + e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV, + e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, + e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV, + e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, + e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV, + e1InvocationPer4X2Pixels = 
VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV, + e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV, + e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value ) + { + switch ( value ) + { + case ShadingRatePaletteEntryNV::eNoInvocations: return "NoInvocations"; + case ShadingRatePaletteEntryNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CoarseSampleOrderTypeNV + { + eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, + eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, + ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, + eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV + }; + + VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value ) + { + switch ( value ) + { + case CoarseSampleOrderTypeNV::eDefault: return "Default"; + case CoarseSampleOrderTypeNV::eCustom: return "Custom"; + case CoarseSampleOrderTypeNV::ePixelMajor: return "PixelMajor"; + case CoarseSampleOrderTypeNV::eSampleMajor: return "SampleMajor"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing === + + enum class AccelerationStructureMemoryRequirementsTypeNV + { + eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, + eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, + eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value ) + { + switch ( value ) + { + case AccelerationStructureMemoryRequirementsTypeNV::eObject: return "Object"; + case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch: return "BuildScratch"; + case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch: return "UpdateScratch"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_global_priority === + + enum class QueueGlobalPriorityEXT + { + eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, + eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT, + eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT, + eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityEXT value ) + { + switch ( value ) + { + case QueueGlobalPriorityEXT::eLow: return "Low"; + 
case QueueGlobalPriorityEXT::eMedium: return "Medium"; + case QueueGlobalPriorityEXT::eHigh: return "High"; + case QueueGlobalPriorityEXT::eRealtime: return "Realtime"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_pipeline_compiler_control === + + enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD ) + { + return "(void)"; + } + + //=== VK_EXT_calibrated_timestamps === + + enum class TimeDomainEXT + { + eDevice = VK_TIME_DOMAIN_DEVICE_EXT, + eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT, + eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT, + eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value ) + { + switch ( value ) + { + case TimeDomainEXT::eDevice: return "Device"; + case TimeDomainEXT::eClockMonotonic: return "ClockMonotonic"; + case TimeDomainEXT::eClockMonotonicRaw: return "ClockMonotonicRaw"; + case TimeDomainEXT::eQueryPerformanceCounter: return "QueryPerformanceCounter"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_memory_overallocation_behavior === + + enum class MemoryOverallocationBehaviorAMD + { + eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, + eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, + eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD + }; + + VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value ) + { + switch ( value ) + { + case MemoryOverallocationBehaviorAMD::eDefault: return "Default"; + case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed"; + case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_pipeline_creation_feedback === + + enum class PipelineCreationFeedbackFlagBitsEXT : VkPipelineCreationFeedbackFlagsEXT + { + eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT, + eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT, + eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value ) + { + switch ( value ) + { + case PipelineCreationFeedbackFlagBitsEXT::eValid: return "Valid"; + case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit: return "ApplicationPipelineCacheHit"; + case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration: return "BasePipelineAcceleration"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_INTEL_performance_query === + + enum class PerformanceConfigurationTypeINTEL + { + eCommandQueueMetricsDiscoveryActivated = + VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value ) + { + switch ( value ) + { + case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated: + return "CommandQueueMetricsDiscoveryActivated"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class 
QueryPoolSamplingModeINTEL + { + eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value ) + { + switch ( value ) + { + case QueryPoolSamplingModeINTEL::eManual: return "Manual"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceOverrideTypeINTEL + { + eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, + eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value ) + { + switch ( value ) + { + case PerformanceOverrideTypeINTEL::eNullHardware: return "NullHardware"; + case PerformanceOverrideTypeINTEL::eFlushGpuCaches: return "FlushGpuCaches"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceParameterTypeINTEL + { + eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, + eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value ) + { + switch ( value ) + { + case PerformanceParameterTypeINTEL::eHwCountersSupported: return "HwCountersSupported"; + case PerformanceParameterTypeINTEL::eStreamMarkerValidBits: return "StreamMarkerValidBits"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceValueTypeINTEL + { + eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL, + eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL, + eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL, + eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL, + eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value ) + { + switch ( value ) + { + case PerformanceValueTypeINTEL::eUint32: return "Uint32"; + case PerformanceValueTypeINTEL::eUint64: return "Uint64"; + case PerformanceValueTypeINTEL::eFloat: return "Float"; + case PerformanceValueTypeINTEL::eBool: return "Bool"; + case PerformanceValueTypeINTEL::eString: return "String"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_fragment_shading_rate === + + enum class FragmentShadingRateCombinerOpKHR + { + eKeep = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR, + eReplace = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR, + eMin = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR, + eMax = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR, + eMul = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateCombinerOpKHR value ) + { + switch ( value ) + { + case FragmentShadingRateCombinerOpKHR::eKeep: return "Keep"; + case FragmentShadingRateCombinerOpKHR::eReplace: return "Replace"; + case FragmentShadingRateCombinerOpKHR::eMin: return "Min"; + case FragmentShadingRateCombinerOpKHR::eMax: return "Max"; + case FragmentShadingRateCombinerOpKHR::eMul: return "Mul"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_shader_core_properties2 === + + enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD + { + }; + + VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD ) + { + return "(void)"; + } + + //=== VK_EXT_tooling_info 
=== + + enum class ToolPurposeFlagBitsEXT : VkToolPurposeFlagsEXT + { + eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT, + eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT_EXT, + eTracing = VK_TOOL_PURPOSE_TRACING_BIT_EXT, + eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT, + eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT, + eDebugReporting = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT, + eDebugMarkers = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBitsEXT value ) + { + switch ( value ) + { + case ToolPurposeFlagBitsEXT::eValidation: return "Validation"; + case ToolPurposeFlagBitsEXT::eProfiling: return "Profiling"; + case ToolPurposeFlagBitsEXT::eTracing: return "Tracing"; + case ToolPurposeFlagBitsEXT::eAdditionalFeatures: return "AdditionalFeatures"; + case ToolPurposeFlagBitsEXT::eModifyingFeatures: return "ModifyingFeatures"; + case ToolPurposeFlagBitsEXT::eDebugReporting: return "DebugReporting"; + case ToolPurposeFlagBitsEXT::eDebugMarkers: return "DebugMarkers"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_validation_features === + + enum class ValidationFeatureEnableEXT + { + eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, + eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, + eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, + eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT, + eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value ) + { + switch ( value ) + { + case ValidationFeatureEnableEXT::eGpuAssisted: return "GpuAssisted"; + case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot: return "GpuAssistedReserveBindingSlot"; + case ValidationFeatureEnableEXT::eBestPractices: return "BestPractices"; + case ValidationFeatureEnableEXT::eDebugPrintf: return "DebugPrintf"; + case ValidationFeatureEnableEXT::eSynchronizationValidation: return "SynchronizationValidation"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ValidationFeatureDisableEXT + { + eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, + eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT, + eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, + eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT, + eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT, + eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT, + eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value ) + { + switch ( value ) + { + case ValidationFeatureDisableEXT::eAll: return "All"; + case ValidationFeatureDisableEXT::eShaders: return "Shaders"; + case ValidationFeatureDisableEXT::eThreadSafety: return "ThreadSafety"; + case ValidationFeatureDisableEXT::eApiParameters: return "ApiParameters"; + case ValidationFeatureDisableEXT::eObjectLifetimes: return "ObjectLifetimes"; + case ValidationFeatureDisableEXT::eCoreChecks: return "CoreChecks"; + case ValidationFeatureDisableEXT::eUniqueHandles: return "UniqueHandles"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_cooperative_matrix === + + enum class 
ScopeNV + { + eDevice = VK_SCOPE_DEVICE_NV, + eWorkgroup = VK_SCOPE_WORKGROUP_NV, + eSubgroup = VK_SCOPE_SUBGROUP_NV, + eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ScopeNV value ) + { + switch ( value ) + { + case ScopeNV::eDevice: return "Device"; + case ScopeNV::eWorkgroup: return "Workgroup"; + case ScopeNV::eSubgroup: return "Subgroup"; + case ScopeNV::eQueueFamily: return "QueueFamily"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ComponentTypeNV + { + eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV, + eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV, + eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV, + eSint8 = VK_COMPONENT_TYPE_SINT8_NV, + eSint16 = VK_COMPONENT_TYPE_SINT16_NV, + eSint32 = VK_COMPONENT_TYPE_SINT32_NV, + eSint64 = VK_COMPONENT_TYPE_SINT64_NV, + eUint8 = VK_COMPONENT_TYPE_UINT8_NV, + eUint16 = VK_COMPONENT_TYPE_UINT16_NV, + eUint32 = VK_COMPONENT_TYPE_UINT32_NV, + eUint64 = VK_COMPONENT_TYPE_UINT64_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value ) + { + switch ( value ) + { + case ComponentTypeNV::eFloat16: return "Float16"; + case ComponentTypeNV::eFloat32: return "Float32"; + case ComponentTypeNV::eFloat64: return "Float64"; + case ComponentTypeNV::eSint8: return "Sint8"; + case ComponentTypeNV::eSint16: return "Sint16"; + case ComponentTypeNV::eSint32: return "Sint32"; + case ComponentTypeNV::eSint64: return "Sint64"; + case ComponentTypeNV::eUint8: return "Uint8"; + case ComponentTypeNV::eUint16: return "Uint16"; + case ComponentTypeNV::eUint32: return "Uint32"; + case ComponentTypeNV::eUint64: return "Uint64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_coverage_reduction_mode === + + enum class CoverageReductionModeNV + { + eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV, + eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV + }; + + VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value ) + { + switch ( value ) + { + case CoverageReductionModeNV::eMerge: return "Merge"; + case CoverageReductionModeNV::eTruncate: return "Truncate"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_provoking_vertex === + + enum class ProvokingVertexModeEXT + { + eFirstVertex = VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT, + eLastVertex = VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ProvokingVertexModeEXT value ) + { + switch ( value ) + { + case ProvokingVertexModeEXT::eFirstVertex: return "FirstVertex"; + case ProvokingVertexModeEXT::eLastVertex: return "LastVertex"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + + //=== VK_EXT_full_screen_exclusive === + + enum class FullScreenExclusiveEXT + { + eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, + eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT, + eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT, + eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value ) + { + switch ( value ) + { + case FullScreenExclusiveEXT::eDefault: return "Default"; + case FullScreenExclusiveEXT::eAllowed: return "Allowed"; + case FullScreenExclusiveEXT::eDisallowed: return "Disallowed"; + case 
FullScreenExclusiveEXT::eApplicationControlled: return "ApplicationControlled"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_line_rasterization === + + enum class LineRasterizationModeEXT + { + eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, + eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, + eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, + eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value ) + { + switch ( value ) + { + case LineRasterizationModeEXT::eDefault: return "Default"; + case LineRasterizationModeEXT::eRectangular: return "Rectangular"; + case LineRasterizationModeEXT::eBresenham: return "Bresenham"; + case LineRasterizationModeEXT::eRectangularSmooth: return "RectangularSmooth"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_pipeline_executable_properties === + + enum class PipelineExecutableStatisticFormatKHR + { + eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, + eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR, + eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR, + eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value ) + { + switch ( value ) + { + case PipelineExecutableStatisticFormatKHR::eBool32: return "Bool32"; + case PipelineExecutableStatisticFormatKHR::eInt64: return "Int64"; + case PipelineExecutableStatisticFormatKHR::eUint64: return "Uint64"; + case PipelineExecutableStatisticFormatKHR::eFloat64: return "Float64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_device_generated_commands === + + enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV + { + eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value ) + { + switch ( value ) + { + case IndirectStateFlagBitsNV::eFlagFrontface: return "FlagFrontface"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class IndirectCommandsTokenTypeNV + { + eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, + eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, + eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value ) + { + switch ( value ) + { + case IndirectCommandsTokenTypeNV::eShaderGroup: return "ShaderGroup"; + case IndirectCommandsTokenTypeNV::eStateFlags: return "StateFlags"; + case IndirectCommandsTokenTypeNV::eIndexBuffer: return "IndexBuffer"; + case IndirectCommandsTokenTypeNV::eVertexBuffer: return "VertexBuffer"; + case IndirectCommandsTokenTypeNV::ePushConstant: return "PushConstant"; + case IndirectCommandsTokenTypeNV::eDrawIndexed: return "DrawIndexed"; + case IndirectCommandsTokenTypeNV::eDraw: return "Draw"; + case 
IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV + { + eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV, + eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV, + eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value ) + { + switch ( value ) + { + case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess: return "ExplicitPreprocess"; + case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences: return "IndexedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences: return "UnorderedSequences"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_device_memory_report === + + enum class DeviceMemoryReportEventTypeEXT + { + eAllocate = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT, + eFree = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT, + eImport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT, + eUnimport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT, + eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value ) + { + switch ( value ) + { + case DeviceMemoryReportEventTypeEXT::eAllocate: return "Allocate"; + case DeviceMemoryReportEventTypeEXT::eFree: return "Free"; + case DeviceMemoryReportEventTypeEXT::eImport: return "Import"; + case DeviceMemoryReportEventTypeEXT::eUnimport: return "Unimport"; + case DeviceMemoryReportEventTypeEXT::eAllocationFailed: return "AllocationFailed"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_private_data === + + enum class PrivateDataSlotCreateFlagBitsEXT : VkPrivateDataSlotCreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBitsEXT ) + { + return "(void)"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_KHR_video_encode_queue === + + enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR + { + eDefault = VK_VIDEO_ENCODE_DEFAULT_KHR, + eReserved0 = VK_VIDEO_ENCODE_RESERVED_0_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeFlagBitsKHR::eDefault: return "Default"; + case VideoEncodeFlagBitsKHR::eReserved0: return "Reserved0"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR + { + eDefault = VK_VIDEO_ENCODE_RATE_CONTROL_DEFAULT_KHR, + eReset = VK_VIDEO_ENCODE_RATE_CONTROL_RESET_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeRateControlFlagBitsKHR::eDefault: return "Default"; + case VideoEncodeRateControlFlagBitsKHR::eReset: return "Reset"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeRateControlModeFlagBitsKHR : VkVideoEncodeRateControlModeFlagsKHR + { + eNone = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR, + eCbr = 
VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR, + eVbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeRateControlModeFlagBitsKHR::eNone: return "None"; + case VideoEncodeRateControlModeFlagBitsKHR::eCbr: return "Cbr"; + case VideoEncodeRateControlModeFlagBitsKHR::eVbr: return "Vbr"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_device_diagnostics_config === + + enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV + { + eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV, + eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV, + eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value ) + { + switch ( value ) + { + case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo: return "EnableShaderDebugInfo"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking: return "EnableResourceTracking"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints: return "EnableAutomaticCheckpoints"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_synchronization2 === + + enum class PipelineStageFlagBits2KHR : VkPipelineStageFlags2KHR + { + eNone = VK_PIPELINE_STAGE_2_NONE_KHR, + eTopOfPipe = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, + eDrawIndirect = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, + eVertexInput = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, + eVertexShader = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR, + eTessellationControlShader = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR, + eTessellationEvaluationShader = VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR, + eGeometryShader = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR, + eFragmentShader = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR, + eEarlyFragmentTests = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, + eLateFragmentTests = VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR, + eColorAttachmentOutput = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, + eComputeShader = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR, + eAllTransfer = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, + eBottomOfPipe = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR, + eHost = VK_PIPELINE_STAGE_2_HOST_BIT_KHR, + eAllGraphics = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, + eAllCommands = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, + eCopy = VK_PIPELINE_STAGE_2_COPY_BIT_KHR, + eResolve = VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, + eBlit = VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, + eClear = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR, + eIndexInput = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR, + eVertexAttributeInput = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR, + ePreRasterizationShaders = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecode = VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR, + eVideoEncode = VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTransformFeedbackExt = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, + eConditionalRenderingExt = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, + 
eCommandPreprocessNv = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV, + eFragmentShadingRateAttachment = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eAccelerationStructureBuild = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + eRayTracingShader = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR, + eFragmentDensityProcessExt = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT, + eTaskShaderNv = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV, + eMeshShaderNv = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV, + eAccelerationStructureBuildNv = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV, + eRayTracingShaderNv = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV, + eShadingRateImageNv = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, + eTransfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits2KHR value ) + { + switch ( value ) + { + case PipelineStageFlagBits2KHR::eNone: return "None"; + case PipelineStageFlagBits2KHR::eTopOfPipe: return "TopOfPipe"; + case PipelineStageFlagBits2KHR::eDrawIndirect: return "DrawIndirect"; + case PipelineStageFlagBits2KHR::eVertexInput: return "VertexInput"; + case PipelineStageFlagBits2KHR::eVertexShader: return "VertexShader"; + case PipelineStageFlagBits2KHR::eTessellationControlShader: return "TessellationControlShader"; + case PipelineStageFlagBits2KHR::eTessellationEvaluationShader: return "TessellationEvaluationShader"; + case PipelineStageFlagBits2KHR::eGeometryShader: return "GeometryShader"; + case PipelineStageFlagBits2KHR::eFragmentShader: return "FragmentShader"; + case PipelineStageFlagBits2KHR::eEarlyFragmentTests: return "EarlyFragmentTests"; + case PipelineStageFlagBits2KHR::eLateFragmentTests: return "LateFragmentTests"; + case PipelineStageFlagBits2KHR::eColorAttachmentOutput: return "ColorAttachmentOutput"; + case PipelineStageFlagBits2KHR::eComputeShader: return "ComputeShader"; + case PipelineStageFlagBits2KHR::eAllTransfer: return "AllTransfer"; + case PipelineStageFlagBits2KHR::eBottomOfPipe: return "BottomOfPipe"; + case PipelineStageFlagBits2KHR::eHost: return "Host"; + case PipelineStageFlagBits2KHR::eAllGraphics: return "AllGraphics"; + case PipelineStageFlagBits2KHR::eAllCommands: return "AllCommands"; + case PipelineStageFlagBits2KHR::eCopy: return "Copy"; + case PipelineStageFlagBits2KHR::eResolve: return "Resolve"; + case PipelineStageFlagBits2KHR::eBlit: return "Blit"; + case PipelineStageFlagBits2KHR::eClear: return "Clear"; + case PipelineStageFlagBits2KHR::eIndexInput: return "IndexInput"; + case PipelineStageFlagBits2KHR::eVertexAttributeInput: return "VertexAttributeInput"; + case PipelineStageFlagBits2KHR::ePreRasterizationShaders: return "PreRasterizationShaders"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineStageFlagBits2KHR::eVideoDecode: return "VideoDecode"; + case PipelineStageFlagBits2KHR::eVideoEncode: return "VideoEncode"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case PipelineStageFlagBits2KHR::eTransformFeedbackExt: return "TransformFeedbackExt"; + case PipelineStageFlagBits2KHR::eConditionalRenderingExt: return "ConditionalRenderingExt"; + case PipelineStageFlagBits2KHR::eCommandPreprocessNv: return "CommandPreprocessNv"; + case PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment: return "FragmentShadingRateAttachment"; + case PipelineStageFlagBits2KHR::eAccelerationStructureBuild: return "AccelerationStructureBuild"; + case PipelineStageFlagBits2KHR::eRayTracingShader: return "RayTracingShader"; + case 
PipelineStageFlagBits2KHR::eFragmentDensityProcessExt: return "FragmentDensityProcessExt"; + case PipelineStageFlagBits2KHR::eTaskShaderNv: return "TaskShaderNv"; + case PipelineStageFlagBits2KHR::eMeshShaderNv: return "MeshShaderNv"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccessFlagBits2KHR : VkAccessFlags2KHR + { + eNone = VK_ACCESS_2_NONE_KHR, + eIndirectCommandRead = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, + eIndexRead = VK_ACCESS_2_INDEX_READ_BIT_KHR, + eVertexAttributeRead = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR, + eUniformRead = VK_ACCESS_2_UNIFORM_READ_BIT_KHR, + eInputAttachmentRead = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR, + eShaderRead = VK_ACCESS_2_SHADER_READ_BIT_KHR, + eShaderWrite = VK_ACCESS_2_SHADER_WRITE_BIT_KHR, + eColorAttachmentRead = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR, + eColorAttachmentWrite = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, + eDepthStencilAttachmentRead = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR, + eDepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR, + eTransferRead = VK_ACCESS_2_TRANSFER_READ_BIT_KHR, + eTransferWrite = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, + eHostRead = VK_ACCESS_2_HOST_READ_BIT_KHR, + eHostWrite = VK_ACCESS_2_HOST_WRITE_BIT_KHR, + eMemoryRead = VK_ACCESS_2_MEMORY_READ_BIT_KHR, + eMemoryWrite = VK_ACCESS_2_MEMORY_WRITE_BIT_KHR, + eShaderSampledRead = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR, + eShaderStorageRead = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR, + eShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeRead = VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, + eVideoDecodeWrite = VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, + eVideoEncodeRead = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, + eVideoEncodeWrite = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTransformFeedbackWriteExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadExt = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, + eCommandPreprocessReadNv = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteNv = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, + eFragmentShadingRateAttachmentRead = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + eAccelerationStructureRead = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureWrite = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + eFragmentDensityMapReadExt = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, + eColorAttachmentReadNoncoherentExt = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eAccelerationStructureReadNv = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV, + eAccelerationStructureWriteNv = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV, + eShadingRateImageReadNv = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( AccessFlagBits2KHR value ) + { + switch ( value ) + { + case AccessFlagBits2KHR::eNone: return "None"; + case AccessFlagBits2KHR::eIndirectCommandRead: return "IndirectCommandRead"; + case AccessFlagBits2KHR::eIndexRead: return "IndexRead"; + case AccessFlagBits2KHR::eVertexAttributeRead: return "VertexAttributeRead"; + case AccessFlagBits2KHR::eUniformRead: return "UniformRead"; + case 
AccessFlagBits2KHR::eInputAttachmentRead: return "InputAttachmentRead"; + case AccessFlagBits2KHR::eShaderRead: return "ShaderRead"; + case AccessFlagBits2KHR::eShaderWrite: return "ShaderWrite"; + case AccessFlagBits2KHR::eColorAttachmentRead: return "ColorAttachmentRead"; + case AccessFlagBits2KHR::eColorAttachmentWrite: return "ColorAttachmentWrite"; + case AccessFlagBits2KHR::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead"; + case AccessFlagBits2KHR::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite"; + case AccessFlagBits2KHR::eTransferRead: return "TransferRead"; + case AccessFlagBits2KHR::eTransferWrite: return "TransferWrite"; + case AccessFlagBits2KHR::eHostRead: return "HostRead"; + case AccessFlagBits2KHR::eHostWrite: return "HostWrite"; + case AccessFlagBits2KHR::eMemoryRead: return "MemoryRead"; + case AccessFlagBits2KHR::eMemoryWrite: return "MemoryWrite"; + case AccessFlagBits2KHR::eShaderSampledRead: return "ShaderSampledRead"; + case AccessFlagBits2KHR::eShaderStorageRead: return "ShaderStorageRead"; + case AccessFlagBits2KHR::eShaderStorageWrite: return "ShaderStorageWrite"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case AccessFlagBits2KHR::eVideoDecodeRead: return "VideoDecodeRead"; + case AccessFlagBits2KHR::eVideoDecodeWrite: return "VideoDecodeWrite"; + case AccessFlagBits2KHR::eVideoEncodeRead: return "VideoEncodeRead"; + case AccessFlagBits2KHR::eVideoEncodeWrite: return "VideoEncodeWrite"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case AccessFlagBits2KHR::eTransformFeedbackWriteExt: return "TransformFeedbackWriteExt"; + case AccessFlagBits2KHR::eTransformFeedbackCounterReadExt: return "TransformFeedbackCounterReadExt"; + case AccessFlagBits2KHR::eTransformFeedbackCounterWriteExt: return "TransformFeedbackCounterWriteExt"; + case AccessFlagBits2KHR::eConditionalRenderingReadExt: return "ConditionalRenderingReadExt"; + case AccessFlagBits2KHR::eCommandPreprocessReadNv: return "CommandPreprocessReadNv"; + case AccessFlagBits2KHR::eCommandPreprocessWriteNv: return "CommandPreprocessWriteNv"; + case AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead: return "FragmentShadingRateAttachmentRead"; + case AccessFlagBits2KHR::eAccelerationStructureRead: return "AccelerationStructureRead"; + case AccessFlagBits2KHR::eAccelerationStructureWrite: return "AccelerationStructureWrite"; + case AccessFlagBits2KHR::eFragmentDensityMapReadExt: return "FragmentDensityMapReadExt"; + case AccessFlagBits2KHR::eColorAttachmentReadNoncoherentExt: return "ColorAttachmentReadNoncoherentExt"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SubmitFlagBitsKHR : VkSubmitFlagsKHR + { + eProtected = VK_SUBMIT_PROTECTED_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SubmitFlagBitsKHR value ) + { + switch ( value ) + { + case SubmitFlagBitsKHR::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_fragment_shading_rate_enums === + + enum class FragmentShadingRateNV + { + e1InvocationPerPixel = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV, + e1InvocationPer1X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV, + e1InvocationPer2X1Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV, + e1InvocationPer2X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV, + e1InvocationPer2X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV, 
+ e1InvocationPer4X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV, + e1InvocationPer4X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV, + e2InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV, + e16InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV, + eNoInvocations = VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateNV value ) + { + switch ( value ) + { + case FragmentShadingRateNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case FragmentShadingRateNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels"; + case FragmentShadingRateNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; + case FragmentShadingRateNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; + case FragmentShadingRateNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case FragmentShadingRateNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case FragmentShadingRateNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case FragmentShadingRateNV::eNoInvocations: return "NoInvocations"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FragmentShadingRateTypeNV + { + eFragmentSize = VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV, + eEnums = VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateTypeNV value ) + { + switch ( value ) + { + case FragmentShadingRateTypeNV::eFragmentSize: return "FragmentSize"; + case FragmentShadingRateTypeNV::eEnums: return "Enums"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_ray_tracing_pipeline === + + enum class RayTracingShaderGroupTypeKHR + { + eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR + }; + using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR; + + VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value ) + { + switch ( value ) + { + case RayTracingShaderGroupTypeKHR::eGeneral: return "General"; + case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup: return "TrianglesHitGroup"; + case RayTracingShaderGroupTypeKHR::eProceduralHitGroup: return "ProceduralHitGroup"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ShaderGroupShaderKHR + { + eGeneral = VK_SHADER_GROUP_SHADER_GENERAL_KHR, + eClosestHit = VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR, + eAnyHit = VK_SHADER_GROUP_SHADER_ANY_HIT_KHR, + eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ShaderGroupShaderKHR value ) + { + switch ( value ) + { + case ShaderGroupShaderKHR::eGeneral: return 
"General"; + case ShaderGroupShaderKHR::eClosestHit: return "ClosestHit"; + case ShaderGroupShaderKHR::eAnyHit: return "AnyHit"; + case ShaderGroupShaderKHR::eIntersection: return "Intersection"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + template + struct cpp_type + {}; + + template + struct IndexTypeValue + {}; + + template <> + struct IndexTypeValue + { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16; + }; + + template <> + struct CppType + { + using Type = uint16_t; + }; + + template <> + struct IndexTypeValue + { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32; + }; + + template <> + struct CppType + { + using Type = uint32_t; + }; + + template <> + struct IndexTypeValue + { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8EXT; + }; + + template <> + struct CppType + { + using Type = uint8_t; + }; + + //================ + //=== BITMASKs === + //================ + + //=== VK_VERSION_1_0 === + + using FormatFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( FormatFeatureFlagBits::eSampledImage ) | VkFlags( FormatFeatureFlagBits::eStorageImage ) | + VkFlags( FormatFeatureFlagBits::eStorageImageAtomic ) | VkFlags( FormatFeatureFlagBits::eUniformTexelBuffer ) | + VkFlags( FormatFeatureFlagBits::eStorageTexelBuffer ) | + VkFlags( FormatFeatureFlagBits::eStorageTexelBufferAtomic ) | VkFlags( FormatFeatureFlagBits::eVertexBuffer ) | + VkFlags( FormatFeatureFlagBits::eColorAttachment ) | VkFlags( FormatFeatureFlagBits::eColorAttachmentBlend ) | + VkFlags( FormatFeatureFlagBits::eDepthStencilAttachment ) | VkFlags( FormatFeatureFlagBits::eBlitSrc ) | + VkFlags( FormatFeatureFlagBits::eBlitDst ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterLinear ) | + VkFlags( FormatFeatureFlagBits::eTransferSrc ) | VkFlags( FormatFeatureFlagBits::eTransferDst ) | + VkFlags( FormatFeatureFlagBits::eMidpointChromaSamples ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) | + VkFlags( FormatFeatureFlagBits::eDisjoint ) | VkFlags( FormatFeatureFlagBits::eCositedChromaSamples ) | + VkFlags( FormatFeatureFlagBits::eSampledImageFilterMinmax ) | + VkFlags( FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( FormatFeatureFlagBits::eVideoDecodeOutputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoDecodeDpbKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags( FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) | + VkFlags( FormatFeatureFlagBits::eFragmentDensityMapEXT ) | + VkFlags( FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( FormatFeatureFlagBits::eVideoEncodeInputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoEncodeDpbKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, + FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FormatFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags 
operator&(FormatFeatureFlagBits bit0, + FormatFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return FormatFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0, + FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FormatFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( FormatFeatureFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & FormatFeatureFlagBits::eSampledImage ) + result += "SampledImage | "; + if ( value & FormatFeatureFlagBits::eStorageImage ) + result += "StorageImage | "; + if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) + result += "StorageImageAtomic | "; + if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) + result += "StorageTexelBufferAtomic | "; + if ( value & FormatFeatureFlagBits::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & FormatFeatureFlagBits::eColorAttachment ) + result += "ColorAttachment | "; + if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) + result += "ColorAttachmentBlend | "; + if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) + result += "DepthStencilAttachment | "; + if ( value & FormatFeatureFlagBits::eBlitSrc ) + result += "BlitSrc | "; + if ( value & FormatFeatureFlagBits::eBlitDst ) + result += "BlitDst | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) + result += "SampledImageFilterLinear | "; + if ( value & FormatFeatureFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & FormatFeatureFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) + result += "MidpointChromaSamples | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) + result += "SampledImageYcbcrConversionLinearFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) + result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; + if ( value & FormatFeatureFlagBits::eDisjoint ) + result += "Disjoint | "; + if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) + result += "CositedChromaSamples | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) + result += "SampledImageFilterMinmax | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) + result += "SampledImageFilterCubicIMG | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits::eVideoDecodeOutputKHR ) + result += "VideoDecodeOutputKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits::eVideoDecodeDpbKHR ) + result += "VideoDecodeDpbKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + 
if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) + result += "AccelerationStructureVertexBufferKHR | "; + if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) + result += "FragmentDensityMapEXT | "; + if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits::eVideoEncodeInputKHR ) + result += "VideoEncodeInputKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits::eVideoEncodeDpbKHR ) + result += "VideoEncodeDpbKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ImageCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ImageCreateFlagBits::eSparseBinding ) | VkFlags( ImageCreateFlagBits::eSparseResidency ) | + VkFlags( ImageCreateFlagBits::eSparseAliased ) | VkFlags( ImageCreateFlagBits::eMutableFormat ) | + VkFlags( ImageCreateFlagBits::eCubeCompatible ) | VkFlags( ImageCreateFlagBits::eAlias ) | + VkFlags( ImageCreateFlagBits::eSplitInstanceBindRegions ) | + VkFlags( ImageCreateFlagBits::e2DArrayCompatible ) | + VkFlags( ImageCreateFlagBits::eBlockTexelViewCompatible ) | + VkFlags( ImageCreateFlagBits::eExtendedUsage ) | VkFlags( ImageCreateFlagBits::eProtected ) | + VkFlags( ImageCreateFlagBits::eDisjoint ) | VkFlags( ImageCreateFlagBits::eCornerSampledNV ) | + VkFlags( ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) | + VkFlags( ImageCreateFlagBits::eSubsampledEXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( ImageCreateFlagBits bit0, + ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&(ImageCreateFlagBits bit0, + ImageCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ImageCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0, + ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator~( ImageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ImageCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ImageCreateFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & ImageCreateFlagBits::eSparseResidency ) + result += "SparseResidency | "; + if ( value & ImageCreateFlagBits::eSparseAliased ) + result += "SparseAliased | "; + if ( value & ImageCreateFlagBits::eMutableFormat ) + result += "MutableFormat | "; + if ( value & ImageCreateFlagBits::eCubeCompatible ) + result += "CubeCompatible | "; + if ( value & ImageCreateFlagBits::eAlias ) + result += "Alias | "; + if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) + result += "SplitInstanceBindRegions | "; + if ( value & ImageCreateFlagBits::e2DArrayCompatible ) + result += "2DArrayCompatible | "; + if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) + result += "BlockTexelViewCompatible | "; + if ( value & ImageCreateFlagBits::eExtendedUsage ) + result += "ExtendedUsage | "; + if ( value & ImageCreateFlagBits::eProtected ) + result += "Protected | "; + if ( value & 
ImageCreateFlagBits::eDisjoint ) + result += "Disjoint | "; + if ( value & ImageCreateFlagBits::eCornerSampledNV ) + result += "CornerSampledNV | "; + if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) + result += "SampleLocationsCompatibleDepthEXT | "; + if ( value & ImageCreateFlagBits::eSubsampledEXT ) + result += "SubsampledEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ImageUsageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( ImageUsageFlagBits::eTransferSrc ) | VkFlags( ImageUsageFlagBits::eTransferDst ) | + VkFlags( ImageUsageFlagBits::eSampled ) | VkFlags( ImageUsageFlagBits::eStorage ) | + VkFlags( ImageUsageFlagBits::eColorAttachment ) | VkFlags( ImageUsageFlagBits::eDepthStencilAttachment ) | + VkFlags( ImageUsageFlagBits::eTransientAttachment ) | VkFlags( ImageUsageFlagBits::eInputAttachment ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( ImageUsageFlagBits::eVideoDecodeDstKHR ) | VkFlags( ImageUsageFlagBits::eVideoDecodeSrcKHR ) | + VkFlags( ImageUsageFlagBits::eVideoDecodeDpbKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags( ImageUsageFlagBits::eShadingRateImageNV ) | VkFlags( ImageUsageFlagBits::eFragmentDensityMapEXT ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( ImageUsageFlagBits::eVideoEncodeDstKHR ) | VkFlags( ImageUsageFlagBits::eVideoEncodeSrcKHR ) | + VkFlags( ImageUsageFlagBits::eVideoEncodeDpbKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0, + ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&(ImageUsageFlagBits bit0, + ImageUsageFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ImageUsageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0, + ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageUsageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator~( ImageUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ImageUsageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ImageUsageFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & ImageUsageFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & ImageUsageFlagBits::eSampled ) + result += "Sampled | "; + if ( value & ImageUsageFlagBits::eStorage ) + result += "Storage | "; + if ( value & ImageUsageFlagBits::eColorAttachment ) + result += "ColorAttachment | "; + if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) + result += "DepthStencilAttachment | "; + if ( value & ImageUsageFlagBits::eTransientAttachment ) + result += "TransientAttachment | "; + if ( value & ImageUsageFlagBits::eInputAttachment ) + result += "InputAttachment | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & ImageUsageFlagBits::eVideoDecodeDstKHR ) + result += "VideoDecodeDstKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & ImageUsageFlagBits::eVideoDecodeSrcKHR ) + result += "VideoDecodeSrcKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & ImageUsageFlagBits::eVideoDecodeDpbKHR ) + result += "VideoDecodeDpbKHR | "; 
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & ImageUsageFlagBits::eShadingRateImageNV ) + result += "ShadingRateImageNV | "; + if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) + result += "FragmentDensityMapEXT | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR ) + result += "VideoEncodeDstKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & ImageUsageFlagBits::eVideoEncodeSrcKHR ) + result += "VideoEncodeSrcKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & ImageUsageFlagBits::eVideoEncodeDpbKHR ) + result += "VideoEncodeDpbKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using InstanceCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags ) + { + return "{}"; + } + + using MemoryHeapFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( MemoryHeapFlagBits::eDeviceLocal ) | VkFlags( MemoryHeapFlagBits::eMultiInstance ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, + MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryHeapFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&(MemoryHeapFlagBits bit0, + MemoryHeapFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return MemoryHeapFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0, + MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryHeapFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( MemoryHeapFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & MemoryHeapFlagBits::eDeviceLocal ) + result += "DeviceLocal | "; + if ( value & MemoryHeapFlagBits::eMultiInstance ) + result += "MultiInstance | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using MemoryPropertyFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( MemoryPropertyFlagBits::eDeviceLocal ) | VkFlags( MemoryPropertyFlagBits::eHostVisible ) | + VkFlags( MemoryPropertyFlagBits::eHostCoherent ) | VkFlags( MemoryPropertyFlagBits::eHostCached ) | + VkFlags( MemoryPropertyFlagBits::eLazilyAllocated ) | VkFlags( MemoryPropertyFlagBits::eProtected ) | + VkFlags( MemoryPropertyFlagBits::eDeviceCoherentAMD ) | + VkFlags( MemoryPropertyFlagBits::eDeviceUncachedAMD ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags + operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryPropertyFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator&(MemoryPropertyFlagBits bit0, + MemoryPropertyFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return MemoryPropertyFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags + operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryPropertyFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( 
MemoryPropertyFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & MemoryPropertyFlagBits::eDeviceLocal ) + result += "DeviceLocal | "; + if ( value & MemoryPropertyFlagBits::eHostVisible ) + result += "HostVisible | "; + if ( value & MemoryPropertyFlagBits::eHostCoherent ) + result += "HostCoherent | "; + if ( value & MemoryPropertyFlagBits::eHostCached ) + result += "HostCached | "; + if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) + result += "LazilyAllocated | "; + if ( value & MemoryPropertyFlagBits::eProtected ) + result += "Protected | "; + if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) + result += "DeviceCoherentAMD | "; + if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) + result += "DeviceUncachedAMD | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using QueueFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( QueueFlagBits::eGraphics ) | VkFlags( QueueFlagBits::eCompute ) | + VkFlags( QueueFlagBits::eTransfer ) | VkFlags( QueueFlagBits::eSparseBinding ) | + VkFlags( QueueFlagBits::eProtected ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( QueueFlagBits::eVideoDecodeKHR ) | VkFlags( QueueFlagBits::eVideoEncodeKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0, + QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueueFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&(QueueFlagBits bit0, QueueFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return QueueFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0, + QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueueFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator~( QueueFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( QueueFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueueFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & QueueFlagBits::eGraphics ) + result += "Graphics | "; + if ( value & QueueFlagBits::eCompute ) + result += "Compute | "; + if ( value & QueueFlagBits::eTransfer ) + result += "Transfer | "; + if ( value & QueueFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & QueueFlagBits::eProtected ) + result += "Protected | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & QueueFlagBits::eVideoDecodeKHR ) + result += "VideoDecodeKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & QueueFlagBits::eVideoEncodeKHR ) + result += "VideoEncodeKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SampleCountFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SampleCountFlagBits::e1 ) | VkFlags( SampleCountFlagBits::e2 ) | + VkFlags( SampleCountFlagBits::e4 ) | VkFlags( SampleCountFlagBits::e8 ) | + VkFlags( SampleCountFlagBits::e16 ) | VkFlags( SampleCountFlagBits::e32 ) | + VkFlags( SampleCountFlagBits::e64 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0, + SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SampleCountFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR SampleCountFlags operator&(SampleCountFlagBits bit0, + SampleCountFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return SampleCountFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0, + SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SampleCountFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator~( SampleCountFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SampleCountFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SampleCountFlagBits::e1 ) + result += "1 | "; + if ( value & SampleCountFlagBits::e2 ) + result += "2 | "; + if ( value & SampleCountFlagBits::e4 ) + result += "4 | "; + if ( value & SampleCountFlagBits::e8 ) + result += "8 | "; + if ( value & SampleCountFlagBits::e16 ) + result += "16 | "; + if ( value & SampleCountFlagBits::e32 ) + result += "32 | "; + if ( value & SampleCountFlagBits::e64 ) + result += "64 | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DeviceCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags ) + { + return "{}"; + } + + using DeviceQueueCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DeviceQueueCreateFlagBits::eProtected ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags + operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceQueueCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags + operator&(DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return DeviceQueueCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags + operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceQueueCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DeviceQueueCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & DeviceQueueCreateFlagBits::eProtected ) + result += "Protected | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using PipelineStageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( PipelineStageFlagBits::eTopOfPipe ) | VkFlags( PipelineStageFlagBits::eDrawIndirect ) | + VkFlags( PipelineStageFlagBits::eVertexInput ) | VkFlags( PipelineStageFlagBits::eVertexShader ) | + VkFlags( PipelineStageFlagBits::eTessellationControlShader ) | + VkFlags( PipelineStageFlagBits::eTessellationEvaluationShader ) | + VkFlags( PipelineStageFlagBits::eGeometryShader ) | VkFlags( PipelineStageFlagBits::eFragmentShader ) | + VkFlags( PipelineStageFlagBits::eEarlyFragmentTests ) | VkFlags( PipelineStageFlagBits::eLateFragmentTests ) | + VkFlags( PipelineStageFlagBits::eColorAttachmentOutput ) | VkFlags( PipelineStageFlagBits::eComputeShader ) | + VkFlags( PipelineStageFlagBits::eTransfer ) | VkFlags( PipelineStageFlagBits::eBottomOfPipe ) | + VkFlags( PipelineStageFlagBits::eHost ) | VkFlags( PipelineStageFlagBits::eAllGraphics ) | 
+ VkFlags( PipelineStageFlagBits::eAllCommands ) | VkFlags( PipelineStageFlagBits::eTransformFeedbackEXT ) | + VkFlags( PipelineStageFlagBits::eConditionalRenderingEXT ) | + VkFlags( PipelineStageFlagBits::eAccelerationStructureBuildKHR ) | + VkFlags( PipelineStageFlagBits::eRayTracingShaderKHR ) | VkFlags( PipelineStageFlagBits::eShadingRateImageNV ) | + VkFlags( PipelineStageFlagBits::eTaskShaderNV ) | VkFlags( PipelineStageFlagBits::eMeshShaderNV ) | + VkFlags( PipelineStageFlagBits::eFragmentDensityProcessEXT ) | + VkFlags( PipelineStageFlagBits::eCommandPreprocessNV ) | VkFlags( PipelineStageFlagBits::eNoneKHR ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0, + PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&(PipelineStageFlagBits bit0, + PipelineStageFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0, + PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator~( PipelineStageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineStageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & PipelineStageFlagBits::eTopOfPipe ) + result += "TopOfPipe | "; + if ( value & PipelineStageFlagBits::eDrawIndirect ) + result += "DrawIndirect | "; + if ( value & PipelineStageFlagBits::eVertexInput ) + result += "VertexInput | "; + if ( value & PipelineStageFlagBits::eVertexShader ) + result += "VertexShader | "; + if ( value & PipelineStageFlagBits::eTessellationControlShader ) + result += "TessellationControlShader | "; + if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) + result += "TessellationEvaluationShader | "; + if ( value & PipelineStageFlagBits::eGeometryShader ) + result += "GeometryShader | "; + if ( value & PipelineStageFlagBits::eFragmentShader ) + result += "FragmentShader | "; + if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) + result += "EarlyFragmentTests | "; + if ( value & PipelineStageFlagBits::eLateFragmentTests ) + result += "LateFragmentTests | "; + if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) + result += "ColorAttachmentOutput | "; + if ( value & PipelineStageFlagBits::eComputeShader ) + result += "ComputeShader | "; + if ( value & PipelineStageFlagBits::eTransfer ) + result += "Transfer | "; + if ( value & PipelineStageFlagBits::eBottomOfPipe ) + result += "BottomOfPipe | "; + if ( value & PipelineStageFlagBits::eHost ) + result += "Host | "; + if ( value & PipelineStageFlagBits::eAllGraphics ) + result += "AllGraphics | "; + if ( value & PipelineStageFlagBits::eAllCommands ) + result += "AllCommands | "; + if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) + result += "TransformFeedbackEXT | "; + if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) + result += "AccelerationStructureBuildKHR | "; + if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) + result += "RayTracingShaderKHR | "; + if ( value & PipelineStageFlagBits::eShadingRateImageNV ) + 
result += "ShadingRateImageNV | "; + if ( value & PipelineStageFlagBits::eTaskShaderNV ) + result += "TaskShaderNV | "; + if ( value & PipelineStageFlagBits::eMeshShaderNV ) + result += "MeshShaderNV | "; + if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) + result += "FragmentDensityProcessEXT | "; + if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) + result += "CommandPreprocessNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class MemoryMapFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits ) + { + return "(void)"; + } + + using MemoryMapFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags ) + { + return "{}"; + } + + using ImageAspectFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ImageAspectFlagBits::eColor ) | VkFlags( ImageAspectFlagBits::eDepth ) | + VkFlags( ImageAspectFlagBits::eStencil ) | VkFlags( ImageAspectFlagBits::eMetadata ) | + VkFlags( ImageAspectFlagBits::ePlane0 ) | VkFlags( ImageAspectFlagBits::ePlane1 ) | + VkFlags( ImageAspectFlagBits::ePlane2 ) | VkFlags( ImageAspectFlagBits::eMemoryPlane0EXT ) | + VkFlags( ImageAspectFlagBits::eMemoryPlane1EXT ) | VkFlags( ImageAspectFlagBits::eMemoryPlane2EXT ) | + VkFlags( ImageAspectFlagBits::eMemoryPlane3EXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0, + ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageAspectFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&(ImageAspectFlagBits bit0, + ImageAspectFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ImageAspectFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0, + ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageAspectFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator~( ImageAspectFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ImageAspectFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ImageAspectFlagBits::eColor ) + result += "Color | "; + if ( value & ImageAspectFlagBits::eDepth ) + result += "Depth | "; + if ( value & ImageAspectFlagBits::eStencil ) + result += "Stencil | "; + if ( value & ImageAspectFlagBits::eMetadata ) + result += "Metadata | "; + if ( value & ImageAspectFlagBits::ePlane0 ) + result += "Plane0 | "; + if ( value & ImageAspectFlagBits::ePlane1 ) + result += "Plane1 | "; + if ( value & ImageAspectFlagBits::ePlane2 ) + result += "Plane2 | "; + if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) + result += "MemoryPlane0EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) + result += "MemoryPlane1EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) + result += "MemoryPlane2EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) + result += "MemoryPlane3EXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SparseImageFormatFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SparseImageFormatFlagBits::eSingleMiptail ) | + VkFlags( SparseImageFormatFlagBits::eAlignedMipSize ) | + VkFlags( SparseImageFormatFlagBits::eNonstandardBlockSize ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags + operator|( 
SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseImageFormatFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags + operator&(SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return SparseImageFormatFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags + operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseImageFormatFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SparseImageFormatFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SparseImageFormatFlagBits::eSingleMiptail ) + result += "SingleMiptail | "; + if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) + result += "AlignedMipSize | "; + if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) + result += "NonstandardBlockSize | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SparseMemoryBindFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SparseMemoryBindFlagBits::eMetadata ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags + operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseMemoryBindFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags + operator&(SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return SparseMemoryBindFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags + operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseMemoryBindFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SparseMemoryBindFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SparseMemoryBindFlagBits::eMetadata ) + result += "Metadata | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using FenceCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( FenceCreateFlagBits::eSignaled ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0, + FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FenceCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&(FenceCreateFlagBits bit0, + FenceCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return FenceCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0, + FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FenceCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator~( FenceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( FenceCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value ) + { + if ( !value ) + return "{}"; + 
std::string result;
+
+    if ( value & FenceCreateFlagBits::eSignaled )
+      result += "Signaled | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  enum class SemaphoreCreateFlagBits : VkFlags
+  {
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags )
+  {
+    return "{}";
+  }
+
+  using EventCreateFlags = Flags<EventCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<EventCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( EventCreateFlagBits::eDeviceOnlyKHR )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator|( EventCreateFlagBits bit0,
+                                                                     EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return EventCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator&(EventCreateFlagBits bit0,
+                                                                    EventCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return EventCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator^( EventCreateFlagBits bit0,
+                                                                     EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return EventCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator~( EventCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( EventCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+    std::string result;
+
+    if ( value & EventCreateFlagBits::eDeviceOnlyKHR )
+      result += "DeviceOnlyKHR | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>;
+
+  template <>
+  struct FlagTraits<QueryPipelineStatisticFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eClippingInvocations ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eClippingPrimitives ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eComputeShaderInvocations )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
+    operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
+    operator&(QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
+    operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueryPipelineStatisticFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(
QueryPipelineStatisticFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) + result += "InputAssemblyVertices | "; + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) + result += "InputAssemblyPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) + result += "VertexShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) + result += "GeometryShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) + result += "GeometryShaderPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) + result += "ClippingInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) + result += "ClippingPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) + result += "FragmentShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) + result += "TessellationControlShaderPatches | "; + if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) + result += "TessellationEvaluationShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) + result += "ComputeShaderInvocations | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using QueryPoolCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags ) + { + return "{}"; + } + + using QueryResultFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( QueryResultFlagBits::e64 ) | VkFlags( QueryResultFlagBits::eWait ) | + VkFlags( QueryResultFlagBits::eWithAvailability ) | VkFlags( QueryResultFlagBits::ePartial ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( QueryResultFlagBits::eWithStatusKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0, + QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryResultFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&(QueryResultFlagBits bit0, + QueryResultFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return QueryResultFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0, + QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryResultFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator~( QueryResultFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( QueryResultFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & QueryResultFlagBits::e64 ) + result += "64 | "; + if ( value & QueryResultFlagBits::eWait ) + result += "Wait | "; + if ( value & QueryResultFlagBits::eWithAvailability ) + result += "WithAvailability | "; + if ( value & QueryResultFlagBits::ePartial ) + result += "Partial | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & QueryResultFlagBits::eWithStatusKHR ) + result += "WithStatusKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using BufferCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = 
VkFlags( BufferCreateFlagBits::eSparseBinding ) | VkFlags( BufferCreateFlagBits::eSparseResidency ) | + VkFlags( BufferCreateFlagBits::eSparseAliased ) | VkFlags( BufferCreateFlagBits::eProtected ) | + VkFlags( BufferCreateFlagBits::eDeviceAddressCaptureReplay ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0, + BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&(BufferCreateFlagBits bit0, + BufferCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return BufferCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0, + BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator~( BufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( BufferCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & BufferCreateFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & BufferCreateFlagBits::eSparseResidency ) + result += "SparseResidency | "; + if ( value & BufferCreateFlagBits::eSparseAliased ) + result += "SparseAliased | "; + if ( value & BufferCreateFlagBits::eProtected ) + result += "Protected | "; + if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using BufferUsageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( BufferUsageFlagBits::eTransferSrc ) | VkFlags( BufferUsageFlagBits::eTransferDst ) | + VkFlags( BufferUsageFlagBits::eUniformTexelBuffer ) | VkFlags( BufferUsageFlagBits::eStorageTexelBuffer ) | + VkFlags( BufferUsageFlagBits::eUniformBuffer ) | VkFlags( BufferUsageFlagBits::eStorageBuffer ) | + VkFlags( BufferUsageFlagBits::eIndexBuffer ) | VkFlags( BufferUsageFlagBits::eVertexBuffer ) | + VkFlags( BufferUsageFlagBits::eIndirectBuffer ) | VkFlags( BufferUsageFlagBits::eShaderDeviceAddress ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( BufferUsageFlagBits::eVideoDecodeSrcKHR ) | VkFlags( BufferUsageFlagBits::eVideoDecodeDstKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags( BufferUsageFlagBits::eTransformFeedbackBufferEXT ) | + VkFlags( BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) | + VkFlags( BufferUsageFlagBits::eConditionalRenderingEXT ) | + VkFlags( BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) | + VkFlags( BufferUsageFlagBits::eAccelerationStructureStorageKHR ) | + VkFlags( BufferUsageFlagBits::eShaderBindingTableKHR ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( BufferUsageFlagBits::eVideoEncodeDstKHR ) | VkFlags( BufferUsageFlagBits::eVideoEncodeSrcKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0, + BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&(BufferUsageFlagBits bit0, + BufferUsageFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return BufferUsageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( 
BufferUsageFlagBits bit0, + BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferUsageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator~( BufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( BufferUsageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & BufferUsageFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & BufferUsageFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & BufferUsageFlagBits::eUniformBuffer ) + result += "UniformBuffer | "; + if ( value & BufferUsageFlagBits::eStorageBuffer ) + result += "StorageBuffer | "; + if ( value & BufferUsageFlagBits::eIndexBuffer ) + result += "IndexBuffer | "; + if ( value & BufferUsageFlagBits::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & BufferUsageFlagBits::eIndirectBuffer ) + result += "IndirectBuffer | "; + if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) + result += "ShaderDeviceAddress | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits::eVideoDecodeSrcKHR ) + result += "VideoDecodeSrcKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits::eVideoDecodeDstKHR ) + result += "VideoDecodeDstKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) + result += "TransformFeedbackBufferEXT | "; + if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) + result += "TransformFeedbackCounterBufferEXT | "; + if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) + result += "AccelerationStructureBuildInputReadOnlyKHR | "; + if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR ) + result += "AccelerationStructureStorageKHR | "; + if ( value & BufferUsageFlagBits::eShaderBindingTableKHR ) + result += "ShaderBindingTableKHR | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits::eVideoEncodeDstKHR ) + result += "VideoEncodeDstKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits::eVideoEncodeSrcKHR ) + result += "VideoEncodeSrcKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class BufferViewCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits ) + { + return "(void)"; + } + + using BufferViewCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags ) + { + return "{}"; + } + + using ImageViewCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) | + VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags + operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageViewCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator&(ImageViewCreateFlagBits bit0, + ImageViewCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ImageViewCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags + operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageViewCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ImageViewCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) + result += "FragmentDensityMapDynamicEXT | "; + if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) + result += "FragmentDensityMapDeferredEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ShaderModuleCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags ) + { + return "{}"; + } + + using PipelineCacheCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PipelineCacheCreateFlagBits::eExternallySynchronizedEXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags + operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags + operator&(PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags + operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( PipelineCacheCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT ) + result += "ExternallySynchronizedEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ColorComponentFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ColorComponentFlagBits::eR ) | VkFlags( ColorComponentFlagBits::eG ) | + VkFlags( ColorComponentFlagBits::eB ) | VkFlags( ColorComponentFlagBits::eA ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags + operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ColorComponentFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator&(ColorComponentFlagBits bit0, + ColorComponentFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ColorComponentFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags + operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ColorComponentFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits ) + 
VULKAN_HPP_NOEXCEPT + { + return ~( ColorComponentFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ColorComponentFlagBits::eR ) + result += "R | "; + if ( value & ColorComponentFlagBits::eG ) + result += "G | "; + if ( value & ColorComponentFlagBits::eB ) + result += "B | "; + if ( value & ColorComponentFlagBits::eA ) + result += "A | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using CullModeFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CullModeFlagBits::eNone ) | VkFlags( CullModeFlagBits::eFront ) | + VkFlags( CullModeFlagBits::eBack ) | VkFlags( CullModeFlagBits::eFrontAndBack ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0, + CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CullModeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&(CullModeFlagBits bit0, + CullModeFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return CullModeFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0, + CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CullModeFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator~( CullModeFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( CullModeFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CullModeFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & CullModeFlagBits::eFront ) + result += "Front | "; + if ( value & CullModeFlagBits::eBack ) + result += "Back | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class PipelineColorBlendStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineColorBlendStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags ) + { + return "{}"; + } + + using PipelineCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( PipelineCreateFlagBits::eDisableOptimization ) | VkFlags( PipelineCreateFlagBits::eAllowDerivatives ) | + VkFlags( PipelineCreateFlagBits::eDerivative ) | VkFlags( PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) | + VkFlags( PipelineCreateFlagBits::eDispatchBase ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) | + VkFlags( PipelineCreateFlagBits::eDeferCompileNV ) | VkFlags( PipelineCreateFlagBits::eCaptureStatisticsKHR ) | + VkFlags( PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) | + VkFlags( PipelineCreateFlagBits::eIndirectBindableNV ) | VkFlags( PipelineCreateFlagBits::eLibraryKHR ) | + VkFlags( PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) | + VkFlags( PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) + }; + }; + + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR PipelineCreateFlags + operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator&(PipelineCreateFlagBits bit0, + PipelineCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return PipelineCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags + operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( PipelineCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & PipelineCreateFlagBits::eDisableOptimization ) + result += "DisableOptimization | "; + if ( value & PipelineCreateFlagBits::eAllowDerivatives ) + result += "AllowDerivatives | "; + if ( value & PipelineCreateFlagBits::eDerivative ) + result += "Derivative | "; + if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) + result += "ViewIndexFromDeviceIndex | "; + if ( value & PipelineCreateFlagBits::eDispatchBase ) + result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) + result += "RayTracingNoNullAnyHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) + result += "RayTracingNoNullClosestHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) + result += "RayTracingNoNullMissShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) + result += "RayTracingNoNullIntersectionShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) + result += "RayTracingSkipTrianglesKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) + result += "RayTracingSkipAabbsKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) + result += "RayTracingShaderGroupHandleCaptureReplayKHR | "; + if ( value & PipelineCreateFlagBits::eDeferCompileNV ) + result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) + result += "CaptureStatisticsKHR | "; + if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) + result += "CaptureInternalRepresentationsKHR | "; + if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) + result += "IndirectBindableNV | "; + if ( value & PipelineCreateFlagBits::eLibraryKHR ) + result += "LibraryKHR | "; + if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) + result += "FailOnPipelineCompileRequiredEXT | "; + if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) + result += "EarlyReturnOnFailureEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class PipelineDepthStencilStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineDepthStencilStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineDynamicStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( 
PipelineDynamicStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineDynamicStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineInputAssemblyStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineInputAssemblyStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineLayoutCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits ) + { + return "(void)"; + } + + using PipelineLayoutCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags ) + { + return "{}"; + } + + enum class PipelineMultisampleStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineMultisampleStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineRasterizationStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineRasterizationStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags ) + { + return "{}"; + } + + using PipelineShaderStageCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) | + VkFlags( PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator&(PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineShaderStageCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) + result += "AllowVaryingSubgroupSizeEXT | "; + if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) + result += "RequireFullSubgroupsEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class PipelineTessellationStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineTessellationStateCreateFlags = Flags; + + VULKAN_HPP_INLINE 
std::string to_string( PipelineTessellationStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineVertexInputStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineVertexInputStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineViewportStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineViewportStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags ) + { + return "{}"; + } + + using ShaderStageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ShaderStageFlagBits::eVertex ) | VkFlags( ShaderStageFlagBits::eTessellationControl ) | + VkFlags( ShaderStageFlagBits::eTessellationEvaluation ) | VkFlags( ShaderStageFlagBits::eGeometry ) | + VkFlags( ShaderStageFlagBits::eFragment ) | VkFlags( ShaderStageFlagBits::eCompute ) | + VkFlags( ShaderStageFlagBits::eAllGraphics ) | VkFlags( ShaderStageFlagBits::eAll ) | + VkFlags( ShaderStageFlagBits::eRaygenKHR ) | VkFlags( ShaderStageFlagBits::eAnyHitKHR ) | + VkFlags( ShaderStageFlagBits::eClosestHitKHR ) | VkFlags( ShaderStageFlagBits::eMissKHR ) | + VkFlags( ShaderStageFlagBits::eIntersectionKHR ) | VkFlags( ShaderStageFlagBits::eCallableKHR ) | + VkFlags( ShaderStageFlagBits::eTaskNV ) | VkFlags( ShaderStageFlagBits::eMeshNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&(ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator~( ShaderStageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ShaderStageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ShaderStageFlagBits::eVertex ) + result += "Vertex | "; + if ( value & ShaderStageFlagBits::eTessellationControl ) + result += "TessellationControl | "; + if ( value & ShaderStageFlagBits::eTessellationEvaluation ) + result += "TessellationEvaluation | "; + if ( value & ShaderStageFlagBits::eGeometry ) + result += "Geometry | "; + if ( value & ShaderStageFlagBits::eFragment ) + result += "Fragment | "; + if ( value & ShaderStageFlagBits::eCompute ) + result += "Compute | "; + if ( value & ShaderStageFlagBits::eRaygenKHR ) + result += "RaygenKHR | "; + if ( value & ShaderStageFlagBits::eAnyHitKHR ) + result += "AnyHitKHR | "; + if ( value & ShaderStageFlagBits::eClosestHitKHR ) + result += "ClosestHitKHR | "; + if ( value & ShaderStageFlagBits::eMissKHR ) + result += "MissKHR | "; + if ( value & ShaderStageFlagBits::eIntersectionKHR ) + result += "IntersectionKHR | "; + if ( value & ShaderStageFlagBits::eCallableKHR ) + result += "CallableKHR | "; + if ( value & ShaderStageFlagBits::eTaskNV ) + result += 
"TaskNV | "; + if ( value & ShaderStageFlagBits::eMeshNV ) + result += "MeshNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SamplerCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SamplerCreateFlagBits::eSubsampledEXT ) | + VkFlags( SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&(SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SamplerCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SamplerCreateFlagBits::eSubsampledEXT ) + result += "SubsampledEXT | "; + if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) + result += "SubsampledCoarseReconstructionEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DescriptorPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) | + VkFlags( DescriptorPoolCreateFlagBits::eUpdateAfterBind ) | + VkFlags( DescriptorPoolCreateFlagBits::eHostOnlyVALVE ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorPoolCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator&(DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return DescriptorPoolCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorPoolCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DescriptorPoolCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) + result += "FreeDescriptorSet | "; + if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) + result += "UpdateAfterBind | "; + if ( value & DescriptorPoolCreateFlagBits::eHostOnlyVALVE ) + result += "HostOnlyVALVE | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class DescriptorPoolResetFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits ) + { + return "(void)"; + } + + using DescriptorPoolResetFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( 
DescriptorPoolResetFlags ) + { + return "{}"; + } + + using DescriptorSetLayoutCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) | + VkFlags( DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) | + VkFlags( DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorSetLayoutCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator&(DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return DescriptorSetLayoutCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DescriptorSetLayoutCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) + result += "UpdateAfterBindPool | "; + if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) + result += "PushDescriptorKHR | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE ) + result += "HostOnlyPoolVALVE | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AccessFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( AccessFlagBits::eIndirectCommandRead ) | VkFlags( AccessFlagBits::eIndexRead ) | + VkFlags( AccessFlagBits::eVertexAttributeRead ) | VkFlags( AccessFlagBits::eUniformRead ) | + VkFlags( AccessFlagBits::eInputAttachmentRead ) | VkFlags( AccessFlagBits::eShaderRead ) | + VkFlags( AccessFlagBits::eShaderWrite ) | VkFlags( AccessFlagBits::eColorAttachmentRead ) | + VkFlags( AccessFlagBits::eColorAttachmentWrite ) | VkFlags( AccessFlagBits::eDepthStencilAttachmentRead ) | + VkFlags( AccessFlagBits::eDepthStencilAttachmentWrite ) | VkFlags( AccessFlagBits::eTransferRead ) | + VkFlags( AccessFlagBits::eTransferWrite ) | VkFlags( AccessFlagBits::eHostRead ) | + VkFlags( AccessFlagBits::eHostWrite ) | VkFlags( AccessFlagBits::eMemoryRead ) | + VkFlags( AccessFlagBits::eMemoryWrite ) | VkFlags( AccessFlagBits::eTransformFeedbackWriteEXT ) | + VkFlags( AccessFlagBits::eTransformFeedbackCounterReadEXT ) | + VkFlags( AccessFlagBits::eTransformFeedbackCounterWriteEXT ) | + VkFlags( AccessFlagBits::eConditionalRenderingReadEXT ) | + VkFlags( AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) | + VkFlags( AccessFlagBits::eAccelerationStructureReadKHR ) | + VkFlags( AccessFlagBits::eAccelerationStructureWriteKHR ) | VkFlags( AccessFlagBits::eShadingRateImageReadNV ) | + VkFlags( AccessFlagBits::eFragmentDensityMapReadEXT ) | VkFlags( AccessFlagBits::eCommandPreprocessReadNV ) | + VkFlags( AccessFlagBits::eCommandPreprocessWriteNV ) | VkFlags( AccessFlagBits::eNoneKHR ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags 
operator|( AccessFlagBits bit0, + AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&(AccessFlagBits bit0, + AccessFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0, + AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator~( AccessFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( AccessFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AccessFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & AccessFlagBits::eIndirectCommandRead ) + result += "IndirectCommandRead | "; + if ( value & AccessFlagBits::eIndexRead ) + result += "IndexRead | "; + if ( value & AccessFlagBits::eVertexAttributeRead ) + result += "VertexAttributeRead | "; + if ( value & AccessFlagBits::eUniformRead ) + result += "UniformRead | "; + if ( value & AccessFlagBits::eInputAttachmentRead ) + result += "InputAttachmentRead | "; + if ( value & AccessFlagBits::eShaderRead ) + result += "ShaderRead | "; + if ( value & AccessFlagBits::eShaderWrite ) + result += "ShaderWrite | "; + if ( value & AccessFlagBits::eColorAttachmentRead ) + result += "ColorAttachmentRead | "; + if ( value & AccessFlagBits::eColorAttachmentWrite ) + result += "ColorAttachmentWrite | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) + result += "DepthStencilAttachmentRead | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) + result += "DepthStencilAttachmentWrite | "; + if ( value & AccessFlagBits::eTransferRead ) + result += "TransferRead | "; + if ( value & AccessFlagBits::eTransferWrite ) + result += "TransferWrite | "; + if ( value & AccessFlagBits::eHostRead ) + result += "HostRead | "; + if ( value & AccessFlagBits::eHostWrite ) + result += "HostWrite | "; + if ( value & AccessFlagBits::eMemoryRead ) + result += "MemoryRead | "; + if ( value & AccessFlagBits::eMemoryWrite ) + result += "MemoryWrite | "; + if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) + result += "TransformFeedbackWriteEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) + result += "TransformFeedbackCounterReadEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) + result += "TransformFeedbackCounterWriteEXT | "; + if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) + result += "ConditionalRenderingReadEXT | "; + if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) + result += "ColorAttachmentReadNoncoherentEXT | "; + if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) + result += "AccelerationStructureReadKHR | "; + if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) + result += "AccelerationStructureWriteKHR | "; + if ( value & AccessFlagBits::eShadingRateImageReadNV ) + result += "ShadingRateImageReadNV | "; + if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) + result += "FragmentDensityMapReadEXT | "; + if ( value & AccessFlagBits::eCommandPreprocessReadNV ) + result += "CommandPreprocessReadNV | "; + if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) + result += "CommandPreprocessWriteNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AttachmentDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = 
VkFlags( AttachmentDescriptionFlagBits::eMayAlias ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AttachmentDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator&(AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return AttachmentDescriptionFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AttachmentDescriptionFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( AttachmentDescriptionFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & AttachmentDescriptionFlagBits::eMayAlias ) + result += "MayAlias | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DependencyFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DependencyFlagBits::eByRegion ) | VkFlags( DependencyFlagBits::eDeviceGroup ) | + VkFlags( DependencyFlagBits::eViewLocal ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0, + DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DependencyFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&(DependencyFlagBits bit0, + DependencyFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return DependencyFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0, + DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DependencyFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator~( DependencyFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DependencyFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DependencyFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & DependencyFlagBits::eByRegion ) + result += "ByRegion | "; + if ( value & DependencyFlagBits::eDeviceGroup ) + result += "DeviceGroup | "; + if ( value & DependencyFlagBits::eViewLocal ) + result += "ViewLocal | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using FramebufferCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( FramebufferCreateFlagBits::eImageless ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags + operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags + operator&(FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags + operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR 
FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( FramebufferCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & FramebufferCreateFlagBits::eImageless ) + result += "Imageless | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using RenderPassCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( RenderPassCreateFlagBits::eTransformQCOM ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator&(RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( RenderPassCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & RenderPassCreateFlagBits::eTransformQCOM ) + result += "TransformQCOM | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SubpassDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SubpassDescriptionFlagBits::ePerViewAttributesNVX ) | + VkFlags( SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) | + VkFlags( SubpassDescriptionFlagBits::eFragmentRegionQCOM ) | + VkFlags( SubpassDescriptionFlagBits::eShaderResolveQCOM ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + operator&(SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SubpassDescriptionFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) + result += "PerViewAttributesNVX | "; + if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) + result += "PerViewPositionXOnlyNVX | "; + if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM ) + result += "FragmentRegionQCOM | "; + if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM ) + result += "ShaderResolveQCOM | "; + return "{ " + result.substr( 0, 
result.size() - 3 ) + " }"; + } + + using CommandPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CommandPoolCreateFlagBits::eTransient ) | + VkFlags( CommandPoolCreateFlagBits::eResetCommandBuffer ) | + VkFlags( CommandPoolCreateFlagBits::eProtected ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandPoolCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator&(CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return CommandPoolCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandPoolCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CommandPoolCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & CommandPoolCreateFlagBits::eTransient ) + result += "Transient | "; + if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) + result += "ResetCommandBuffer | "; + if ( value & CommandPoolCreateFlagBits::eProtected ) + result += "Protected | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using CommandPoolResetFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CommandPoolResetFlagBits::eReleaseResources ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandPoolResetFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator&(CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return CommandPoolResetFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandPoolResetFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CommandPoolResetFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & CommandPoolResetFlagBits::eReleaseResources ) + result += "ReleaseResources | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using CommandBufferResetFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CommandBufferResetFlagBits::eReleaseResources ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator&(CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) 
& bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CommandBufferResetFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & CommandBufferResetFlagBits::eReleaseResources ) + result += "ReleaseResources | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using CommandBufferUsageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CommandBufferUsageFlagBits::eOneTimeSubmit ) | + VkFlags( CommandBufferUsageFlagBits::eRenderPassContinue ) | + VkFlags( CommandBufferUsageFlagBits::eSimultaneousUse ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator&(CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return CommandBufferUsageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferUsageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CommandBufferUsageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) + result += "OneTimeSubmit | "; + if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) + result += "RenderPassContinue | "; + if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) + result += "SimultaneousUse | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using QueryControlFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( QueryControlFlagBits::ePrecise ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0, + QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&(QueryControlFlagBits bit0, + QueryControlFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0, + QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator~( QueryControlFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( QueryControlFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & QueryControlFlagBits::ePrecise ) + result += "Precise | "; + return "{ " + result.substr( 0, 
result.size() - 3 ) + " }"; + } + + using StencilFaceFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( StencilFaceFlagBits::eFront ) | VkFlags( StencilFaceFlagBits::eBack ) | + VkFlags( StencilFaceFlagBits::eFrontAndBack ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&(StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator~( StencilFaceFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( StencilFaceFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & StencilFaceFlagBits::eFront ) + result += "Front | "; + if ( value & StencilFaceFlagBits::eBack ) + result += "Back | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_VERSION_1_1 === + + using SubgroupFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SubgroupFeatureFlagBits::eBasic ) | VkFlags( SubgroupFeatureFlagBits::eVote ) | + VkFlags( SubgroupFeatureFlagBits::eArithmetic ) | VkFlags( SubgroupFeatureFlagBits::eBallot ) | + VkFlags( SubgroupFeatureFlagBits::eShuffle ) | VkFlags( SubgroupFeatureFlagBits::eShuffleRelative ) | + VkFlags( SubgroupFeatureFlagBits::eClustered ) | VkFlags( SubgroupFeatureFlagBits::eQuad ) | + VkFlags( SubgroupFeatureFlagBits::ePartitionedNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags + operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubgroupFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator&(SubgroupFeatureFlagBits bit0, + SubgroupFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return SubgroupFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags + operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubgroupFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SubgroupFeatureFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SubgroupFeatureFlagBits::eBasic ) + result += "Basic | "; + if ( value & SubgroupFeatureFlagBits::eVote ) + result += "Vote | "; + if ( value & SubgroupFeatureFlagBits::eArithmetic ) + result += "Arithmetic | "; + if ( value & SubgroupFeatureFlagBits::eBallot ) + result += "Ballot | "; + if ( value & SubgroupFeatureFlagBits::eShuffle ) + result += "Shuffle | "; + if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) + result += "ShuffleRelative | "; + if ( value & SubgroupFeatureFlagBits::eClustered ) + result += "Clustered | "; + if ( value & SubgroupFeatureFlagBits::eQuad ) + result += "Quad | "; + if ( value & 
SubgroupFeatureFlagBits::ePartitionedNV ) + result += "PartitionedNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using PeerMemoryFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PeerMemoryFeatureFlagBits::eCopySrc ) | VkFlags( PeerMemoryFeatureFlagBits::eCopyDst ) | + VkFlags( PeerMemoryFeatureFlagBits::eGenericSrc ) | VkFlags( PeerMemoryFeatureFlagBits::eGenericDst ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator&(PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( PeerMemoryFeatureFlags( bits ) ); + } + + using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; + + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) + result += "CopySrc | "; + if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) + result += "CopyDst | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) + result += "GenericSrc | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) + result += "GenericDst | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using MemoryAllocateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( MemoryAllocateFlagBits::eDeviceMask ) | VkFlags( MemoryAllocateFlagBits::eDeviceAddress ) | + VkFlags( MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags + operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator&(MemoryAllocateFlagBits bit0, + MemoryAllocateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags + operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( MemoryAllocateFlags( bits ) ); + } + + using MemoryAllocateFlagsKHR = MemoryAllocateFlags; + + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & MemoryAllocateFlagBits::eDeviceMask ) + result += "DeviceMask | "; + if ( value & MemoryAllocateFlagBits::eDeviceAddress ) + result += "DeviceAddress | "; + if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class 
CommandPoolTrimFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits ) + { + return "(void)"; + } + + using CommandPoolTrimFlags = Flags; + + using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; + + VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags ) + { + return "{}"; + } + + enum class DescriptorUpdateTemplateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits ) + { + return "(void)"; + } + + using DescriptorUpdateTemplateCreateFlags = Flags; + + using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags; + + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags ) + { + return "{}"; + } + + using ExternalMemoryHandleTypeFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + | VkFlags( ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + | VkFlags( ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) +#if defined( VK_USE_PLATFORM_FUCHSIA ) + | VkFlags( ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA ) +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags + operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags + operator&(ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags + operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags + operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ExternalMemoryHandleTypeFlags( bits ) ); + } + + using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) + result += "D3D11Texture | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) + result += "D3D11TextureKmt | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) + 
result += "D3D12Heap | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) + result += "D3D12Resource | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) + result += "DmaBufEXT | "; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) + result += "AndroidHardwareBufferANDROID | "; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) + result += "HostAllocationEXT | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) + result += "HostMappedForeignMemoryEXT | "; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + if ( value & ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA ) + result += "ZirconVmoFUCHSIA | "; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalMemoryFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalMemoryFeatureFlagBits::eDedicatedOnly ) | + VkFlags( ExternalMemoryFeatureFlagBits::eExportable ) | + VkFlags( ExternalMemoryFeatureFlagBits::eImportable ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags + operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags + operator&(ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags + operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ExternalMemoryFeatureFlags( bits ) ); + } + + using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) + result += "DedicatedOnly | "; + if ( value & ExternalMemoryFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalMemoryFeatureFlagBits::eImportable ) + result += "Importable | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalFenceHandleTypeFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueFd ) | + VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) | + VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) | + VkFlags( ExternalFenceHandleTypeFlagBits::eSyncFd ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags + operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalFenceHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags + operator&(ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ExternalFenceHandleTypeFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR 
ExternalFenceHandleTypeFlags + operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ExternalFenceHandleTypeFlags( bits ) ); + } + + using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) + result += "SyncFd | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalFenceFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( ExternalFenceFeatureFlagBits::eExportable ) | VkFlags( ExternalFenceFeatureFlagBits::eImportable ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags + operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalFenceFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags + operator&(ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ExternalFenceFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags + operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalFenceFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ExternalFenceFeatureFlags( bits ) ); + } + + using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ExternalFenceFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalFenceFeatureFlagBits::eImportable ) + result += "Importable | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using FenceImportFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( FenceImportFlagBits::eTemporary ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0, + FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FenceImportFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&(FenceImportFlagBits bit0, + FenceImportFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return FenceImportFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0, + FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FenceImportFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator~( FenceImportFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( FenceImportFlags( bits ) ); + } + + using 
FenceImportFlagsKHR = FenceImportFlags; + + VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & FenceImportFlagBits::eTemporary ) + result += "Temporary | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SemaphoreImportFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SemaphoreImportFlagBits::eTemporary ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags + operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreImportFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator&(SemaphoreImportFlagBits bit0, + SemaphoreImportFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return SemaphoreImportFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags + operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreImportFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SemaphoreImportFlags( bits ) ); + } + + using SemaphoreImportFlagsKHR = SemaphoreImportFlags; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SemaphoreImportFlagBits::eTemporary ) + result += "Temporary | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalSemaphoreHandleTypeFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) | + VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) | + VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) | + VkFlags( ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) | + VkFlags( ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) +#if defined( VK_USE_PLATFORM_FUCHSIA ) + | VkFlags( ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA ) +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator&(ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ExternalSemaphoreHandleTypeFlags( bits ) ); + } + + using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( 
value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) + result += "D3D12Fence | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) + result += "SyncFd | "; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + if ( value & ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA ) + result += "ZirconEventFUCHSIA | "; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalSemaphoreFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalSemaphoreFeatureFlagBits::eExportable ) | + VkFlags( ExternalSemaphoreFeatureFlagBits::eImportable ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator&(ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ExternalSemaphoreFeatureFlags( bits ) ); + } + + using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) + result += "Importable | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_VERSION_1_2 === + + using DescriptorBindingFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DescriptorBindingFlagBits::eUpdateAfterBind ) | + VkFlags( DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) | + VkFlags( DescriptorBindingFlagBits::ePartiallyBound ) | + VkFlags( DescriptorBindingFlagBits::eVariableDescriptorCount ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorBindingFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + operator&(DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return DescriptorBindingFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorBindingFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DescriptorBindingFlags( bits ) ); + } + + using 
DescriptorBindingFlagsEXT = DescriptorBindingFlags; + + VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) + result += "UpdateAfterBind | "; + if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) + result += "UpdateUnusedWhilePending | "; + if ( value & DescriptorBindingFlagBits::ePartiallyBound ) + result += "PartiallyBound | "; + if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) + result += "VariableDescriptorCount | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ResolveModeFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ResolveModeFlagBits::eNone ) | VkFlags( ResolveModeFlagBits::eSampleZero ) | + VkFlags( ResolveModeFlagBits::eAverage ) | VkFlags( ResolveModeFlagBits::eMin ) | + VkFlags( ResolveModeFlagBits::eMax ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0, + ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ResolveModeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&(ResolveModeFlagBits bit0, + ResolveModeFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ResolveModeFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0, + ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ResolveModeFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator~( ResolveModeFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ResolveModeFlags( bits ) ); + } + + using ResolveModeFlagsKHR = ResolveModeFlags; + + VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ResolveModeFlagBits::eSampleZero ) + result += "SampleZero | "; + if ( value & ResolveModeFlagBits::eAverage ) + result += "Average | "; + if ( value & ResolveModeFlagBits::eMin ) + result += "Min | "; + if ( value & ResolveModeFlagBits::eMax ) + result += "Max | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SemaphoreWaitFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SemaphoreWaitFlagBits::eAny ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0, + SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreWaitFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator&(SemaphoreWaitFlagBits bit0, + SemaphoreWaitFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return SemaphoreWaitFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0, + SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreWaitFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator~( SemaphoreWaitFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SemaphoreWaitFlags( bits ) ); + } + + using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SemaphoreWaitFlagBits::eAny ) + result += "Any | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== 
VK_KHR_surface === + + using CompositeAlphaFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CompositeAlphaFlagBitsKHR::eOpaque ) | VkFlags( CompositeAlphaFlagBitsKHR::ePreMultiplied ) | + VkFlags( CompositeAlphaFlagBitsKHR::ePostMultiplied ) | VkFlags( CompositeAlphaFlagBitsKHR::eInherit ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return CompositeAlphaFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator&(CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return CompositeAlphaFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return CompositeAlphaFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CompositeAlphaFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) + result += "PreMultiplied | "; + if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) + result += "PostMultiplied | "; + if ( value & CompositeAlphaFlagBitsKHR::eInherit ) + result += "Inherit | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_swapchain === + + using SwapchainCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) | + VkFlags( SwapchainCreateFlagBitsKHR::eProtected ) | + VkFlags( SwapchainCreateFlagBitsKHR::eMutableFormat ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR + operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SwapchainCreateFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR + operator&(SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return SwapchainCreateFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR + operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SwapchainCreateFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SwapchainCreateFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) + result += "SplitInstanceBindRegions | "; + if ( value & SwapchainCreateFlagBitsKHR::eProtected ) + result += "Protected | "; + if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) + result += "MutableFormat | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DeviceGroupPresentModeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( 
DeviceGroupPresentModeFlagBitsKHR::eLocal ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eRemote ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eSum ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator&(DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DeviceGroupPresentModeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) + result += "Local | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) + result += "Remote | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) + result += "Sum | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) + result += "LocalMultiDevice | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_display === + + enum class DisplayModeCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR ) + { + return "(void)"; + } + + using DisplayModeCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR ) + { + return "{}"; + } + + using DisplayPlaneAlphaFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DisplayPlaneAlphaFlagBitsKHR::eOpaque ) | VkFlags( DisplayPlaneAlphaFlagBitsKHR::eGlobal ) | + VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) | + VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator&(DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DisplayPlaneAlphaFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) + result += "Opaque | "; + 
if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) + result += "Global | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) + result += "PerPixel | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) + result += "PerPixelPremultiplied | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class DisplaySurfaceCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using DisplaySurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR ) + { + return "{}"; + } + + using SurfaceTransformFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SurfaceTransformFlagBitsKHR::eIdentity ) | VkFlags( SurfaceTransformFlagBitsKHR::eRotate90 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eRotate180 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eRotate270 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirror ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eInherit ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceTransformFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator&(SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return SurfaceTransformFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator^( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceTransformFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SurfaceTransformFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) + result += "Identity | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) + result += "Rotate90 | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) + result += "Rotate180 | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) + result += "Rotate270 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) + result += "HorizontalMirror | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) + result += "HorizontalMirrorRotate90 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) + result += "HorizontalMirrorRotate180 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) + result += "HorizontalMirrorRotate270 | "; + if ( value & SurfaceTransformFlagBitsKHR::eInherit ) + result += "Inherit | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + + //=== VK_KHR_xlib_surface === + + enum class XlibSurfaceCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using XlibSurfaceCreateFlagsKHR = Flags; + 
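+  // A minimal usage sketch (illustrative only; the function name below is hypothetical and
+  // not part of the generated header, and it assumes the Flags wrapper default-constructs to
+  // an empty set): it shows how the flag-bit enums, their Flags aliases, the constexpr
+  // bitwise operators, and the to_string() overloads defined above are meant to be combined.
+  // Kept inside "#if 0" so it is never compiled.
+#if 0
+  void flagsUsageSketch()
+  {
+    // operator| on two flag bits yields the corresponding Flags type.
+    SurfaceTransformFlagsKHR transforms =
+      SurfaceTransformFlagBitsKHR::eIdentity | SurfaceTransformFlagBitsKHR::eRotate90;
+    // to_string() renders the set bits, e.g. "{ Identity | Rotate90 }" for this value.
+    std::string text = to_string( transforms );
+    // An empty flag set renders as "{}".
+    std::string none = to_string( SurfaceTransformFlagsKHR() );
+  }
+#endif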
+ VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + + //=== VK_KHR_xcb_surface === + + enum class XcbSurfaceCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using XcbSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + + //=== VK_KHR_wayland_surface === + + enum class WaylandSurfaceCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using WaylandSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + + //=== VK_KHR_android_surface === + + enum class AndroidSurfaceCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using AndroidSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + + //=== VK_KHR_win32_surface === + + enum class Win32SurfaceCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using Win32SurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + using DebugReportFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DebugReportFlagBitsEXT::eInformation ) | VkFlags( DebugReportFlagBitsEXT::eWarning ) | + VkFlags( DebugReportFlagBitsEXT::ePerformanceWarning ) | VkFlags( DebugReportFlagBitsEXT::eError ) | + VkFlags( DebugReportFlagBitsEXT::eDebug ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT + operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugReportFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator&(DebugReportFlagBitsEXT bit0, + DebugReportFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return DebugReportFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT + operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugReportFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DebugReportFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & DebugReportFlagBitsEXT::eInformation ) + result += "Information | "; + if ( value & DebugReportFlagBitsEXT::eWarning ) + result += "Warning | "; + if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) + result += "PerformanceWarning | "; + if ( value & DebugReportFlagBitsEXT::eError ) + result += "Error | "; + if ( value & 
DebugReportFlagBitsEXT::eDebug ) + result += "Debug | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_KHR_video_queue === + + using VideoCodecOperationFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCodecOperationFlagBitsKHR::eInvalid ) +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) | + VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH264EXT ) | + VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH265EXT ) +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR + operator|( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodecOperationFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR + operator&(VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoCodecOperationFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR + operator^( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodecOperationFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR operator~( VideoCodecOperationFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoCodecOperationFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) + result += "EncodeH264EXT | "; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH264EXT ) + result += "DecodeH264EXT | "; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265EXT ) + result += "DecodeH265EXT | "; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoChromaSubsamplingFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoChromaSubsamplingFlagBitsKHR::eInvalid ) | + VkFlags( VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) | + VkFlags( VideoChromaSubsamplingFlagBitsKHR::e420 ) | + VkFlags( VideoChromaSubsamplingFlagBitsKHR::e422 ) | VkFlags( VideoChromaSubsamplingFlagBitsKHR::e444 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + operator|( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoChromaSubsamplingFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + operator&(VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoChromaSubsamplingFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + operator^( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoChromaSubsamplingFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + 
operator~( VideoChromaSubsamplingFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoChromaSubsamplingFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) + result += "Monochrome | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e420 ) + result += "420 | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e422 ) + result += "422 | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e444 ) + result += "444 | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoComponentBitDepthFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoComponentBitDepthFlagBitsKHR::eInvalid ) | + VkFlags( VideoComponentBitDepthFlagBitsKHR::e8 ) | VkFlags( VideoComponentBitDepthFlagBitsKHR::e10 ) | + VkFlags( VideoComponentBitDepthFlagBitsKHR::e12 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator|( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoComponentBitDepthFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator&(VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoComponentBitDepthFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator^( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoComponentBitDepthFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator~( VideoComponentBitDepthFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoComponentBitDepthFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoComponentBitDepthFlagBitsKHR::e8 ) + result += "8 | "; + if ( value & VideoComponentBitDepthFlagBitsKHR::e10 ) + result += "10 | "; + if ( value & VideoComponentBitDepthFlagBitsKHR::e12 ) + result += "12 | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoCapabilitiesFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCapabilitiesFlagBitsKHR::eProtectedContent ) | + VkFlags( VideoCapabilitiesFlagBitsKHR::eSeparateReferenceImages ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR + operator|( VideoCapabilitiesFlagBitsKHR bit0, VideoCapabilitiesFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCapabilitiesFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR + operator&(VideoCapabilitiesFlagBitsKHR bit0, VideoCapabilitiesFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoCapabilitiesFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR + operator^( VideoCapabilitiesFlagBitsKHR bit0, VideoCapabilitiesFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCapabilitiesFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR operator~( VideoCapabilitiesFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoCapabilitiesFlagsKHR( 
bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCapabilitiesFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoCapabilitiesFlagBitsKHR::eProtectedContent ) + result += "ProtectedContent | "; + if ( value & VideoCapabilitiesFlagBitsKHR::eSeparateReferenceImages ) + result += "SeparateReferenceImages | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoSessionCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( VideoSessionCreateFlagBitsKHR::eDefault ) | VkFlags( VideoSessionCreateFlagBitsKHR::eProtectedContent ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR + operator|( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoSessionCreateFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR + operator&(VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoSessionCreateFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR + operator^( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoSessionCreateFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR operator~( VideoSessionCreateFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoSessionCreateFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoSessionCreateFlagBitsKHR::eProtectedContent ) + result += "ProtectedContent | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class VideoBeginCodingFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagBitsKHR ) + { + return "(void)"; + } + + using VideoBeginCodingFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagsKHR ) + { + return "{}"; + } + + enum class VideoEndCodingFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagBitsKHR ) + { + return "(void)"; + } + + using VideoEndCodingFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagsKHR ) + { + return "{}"; + } + + using VideoCodingControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCodingControlFlagBitsKHR::eDefault ) | VkFlags( VideoCodingControlFlagBitsKHR::eReset ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR + operator|( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingControlFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR + operator&(VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoCodingControlFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR + operator^( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingControlFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR operator~( VideoCodingControlFlagBitsKHR bits ) + 
VULKAN_HPP_NOEXCEPT + { + return ~( VideoCodingControlFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoCodingControlFlagBitsKHR::eReset ) + result += "Reset | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoCodingQualityPresetFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCodingQualityPresetFlagBitsKHR::eDefault ) | + VkFlags( VideoCodingQualityPresetFlagBitsKHR::eNormal ) | + VkFlags( VideoCodingQualityPresetFlagBitsKHR::ePower ) | + VkFlags( VideoCodingQualityPresetFlagBitsKHR::eQuality ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator|( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingQualityPresetFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator&(VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoCodingQualityPresetFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator^( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingQualityPresetFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator~( VideoCodingQualityPresetFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoCodingQualityPresetFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCodingQualityPresetFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoCodingQualityPresetFlagBitsKHR::eNormal ) + result += "Normal | "; + if ( value & VideoCodingQualityPresetFlagBitsKHR::ePower ) + result += "Power | "; + if ( value & VideoCodingQualityPresetFlagBitsKHR::eQuality ) + result += "Quality | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_KHR_video_decode_queue === + + using VideoDecodeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoDecodeFlagBitsKHR::eDefault ) | VkFlags( VideoDecodeFlagBitsKHR::eReserved0 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR + operator|( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR operator&(VideoDecodeFlagBitsKHR bit0, + VideoDecodeFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoDecodeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR + operator^( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR operator~( VideoDecodeFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoDecodeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoDecodeFlagBitsKHR::eReserved0 ) + result += "Reserved0 | "; + 
return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback === + + enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT ) + { + return "{}"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_EXT_video_encode_h264 === + + using VideoEncodeH264CapabilitiesFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCabac ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCavlc ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityWeightedBiPredImplicit ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityTransform8X8 ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityChromaQpOffset ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilitySecondChromaQpOffset ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterDisabled ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterEnabled ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterPartial ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityMultipleSlicePerFrame ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT operator|( + VideoEncodeH264CapabilitiesFlagBitsEXT bit0, VideoEncodeH264CapabilitiesFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CapabilitiesFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT operator&( + VideoEncodeH264CapabilitiesFlagBitsEXT bit0, VideoEncodeH264CapabilitiesFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CapabilitiesFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT operator^( + VideoEncodeH264CapabilitiesFlagBitsEXT bit0, VideoEncodeH264CapabilitiesFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CapabilitiesFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT + operator~( VideoEncodeH264CapabilitiesFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264CapabilitiesFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilitiesFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCabac ) + result += "VkVideoEncodeH264CapabilityCabac | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCavlc ) + result += "VkVideoEncodeH264CapabilityCavlc | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityWeightedBiPredImplicit ) + result += "VkVideoEncodeH264CapabilityWeightedBiPredImplicit | "; + if ( value & 
VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityTransform8X8 ) + result += "VkVideoEncodeH264CapabilityTransform8X8 | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityChromaQpOffset ) + result += "VkVideoEncodeH264CapabilityChromaQpOffset | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilitySecondChromaQpOffset ) + result += "VkVideoEncodeH264CapabilitySecondChromaQpOffset | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterDisabled ) + result += "VkVideoEncodeH264CapabilityDeblockingFilterDisabled | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterEnabled ) + result += "VkVideoEncodeH264CapabilityDeblockingFilterEnabled | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterPartial ) + result += "VkVideoEncodeH264CapabilityDeblockingFilterPartial | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityMultipleSlicePerFrame ) + result += "VkVideoEncodeH264CapabilityMultipleSlicePerFrame | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize ) + result += "VkVideoEncodeH264CapabilityEvenlyDistributedSliceSize | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH264InputModeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeFrame ) | + VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeSlice ) | + VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeNonVcl ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator|( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264InputModeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator&(VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264InputModeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator^( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264InputModeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator~( VideoEncodeH264InputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264InputModeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeFrame ) + result += "VkVideoEncodeH264InputModeFrame | "; + if ( value & VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeSlice ) + result += "VkVideoEncodeH264InputModeSlice | "; + if ( value & VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeNonVcl ) + result += "VkVideoEncodeH264InputModeNonVcl | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH264OutputModeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + 
allFlags = VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeFrame ) | + VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeSlice ) | + VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeNonVcl ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator|( + VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264OutputModeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT + operator&(VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264OutputModeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator^( + VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264OutputModeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT + operator~( VideoEncodeH264OutputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264OutputModeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeFrame ) + result += "VkVideoEncodeH264OutputModeFrame | "; + if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeSlice ) + result += "VkVideoEncodeH264OutputModeSlice | "; + if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeNonVcl ) + result += "VkVideoEncodeH264OutputModeNonVcl | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH264CreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeH264CreateFlagBitsEXT::eVkVideoEncodeH264CreateDefault ) | + VkFlags( VideoEncodeH264CreateFlagBitsEXT::eVkVideoEncodeH264CreateReserved0 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator|( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CreateFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator&(VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CreateFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator^( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CreateFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator~( VideoEncodeH264CreateFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264CreateFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CreateFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoEncodeH264CreateFlagBitsEXT::eVkVideoEncodeH264CreateReserved0 ) + result += "VkVideoEncodeH264CreateReserved0 | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if 
defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_EXT_video_decode_h264 === + + using VideoDecodeH264FieldLayoutFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264ProgressivePicturesOnly ) | + VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutLineInterlacedPlane ) | + VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutSeparateInterlacedPlane ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT operator|( + VideoDecodeH264FieldLayoutFlagBitsEXT bit0, VideoDecodeH264FieldLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeH264FieldLayoutFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT + operator&(VideoDecodeH264FieldLayoutFlagBitsEXT bit0, VideoDecodeH264FieldLayoutFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return VideoDecodeH264FieldLayoutFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT operator^( + VideoDecodeH264FieldLayoutFlagBitsEXT bit0, VideoDecodeH264FieldLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeH264FieldLayoutFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT + operator~( VideoDecodeH264FieldLayoutFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoDecodeH264FieldLayoutFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264FieldLayoutFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutLineInterlacedPlane ) + result += "VkVideoDecodeH264FieldLayoutLineInterlacedPlane | "; + if ( value & VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutSeparateInterlacedPlane ) + result += "VkVideoDecodeH264FieldLayoutSeparateInterlacedPlane | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class VideoDecodeH264CreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264CreateFlagBitsEXT ) + { + return "(void)"; + } + + using VideoDecodeH264CreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264CreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_GGP ) + + //=== VK_GGP_stream_descriptor_surface === + + enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP ) + { + return "(void)"; + } + + using StreamDescriptorSurfaceCreateFlagsGGP = Flags; + + VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + using ExternalMemoryHandleTypeFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) 
VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator&(ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1)VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ExternalMemoryHandleTypeFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) + result += "D3D11Image | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) + result += "D3D11ImageKmt | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalMemoryFeatureFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) | + VkFlags( ExternalMemoryFeatureFlagBitsNV::eExportable ) | + VkFlags( ExternalMemoryFeatureFlagBitsNV::eImportable ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator&(ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1)VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ExternalMemoryFeatureFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) + result += "DedicatedOnly | "; + if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) + result += "Exportable | "; + if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) + result += "Importable | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + + //=== VK_NN_vi_surface === + + enum class ViSurfaceCreateFlagBitsNN : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN ) + { + return "(void)"; + } + + using ViSurfaceCreateFlagsNN = Flags; + + VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== 
VK_EXT_conditional_rendering === + + using ConditionalRenderingFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ConditionalRenderingFlagBitsEXT::eInverted ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return ConditionalRenderingFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator&(ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return ConditionalRenderingFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ConditionalRenderingFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) + result += "Inverted | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_display_surface_counter === + + using SurfaceCounterFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SurfaceCounterFlagBitsEXT::eVblank ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator&(SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SurfaceCounterFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SurfaceCounterFlagBitsEXT::eVblank ) + result += "Vblank | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_viewport_swizzle === + + enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineViewportSwizzleStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_discard_rectangles === + + enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( 
PipelineDiscardRectangleStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_conservative_rasterization === + + enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineRasterizationConservativeStateCreateFlagsEXT = + Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_depth_clip_enable === + + enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_KHR_performance_query === + + using PerformanceCounterDescriptionFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) | + VkFlags( PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR + operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PerformanceCounterDescriptionFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) + result += "PerformanceImpacting | "; + if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) + result += "ConcurrentlyImpacted | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AcquireProfilingLockFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR ) + { + return "{}"; + } + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + + //=== VK_MVK_ios_surface === + + enum class IOSSurfaceCreateFlagBitsMVK : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK ) + { + return "(void)"; + } + + using IOSSurfaceCreateFlagsMVK = Flags; + + VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + + //=== VK_MVK_macos_surface === + + enum class MacOSSurfaceCreateFlagBitsMVK : VkFlags + { + }; + + 
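+  // All of the flag wrappers emitted in this file follow the same generated
+  // pattern: a Flags<BitType> alias, a FlagTraits specialization listing
+  // allFlags, constexpr bitwise operators ( |, &, ^, ~ ), and a to_string()
+  // helper that names each bit that is set.  A minimal usage sketch, assuming
+  // the default `vk` alias for VULKAN_HPP_NAMESPACE (kept in a comment so the
+  // vendored header itself is unchanged, and using only types declared above):
+  //
+  //   vk::SurfaceCounterFlagsEXT counters = vk::SurfaceCounterFlagBitsEXT::eVblank;
+  //   if ( counters & vk::SurfaceCounterFlagBitsEXT::eVblank )
+  //     std::cout << vk::to_string( counters ) << std::endl;  // prints "{ Vblank }"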
VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK ) + { + return "(void)"; + } + + using MacOSSurfaceCreateFlagsMVK = Flags; + + VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + using DebugUtilsMessageSeverityFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) | + VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) | + VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) | + VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eError ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|( + DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT + operator&(DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^( + DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT + operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) + result += "Verbose | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) + result += "Info | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) + result += "Warning | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) + result += "Error | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DebugUtilsMessageTypeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) | + VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eValidation ) | + VkFlags( DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator&(DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DebugUtilsMessageTypeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string 
to_string( DebugUtilsMessageTypeFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) + result += "General | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) + result += "Validation | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) + result += "Performance | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT ) + { + return "(void)"; + } + + using DebugUtilsMessengerCallbackDataFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT ) + { + return "{}"; + } + + enum class DebugUtilsMessengerCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT ) + { + return "(void)"; + } + + using DebugUtilsMessengerCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_NV_fragment_coverage_to_color === + + enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineCoverageToColorStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_KHR_acceleration_structure === + + using GeometryFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( GeometryFlagBitsKHR::eOpaque ) | VkFlags( GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return GeometryFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&(GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return GeometryFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return GeometryFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( GeometryFlagsKHR( bits ) ); + } + + using GeometryFlagsNV = GeometryFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & GeometryFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) + result += "NoDuplicateAnyHitInvocation | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using GeometryInstanceFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) | + VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) | + VkFlags( GeometryInstanceFlagBitsKHR::eForceOpaque ) | + VkFlags( GeometryInstanceFlagBitsKHR::eForceNoOpaque ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) 
VULKAN_HPP_NOEXCEPT + { + return GeometryInstanceFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator&(GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return GeometryInstanceFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return GeometryInstanceFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( GeometryInstanceFlagsKHR( bits ) ); + } + + using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) + result += "TriangleFacingCullDisable | "; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) + result += "TriangleFrontCounterclockwise | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) + result += "ForceOpaque | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) + result += "ForceNoOpaque | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using BuildAccelerationStructureFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::eLowMemory ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|( + BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR + operator&(BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( + BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR + operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( BuildAccelerationStructureFlagsKHR( bits ) ); + } + + using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) + result += "AllowUpdate | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) + result += "AllowCompaction | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) + result += "PreferFastTrace | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) + result += 
"PreferFastBuild | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) + result += "LowMemory | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AccelerationStructureCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator|( + AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccelerationStructureCreateFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator&( + AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return AccelerationStructureCreateFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator^( + AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccelerationStructureCreateFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR + operator~( AccelerationStructureCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( AccelerationStructureCreateFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_framebuffer_mixed_samples === + + enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineCoverageModulationStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_validation_cache === + + enum class ValidationCacheCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT ) + { + return "(void)"; + } + + using ValidationCacheCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_AMD_pipeline_compiler_control === + + using PipelineCompilerControlFlagsAMD = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD ) + { + return "{}"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_EXT_video_decode_h265 === + + enum class VideoDecodeH265CreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH265CreateFlagBitsEXT ) + { + return "(void)"; + } + + using VideoDecodeH265CreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH265CreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_pipeline_creation_feedback === + + using PipelineCreationFeedbackFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PipelineCreationFeedbackFlagBitsEXT::eValid ) | + VkFlags( PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) | + VkFlags( 
PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator|( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator&(PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator^( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator~( PipelineCreationFeedbackFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineCreationFeedbackFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid ) + result += "Valid | "; + if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) + result += "ApplicationPipelineCacheHit | "; + if ( value & PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) + result += "BasePipelineAcceleration | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + + //=== VK_FUCHSIA_imagepipe_surface === + + enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA ) + { + return "(void)"; + } + + using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; + + VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + + //=== VK_EXT_metal_surface === + + enum class MetalSurfaceCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } + + using MetalSurfaceCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_AMD_shader_core_properties2 === + + using ShaderCorePropertiesFlagsAMD = Flags; + + VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD ) + { + return "{}"; + } + + //=== VK_EXT_tooling_info === + + using ToolPurposeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ToolPurposeFlagBitsEXT::eValidation ) | VkFlags( ToolPurposeFlagBitsEXT::eProfiling ) | + VkFlags( ToolPurposeFlagBitsEXT::eTracing ) | VkFlags( ToolPurposeFlagBitsEXT::eAdditionalFeatures ) | + VkFlags( ToolPurposeFlagBitsEXT::eModifyingFeatures ) | + VkFlags( ToolPurposeFlagBitsEXT::eDebugReporting ) | VkFlags( ToolPurposeFlagBitsEXT::eDebugMarkers ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT + operator|( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return ToolPurposeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator&(ToolPurposeFlagBitsEXT bit0, + 
ToolPurposeFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return ToolPurposeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT + operator^( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return ToolPurposeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator~( ToolPurposeFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ToolPurposeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & ToolPurposeFlagBitsEXT::eValidation ) + result += "Validation | "; + if ( value & ToolPurposeFlagBitsEXT::eProfiling ) + result += "Profiling | "; + if ( value & ToolPurposeFlagBitsEXT::eTracing ) + result += "Tracing | "; + if ( value & ToolPurposeFlagBitsEXT::eAdditionalFeatures ) + result += "AdditionalFeatures | "; + if ( value & ToolPurposeFlagBitsEXT::eModifyingFeatures ) + result += "ModifyingFeatures | "; + if ( value & ToolPurposeFlagBitsEXT::eDebugReporting ) + result += "DebugReporting | "; + if ( value & ToolPurposeFlagBitsEXT::eDebugMarkers ) + result += "DebugMarkers | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_coverage_reduction_mode === + + enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineCoverageReductionStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_headless_surface === + + enum class HeadlessSurfaceCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } + + using HeadlessSurfaceCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_NV_device_generated_commands === + + using IndirectStateFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( IndirectStateFlagBitsNV::eFlagFrontface ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV + operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator&(IndirectStateFlagBitsNV bit0, + IndirectStateFlagBitsNV bit1)VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV + operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( IndirectStateFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) + result += "FlagFrontface | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using IndirectCommandsLayoutUsageFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( 
IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) | + VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) | + VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( + IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV + operator&(IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1)VULKAN_HPP_NOEXCEPT + { + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( + IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV + operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) + result += "ExplicitPreprocess | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) + result += "IndexedSequences | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) + result += "UnorderedSequences | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_device_memory_report === + + enum class DeviceMemoryReportFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagBitsEXT ) + { + return "(void)"; + } + + using DeviceMemoryReportFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_private_data === + + using PrivateDataSlotCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagsEXT ) + { + return "{}"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + + //=== VK_KHR_video_encode_queue === + + using VideoEncodeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeFlagBitsKHR::eReserved0 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR + operator|( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR operator&(VideoEncodeFlagBitsKHR bit0, + VideoEncodeFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR + operator^( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR operator~( VideoEncodeFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + 
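+  // Each check below appends "BitName | " for every bit that is set; the
+  // trailing " | " separator (three characters) is then trimmed via
+  // result.substr( 0, result.size() - 3 ) before the surrounding braces are added.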
if ( value & VideoEncodeFlagBitsKHR::eReserved0 ) + result += "Reserved0 | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeRateControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( VideoEncodeRateControlFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeRateControlFlagBitsKHR::eReset ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator|( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator&(VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator^( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator~( VideoEncodeRateControlFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeRateControlFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & VideoEncodeRateControlFlagBitsKHR::eReset ) + result += "Reset | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeRateControlModeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eNone ) | + VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eCbr ) | + VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eVbr ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator|( + VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlModeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR + operator&(VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlModeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator^( + VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlModeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR + operator~( VideoEncodeRateControlModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeRateControlModeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_device_diagnostics_config === + + using DeviceDiagnosticsConfigFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) | + VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) | + 
VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator&(DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1)VULKAN_HPP_NOEXCEPT + { + return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DeviceDiagnosticsConfigFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) + result += "EnableShaderDebugInfo | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) + result += "EnableResourceTracking | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) + result += "EnableAutomaticCheckpoints | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_synchronization2 === + + using PipelineStageFlags2KHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags64 + { + allFlags = + VkFlags64( PipelineStageFlagBits2KHR::eNone ) | VkFlags64( PipelineStageFlagBits2KHR::eTopOfPipe ) | + VkFlags64( PipelineStageFlagBits2KHR::eDrawIndirect ) | VkFlags64( PipelineStageFlagBits2KHR::eVertexInput ) | + VkFlags64( PipelineStageFlagBits2KHR::eVertexShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eTessellationControlShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eTessellationEvaluationShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eGeometryShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eFragmentShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eEarlyFragmentTests ) | + VkFlags64( PipelineStageFlagBits2KHR::eLateFragmentTests ) | + VkFlags64( PipelineStageFlagBits2KHR::eColorAttachmentOutput ) | + VkFlags64( PipelineStageFlagBits2KHR::eComputeShader ) | VkFlags64( PipelineStageFlagBits2KHR::eAllTransfer ) | + VkFlags64( PipelineStageFlagBits2KHR::eBottomOfPipe ) | VkFlags64( PipelineStageFlagBits2KHR::eHost ) | + VkFlags64( PipelineStageFlagBits2KHR::eAllGraphics ) | VkFlags64( PipelineStageFlagBits2KHR::eAllCommands ) | + VkFlags64( PipelineStageFlagBits2KHR::eCopy ) | VkFlags64( PipelineStageFlagBits2KHR::eResolve ) | + VkFlags64( PipelineStageFlagBits2KHR::eBlit ) | VkFlags64( PipelineStageFlagBits2KHR::eClear ) | + VkFlags64( PipelineStageFlagBits2KHR::eIndexInput ) | + VkFlags64( PipelineStageFlagBits2KHR::eVertexAttributeInput ) | + VkFlags64( PipelineStageFlagBits2KHR::ePreRasterizationShaders ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags64( PipelineStageFlagBits2KHR::eVideoDecode ) | VkFlags64( PipelineStageFlagBits2KHR::eVideoEncode ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags64( PipelineStageFlagBits2KHR::eTransformFeedbackExt ) | + VkFlags64( PipelineStageFlagBits2KHR::eConditionalRenderingExt ) | + 
VkFlags64( PipelineStageFlagBits2KHR::eCommandPreprocessNv ) | + VkFlags64( PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment ) | + VkFlags64( PipelineStageFlagBits2KHR::eAccelerationStructureBuild ) | + VkFlags64( PipelineStageFlagBits2KHR::eRayTracingShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eFragmentDensityProcessExt ) | + VkFlags64( PipelineStageFlagBits2KHR::eTaskShaderNv ) | VkFlags64( PipelineStageFlagBits2KHR::eMeshShaderNv ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR + operator|( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags2KHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR + operator&(PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1)VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags2KHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR + operator^( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags2KHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR operator~( PipelineStageFlagBits2KHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( PipelineStageFlags2KHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2KHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & PipelineStageFlagBits2KHR::eTopOfPipe ) + result += "TopOfPipe | "; + if ( value & PipelineStageFlagBits2KHR::eDrawIndirect ) + result += "DrawIndirect | "; + if ( value & PipelineStageFlagBits2KHR::eVertexInput ) + result += "VertexInput | "; + if ( value & PipelineStageFlagBits2KHR::eVertexShader ) + result += "VertexShader | "; + if ( value & PipelineStageFlagBits2KHR::eTessellationControlShader ) + result += "TessellationControlShader | "; + if ( value & PipelineStageFlagBits2KHR::eTessellationEvaluationShader ) + result += "TessellationEvaluationShader | "; + if ( value & PipelineStageFlagBits2KHR::eGeometryShader ) + result += "GeometryShader | "; + if ( value & PipelineStageFlagBits2KHR::eFragmentShader ) + result += "FragmentShader | "; + if ( value & PipelineStageFlagBits2KHR::eEarlyFragmentTests ) + result += "EarlyFragmentTests | "; + if ( value & PipelineStageFlagBits2KHR::eLateFragmentTests ) + result += "LateFragmentTests | "; + if ( value & PipelineStageFlagBits2KHR::eColorAttachmentOutput ) + result += "ColorAttachmentOutput | "; + if ( value & PipelineStageFlagBits2KHR::eComputeShader ) + result += "ComputeShader | "; + if ( value & PipelineStageFlagBits2KHR::eAllTransfer ) + result += "AllTransfer | "; + if ( value & PipelineStageFlagBits2KHR::eBottomOfPipe ) + result += "BottomOfPipe | "; + if ( value & PipelineStageFlagBits2KHR::eHost ) + result += "Host | "; + if ( value & PipelineStageFlagBits2KHR::eAllGraphics ) + result += "AllGraphics | "; + if ( value & PipelineStageFlagBits2KHR::eAllCommands ) + result += "AllCommands | "; + if ( value & PipelineStageFlagBits2KHR::eCopy ) + result += "Copy | "; + if ( value & PipelineStageFlagBits2KHR::eResolve ) + result += "Resolve | "; + if ( value & PipelineStageFlagBits2KHR::eBlit ) + result += "Blit | "; + if ( value & PipelineStageFlagBits2KHR::eClear ) + result += "Clear | "; + if ( value & PipelineStageFlagBits2KHR::eIndexInput ) + result += "IndexInput | "; + if ( value & PipelineStageFlagBits2KHR::eVertexAttributeInput ) + result += "VertexAttributeInput | "; + if ( value & 
PipelineStageFlagBits2KHR::ePreRasterizationShaders ) + result += "PreRasterizationShaders | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineStageFlagBits2KHR::eVideoDecode ) + result += "VideoDecode | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineStageFlagBits2KHR::eVideoEncode ) + result += "VideoEncode | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & PipelineStageFlagBits2KHR::eTransformFeedbackExt ) + result += "TransformFeedbackExt | "; + if ( value & PipelineStageFlagBits2KHR::eConditionalRenderingExt ) + result += "ConditionalRenderingExt | "; + if ( value & PipelineStageFlagBits2KHR::eCommandPreprocessNv ) + result += "CommandPreprocessNv | "; + if ( value & PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment ) + result += "FragmentShadingRateAttachment | "; + if ( value & PipelineStageFlagBits2KHR::eAccelerationStructureBuild ) + result += "AccelerationStructureBuild | "; + if ( value & PipelineStageFlagBits2KHR::eRayTracingShader ) + result += "RayTracingShader | "; + if ( value & PipelineStageFlagBits2KHR::eFragmentDensityProcessExt ) + result += "FragmentDensityProcessExt | "; + if ( value & PipelineStageFlagBits2KHR::eTaskShaderNv ) + result += "TaskShaderNv | "; + if ( value & PipelineStageFlagBits2KHR::eMeshShaderNv ) + result += "MeshShaderNv | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AccessFlags2KHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags64 + { + allFlags = + VkFlags64( AccessFlagBits2KHR::eNone ) | VkFlags64( AccessFlagBits2KHR::eIndirectCommandRead ) | + VkFlags64( AccessFlagBits2KHR::eIndexRead ) | VkFlags64( AccessFlagBits2KHR::eVertexAttributeRead ) | + VkFlags64( AccessFlagBits2KHR::eUniformRead ) | VkFlags64( AccessFlagBits2KHR::eInputAttachmentRead ) | + VkFlags64( AccessFlagBits2KHR::eShaderRead ) | VkFlags64( AccessFlagBits2KHR::eShaderWrite ) | + VkFlags64( AccessFlagBits2KHR::eColorAttachmentRead ) | VkFlags64( AccessFlagBits2KHR::eColorAttachmentWrite ) | + VkFlags64( AccessFlagBits2KHR::eDepthStencilAttachmentRead ) | + VkFlags64( AccessFlagBits2KHR::eDepthStencilAttachmentWrite ) | VkFlags64( AccessFlagBits2KHR::eTransferRead ) | + VkFlags64( AccessFlagBits2KHR::eTransferWrite ) | VkFlags64( AccessFlagBits2KHR::eHostRead ) | + VkFlags64( AccessFlagBits2KHR::eHostWrite ) | VkFlags64( AccessFlagBits2KHR::eMemoryRead ) | + VkFlags64( AccessFlagBits2KHR::eMemoryWrite ) | VkFlags64( AccessFlagBits2KHR::eShaderSampledRead ) | + VkFlags64( AccessFlagBits2KHR::eShaderStorageRead ) | VkFlags64( AccessFlagBits2KHR::eShaderStorageWrite ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags64( AccessFlagBits2KHR::eVideoDecodeRead ) | VkFlags64( AccessFlagBits2KHR::eVideoDecodeWrite ) | + VkFlags64( AccessFlagBits2KHR::eVideoEncodeRead ) | VkFlags64( AccessFlagBits2KHR::eVideoEncodeWrite ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags64( AccessFlagBits2KHR::eTransformFeedbackWriteExt ) | + VkFlags64( AccessFlagBits2KHR::eTransformFeedbackCounterReadExt ) | + VkFlags64( AccessFlagBits2KHR::eTransformFeedbackCounterWriteExt ) | + VkFlags64( AccessFlagBits2KHR::eConditionalRenderingReadExt ) | + VkFlags64( AccessFlagBits2KHR::eCommandPreprocessReadNv ) | + VkFlags64( AccessFlagBits2KHR::eCommandPreprocessWriteNv ) | + VkFlags64( AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead ) | + VkFlags64( AccessFlagBits2KHR::eAccelerationStructureRead ) | + VkFlags64( AccessFlagBits2KHR::eAccelerationStructureWrite ) | 
+ VkFlags64( AccessFlagBits2KHR::eFragmentDensityMapReadExt ) | + VkFlags64( AccessFlagBits2KHR::eColorAttachmentReadNoncoherentExt ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator|( AccessFlagBits2KHR bit0, + AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags2KHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator&(AccessFlagBits2KHR bit0, + AccessFlagBits2KHR bit1)VULKAN_HPP_NOEXCEPT + { + return AccessFlags2KHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator^( AccessFlagBits2KHR bit0, + AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags2KHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator~( AccessFlagBits2KHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( AccessFlags2KHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AccessFlags2KHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & AccessFlagBits2KHR::eIndirectCommandRead ) + result += "IndirectCommandRead | "; + if ( value & AccessFlagBits2KHR::eIndexRead ) + result += "IndexRead | "; + if ( value & AccessFlagBits2KHR::eVertexAttributeRead ) + result += "VertexAttributeRead | "; + if ( value & AccessFlagBits2KHR::eUniformRead ) + result += "UniformRead | "; + if ( value & AccessFlagBits2KHR::eInputAttachmentRead ) + result += "InputAttachmentRead | "; + if ( value & AccessFlagBits2KHR::eShaderRead ) + result += "ShaderRead | "; + if ( value & AccessFlagBits2KHR::eShaderWrite ) + result += "ShaderWrite | "; + if ( value & AccessFlagBits2KHR::eColorAttachmentRead ) + result += "ColorAttachmentRead | "; + if ( value & AccessFlagBits2KHR::eColorAttachmentWrite ) + result += "ColorAttachmentWrite | "; + if ( value & AccessFlagBits2KHR::eDepthStencilAttachmentRead ) + result += "DepthStencilAttachmentRead | "; + if ( value & AccessFlagBits2KHR::eDepthStencilAttachmentWrite ) + result += "DepthStencilAttachmentWrite | "; + if ( value & AccessFlagBits2KHR::eTransferRead ) + result += "TransferRead | "; + if ( value & AccessFlagBits2KHR::eTransferWrite ) + result += "TransferWrite | "; + if ( value & AccessFlagBits2KHR::eHostRead ) + result += "HostRead | "; + if ( value & AccessFlagBits2KHR::eHostWrite ) + result += "HostWrite | "; + if ( value & AccessFlagBits2KHR::eMemoryRead ) + result += "MemoryRead | "; + if ( value & AccessFlagBits2KHR::eMemoryWrite ) + result += "MemoryWrite | "; + if ( value & AccessFlagBits2KHR::eShaderSampledRead ) + result += "ShaderSampledRead | "; + if ( value & AccessFlagBits2KHR::eShaderStorageRead ) + result += "ShaderStorageRead | "; + if ( value & AccessFlagBits2KHR::eShaderStorageWrite ) + result += "ShaderStorageWrite | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & AccessFlagBits2KHR::eVideoDecodeRead ) + result += "VideoDecodeRead | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & AccessFlagBits2KHR::eVideoDecodeWrite ) + result += "VideoDecodeWrite | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & AccessFlagBits2KHR::eVideoEncodeRead ) + result += "VideoEncodeRead | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & AccessFlagBits2KHR::eVideoEncodeWrite ) + result += "VideoEncodeWrite | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & AccessFlagBits2KHR::eTransformFeedbackWriteExt ) + result += 
"TransformFeedbackWriteExt | "; + if ( value & AccessFlagBits2KHR::eTransformFeedbackCounterReadExt ) + result += "TransformFeedbackCounterReadExt | "; + if ( value & AccessFlagBits2KHR::eTransformFeedbackCounterWriteExt ) + result += "TransformFeedbackCounterWriteExt | "; + if ( value & AccessFlagBits2KHR::eConditionalRenderingReadExt ) + result += "ConditionalRenderingReadExt | "; + if ( value & AccessFlagBits2KHR::eCommandPreprocessReadNv ) + result += "CommandPreprocessReadNv | "; + if ( value & AccessFlagBits2KHR::eCommandPreprocessWriteNv ) + result += "CommandPreprocessWriteNv | "; + if ( value & AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead ) + result += "FragmentShadingRateAttachmentRead | "; + if ( value & AccessFlagBits2KHR::eAccelerationStructureRead ) + result += "AccelerationStructureRead | "; + if ( value & AccessFlagBits2KHR::eAccelerationStructureWrite ) + result += "AccelerationStructureWrite | "; + if ( value & AccessFlagBits2KHR::eFragmentDensityMapReadExt ) + result += "FragmentDensityMapReadExt | "; + if ( value & AccessFlagBits2KHR::eColorAttachmentReadNoncoherentExt ) + result += "ColorAttachmentReadNoncoherentExt | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SubmitFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SubmitFlagBitsKHR::eProtected ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator|( SubmitFlagBitsKHR bit0, + SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubmitFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator&(SubmitFlagBitsKHR bit0, + SubmitFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return SubmitFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator^( SubmitFlagBitsKHR bit0, + SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubmitFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator~( SubmitFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SubmitFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SubmitFlagsKHR value ) + { + if ( !value ) + return "{}"; + std::string result; + + if ( value & SubmitFlagBitsKHR::eProtected ) + result += "Protected | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + + //=== VK_EXT_directfb_surface === + + enum class DirectFBSurfaceCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } + + using DirectFBSurfaceCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + + //=== VK_QNX_screen_surface === + + enum class ScreenSurfaceCreateFlagBitsQNX : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagBitsQNX ) + { + return "(void)"; + } + + using ScreenSurfaceCreateFlagsQNX = Flags; + + VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagsQNX ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +} // namespace VULKAN_HPP_NAMESPACE + +#ifndef VULKAN_HPP_NO_EXCEPTIONS +namespace std +{ + template <> + struct is_error_code_enum : public true_type + {}; +} // namespace std +#endif + +namespace VULKAN_HPP_NAMESPACE +{ +#ifndef VULKAN_HPP_NO_EXCEPTIONS + class ErrorCategoryImpl : 
public std::error_category + { + public: + virtual const char * name() const VULKAN_HPP_NOEXCEPT override + { + return VULKAN_HPP_NAMESPACE_STRING "::Result"; + } + virtual std::string message( int ev ) const override + { + return to_string( static_cast( ev ) ); + } + }; + + class Error + { + public: + Error() VULKAN_HPP_NOEXCEPT = default; + Error( const Error & ) VULKAN_HPP_NOEXCEPT = default; + virtual ~Error() VULKAN_HPP_NOEXCEPT = default; + + virtual const char * what() const VULKAN_HPP_NOEXCEPT = 0; + }; + + class LogicError + : public Error + , public std::logic_error + { + public: + explicit LogicError( const std::string & what ) : Error(), std::logic_error( what ) {} + explicit LogicError( char const * what ) : Error(), std::logic_error( what ) {} + + virtual const char * what() const VULKAN_HPP_NOEXCEPT + { + return std::logic_error::what(); + } + }; + + class SystemError + : public Error + , public std::system_error + { + public: + SystemError( std::error_code ec ) : Error(), std::system_error( ec ) {} + SystemError( std::error_code ec, std::string const & what ) : Error(), std::system_error( ec, what ) {} + SystemError( std::error_code ec, char const * what ) : Error(), std::system_error( ec, what ) {} + SystemError( int ev, std::error_category const & ecat ) : Error(), std::system_error( ev, ecat ) {} + SystemError( int ev, std::error_category const & ecat, std::string const & what ) + : Error(), std::system_error( ev, ecat, what ) + {} + SystemError( int ev, std::error_category const & ecat, char const * what ) + : Error(), std::system_error( ev, ecat, what ) + {} + + virtual const char * what() const VULKAN_HPP_NOEXCEPT + { + return std::system_error::what(); + } + }; + + VULKAN_HPP_INLINE const std::error_category & errorCategory() VULKAN_HPP_NOEXCEPT + { + static ErrorCategoryImpl instance; + return instance; + } + + VULKAN_HPP_INLINE std::error_code make_error_code( Result e ) VULKAN_HPP_NOEXCEPT + { + return std::error_code( static_cast( e ), errorCategory() ); + } + + VULKAN_HPP_INLINE std::error_condition make_error_condition( Result e ) VULKAN_HPP_NOEXCEPT + { + return std::error_condition( static_cast( e ), errorCategory() ); + } + + class OutOfHostMemoryError : public SystemError + { + public: + OutOfHostMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) + {} + OutOfHostMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) + {} + }; + + class OutOfDeviceMemoryError : public SystemError + { + public: + OutOfDeviceMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) + {} + OutOfDeviceMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) + {} + }; + + class InitializationFailedError : public SystemError + { + public: + InitializationFailedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) + {} + InitializationFailedError( char const * message ) + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) + {} + }; + + class DeviceLostError : public SystemError + { + public: + DeviceLostError( std::string const & message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) + {} + DeviceLostError( char const * message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} + }; + + class 
MemoryMapFailedError : public SystemError + { + public: + MemoryMapFailedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) + {} + MemoryMapFailedError( char const * message ) + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) + {} + }; + + class LayerNotPresentError : public SystemError + { + public: + LayerNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) + {} + LayerNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) + {} + }; + + class ExtensionNotPresentError : public SystemError + { + public: + ExtensionNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) + {} + ExtensionNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) + {} + }; + + class FeatureNotPresentError : public SystemError + { + public: + FeatureNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) + {} + FeatureNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) + {} + }; + + class IncompatibleDriverError : public SystemError + { + public: + IncompatibleDriverError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) + {} + IncompatibleDriverError( char const * message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) + {} + }; + + class TooManyObjectsError : public SystemError + { + public: + TooManyObjectsError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) + {} + TooManyObjectsError( char const * message ) + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) + {} + }; + + class FormatNotSupportedError : public SystemError + { + public: + FormatNotSupportedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) + {} + FormatNotSupportedError( char const * message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) + {} + }; + + class FragmentedPoolError : public SystemError + { + public: + FragmentedPoolError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) + {} + FragmentedPoolError( char const * message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) + {} + }; + + class UnknownError : public SystemError + { + public: + UnknownError( std::string const & message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + UnknownError( char const * message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + }; + + class OutOfPoolMemoryError : public SystemError + { + public: + OutOfPoolMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) + {} + OutOfPoolMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) + {} + }; + + class InvalidExternalHandleError : public SystemError + { + public: + InvalidExternalHandleError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle 
), message ) + {} + InvalidExternalHandleError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) + {} + }; + + class FragmentationError : public SystemError + { + public: + FragmentationError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFragmentation ), message ) + {} + FragmentationError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) + {} + }; + + class InvalidOpaqueCaptureAddressError : public SystemError + { + public: + InvalidOpaqueCaptureAddressError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) + {} + InvalidOpaqueCaptureAddressError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) + {} + }; + + class SurfaceLostKHRError : public SystemError + { + public: + SurfaceLostKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) + {} + SurfaceLostKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) + {} + }; + + class NativeWindowInUseKHRError : public SystemError + { + public: + NativeWindowInUseKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) + {} + NativeWindowInUseKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) + {} + }; + + class OutOfDateKHRError : public SystemError + { + public: + OutOfDateKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) + {} + OutOfDateKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + }; + + class IncompatibleDisplayKHRError : public SystemError + { + public: + IncompatibleDisplayKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) + {} + IncompatibleDisplayKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) + {} + }; + + class ValidationFailedEXTError : public SystemError + { + public: + ValidationFailedEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) + {} + ValidationFailedEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) + {} + }; + + class InvalidShaderNVError : public SystemError + { + public: + InvalidShaderNVError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) + {} + InvalidShaderNVError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) + {} + }; + + class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError + { + public: + InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + {} + InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + {} + }; + + class NotPermittedEXTError : public SystemError + { + public: + NotPermittedEXTError( std::string const & message ) + : SystemError( 
make_error_code( Result::eErrorNotPermittedEXT ), message ) + {} + NotPermittedEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) + {} + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + class FullScreenExclusiveModeLostEXTError : public SystemError + { + public: + FullScreenExclusiveModeLostEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + {} + FullScreenExclusiveModeLostEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + {} + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + [[noreturn]] static void throwResultException( Result result, char const * message ) + { + switch ( result ) + { + case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message ); + case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message ); + case Result::eErrorInitializationFailed: throw InitializationFailedError( message ); + case Result::eErrorDeviceLost: throw DeviceLostError( message ); + case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message ); + case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message ); + case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message ); + case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message ); + case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message ); + case Result::eErrorTooManyObjects: throw TooManyObjectsError( message ); + case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message ); + case Result::eErrorFragmentedPool: throw FragmentedPoolError( message ); + case Result::eErrorUnknown: throw UnknownError( message ); + case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message ); + case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message ); + case Result::eErrorFragmentation: throw FragmentationError( message ); + case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message ); + case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message ); + case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message ); + case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message ); + case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message ); + case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message ); + case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message ); + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: + throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); + case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message ); +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + default: throw SystemError( make_error_code( result ) ); + } + } +#endif + + template + void ignore( T const & ) VULKAN_HPP_NOEXCEPT + {} + + template + struct ResultValue + { +#ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( v ) ) ) +#else + ResultValue( Result r, T & v ) +#endif + : result( r ), value( v ) + {} + +#ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( std::move( v ) ) ) ) +#else + 
ResultValue( Result r, T && v )
+#endif
+      : result( r ), value( std::move( v ) )
+    {}
+
+    Result result;
+    T      value;
+
+    operator std::tuple<Result &, T &>() VULKAN_HPP_NOEXCEPT
+    {
+      return std::tuple<Result &, T &>( result, value );
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST )
+    VULKAN_HPP_DEPRECATED(
+      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
+    operator T const &() const & VULKAN_HPP_NOEXCEPT
+    {
+      return value;
+    }
+
+    VULKAN_HPP_DEPRECATED(
+      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
+    operator T &() & VULKAN_HPP_NOEXCEPT
+    {
+      return value;
+    }
+
+    VULKAN_HPP_DEPRECATED(
+      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
+    operator T const &&() const && VULKAN_HPP_NOEXCEPT
+    {
+      return std::move( value );
+    }
+
+    VULKAN_HPP_DEPRECATED(
+      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
+    operator T &&() && VULKAN_HPP_NOEXCEPT
+    {
+      return std::move( value );
+    }
+#endif
+  };
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  template <typename Type, typename Dispatch>
+  struct ResultValue<UniqueHandle<Type, Dispatch>>
+  {
+#  ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, UniqueHandle<Type, Dispatch> && v ) VULKAN_HPP_NOEXCEPT
+#  else
+    ResultValue( Result r, UniqueHandle<Type, Dispatch> && v )
+#  endif
+      : result( r )
+      , value( std::move( v ) )
+    {}
+
+    std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple()
+    {
+      return std::make_tuple( result, std::move( value ) );
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST )
+    VULKAN_HPP_DEPRECATED(
+      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
+    operator UniqueHandle<Type, Dispatch> &() & VULKAN_HPP_NOEXCEPT
+    {
+      return value;
+    }
+
+    VULKAN_HPP_DEPRECATED(
+      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
+    operator UniqueHandle<Type, Dispatch>() VULKAN_HPP_NOEXCEPT
+    {
+      return std::move( value );
+    }
+#  endif
+
+    Result result;
+    UniqueHandle<Type, Dispatch> value;
+  };
+
+  template <typename Type, typename Dispatch>
+  struct ResultValue<std::vector<UniqueHandle<Type, Dispatch>>>
+  {
+#  ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v ) VULKAN_HPP_NOEXCEPT
+#  else
+    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v )
+#  endif
+      : result( r )
+      , value( std::move( v ) )
+    {}
+
+    std::tuple<Result, std::vector<UniqueHandle<Type, Dispatch>>> asTuple()
+    {
+      return std::make_tuple( result, std::move( value ) );
+    }
+
+    Result result;
+    std::vector<UniqueHandle<Type, Dispatch>> value;
+
+#  if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST )
+    VULKAN_HPP_DEPRECATED(
+      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
+    operator std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>() VULKAN_HPP_NOEXCEPT
+    {
+      return std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>( result, value );
+    }
+#  endif
+  };
+#endif
+
+  template <typename T>
+  struct ResultValueType
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef ResultValue<T> type;
+#else
+    typedef T type;
+#endif
+  };
+
+  template <>
+  struct ResultValueType<void>
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef Result type;
+#else
+    typedef void type;
+#endif
+  };
+
+  VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( message );
+    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+    return result;
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+#endif
+  }
+
+  template <typename T>
+  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( message );
+    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+    return ResultValue<T>( result, std::move( data ) );
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+    return std::move( data );
+#endif
+  }
+
+  VULKAN_HPP_INLINE Result createResultValue( Result result,
+                                              char const * message,
+                                              std::initializer_list<Result> successCodes )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( message );
+    ignore( successCodes );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#endif
+    return result;
+  }
+
+  template <typename T>
+  VULKAN_HPP_INLINE ResultValue<T>
+    createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( message );
+    ignore( successCodes );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#endif
+    return ResultValue<T>( result, std::move( data ) );
+  }
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename T, typename D>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<T, D>>::type createResultValue(
+    Result result, T & data, char const * message, typename UniqueHandleTraits<T, D>::deleter const & deleter )
+  {
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( message );
+    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+    return ResultValue<UniqueHandle<T, D>>( result, UniqueHandle<T, D>( data, deleter ) );
+#  else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+    return UniqueHandle<T, D>( data, deleter );
+#  endif
+  }
+
+  template <typename T, typename D>
+  VULKAN_HPP_INLINE ResultValue<UniqueHandle<T, D>>
+    createResultValue( Result result,
+                       T & data,
+                       char const * message,
+                       std::initializer_list<Result> successCodes,
+                       typename UniqueHandleTraits<T, D>::deleter const & deleter )
+  {
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( message );
+    ignore( successCodes );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#  else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#  endif
+    return ResultValue<UniqueHandle<T, D>>( result, UniqueHandle<T, D>( data, deleter ) );
+  }
+
+  template <typename T, typename D>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<T, D>>>::type
+    createResultValue( Result result, std::vector<UniqueHandle<T, D>> && data, char const * message )
+  {
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( message );
+    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+    return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
+#  else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+    return std::move( data );
+#  endif
+  }
+
+  template <typename T, typename D>
+  VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<T, D>>>
+    createResultValue( Result result,
+                       std::vector<UniqueHandle<T, D>> && data,
+                       char const * message,
+                       std::initializer_list<Result> successCodes )
+  {
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( message );
+    ignore( successCodes );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#  else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#  endif
+    return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
+  }
+#endif
+
+  struct AabbPositionsKHR
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AabbPositionsKHR( float minX_ = {},
+                                           float minY_ = {},
+                                           float minZ_ = {},
+                                           float maxX_ = {},
+                                           float maxY_ = {},
+                                           float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT
+      : minX( minX_ )
+      , minY( minY_ )
+      , minZ( minZ_ )
+      , maxX( maxX_ )
+      , maxY( maxY_ )
+      , maxZ( maxZ_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
+      return *this;
+    }
+
+    AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minX = minX_;
+      return *this;
+    }
+
+    AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minY = minY_;
+      return *this;
+    }
+
+    AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minZ = minZ_;
+      return *this;
+    }
+
+    AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxX = maxX_;
+      return *this;
+    }
+
+    AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxY = maxY_;
+      return *this;
+    }
+
+    AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxZ = maxZ_;
+      return *this;
+    }
+
+    operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAabbPositionsKHR *>( this );
+    }
+
+    operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAabbPositionsKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AabbPositionsKHR const & ) const = default;
+#else
+    bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) &&
+             ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ );
+    }
+
+    bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float minX = {};
+    float minY = {};
+    float minZ = {};
+    float maxX = {};
+    float maxY = {};
+    float maxZ =
{}; + }; + static_assert( sizeof( AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + using AabbPositionsNV = AabbPositionsKHR; + + class AccelerationStructureKHR + { + public: + using CType = VkAccelerationStructureKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + + public: + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() = default; + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR( accelerationStructureKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + AccelerationStructureKHR & operator=( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = accelerationStructureKHR; + return *this; + } +#endif + + AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureKHR const & ) const = default; +#else + bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR; + } + + bool operator!=( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR; + } + + bool operator<( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR == VK_NULL_HANDLE; + } + + private: + VkAccelerationStructureKHR m_accelerationStructureKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type<ObjectType::eAccelerationStructureKHR>
+  {
+    using type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  union DeviceOrHostAddressConstKHR
+  {
+    DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
+    }
+
+    DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
+      : deviceAddress( deviceAddress_ )
+    {}
+
+    DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {}
+
+    DeviceOrHostAddressConstKHR &
+      setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+
+    DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostAddress = hostAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR &
+      operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
+      return *this;
+    }
+
+    operator VkDeviceOrHostAddressConstKHR const &() const
+    {
+      return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR *>( this );
+    }
+
+    operator VkDeviceOrHostAddressConstKHR &()
+    {
+      return *reinterpret_cast<VkDeviceOrHostAddressConstKHR *>( this );
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
+    const void * hostAddress;
+#else
+    VkDeviceAddress deviceAddress;
+    const void * hostAddress;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct AccelerationStructureGeometryTrianglesDataKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    AccelerationStructureGeometryTrianglesDataKHR(
+      VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
+      uint32_t maxVertex_ = {},
+      VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : vertexFormat( vertexFormat_ )
+      , vertexData( vertexData_ )
+      , vertexStride( vertexStride_ )
+      , maxVertex( maxVertex_ )
+      , indexType( indexType_ )
+      , indexData( indexData_ )
+      , transformData( transformData_ )
+    {}
+
+    AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureGeometryTrianglesDataKHR(
+          *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureGeometryTrianglesDataKHR &
+      operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs )
VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryTrianglesDataKHR & + operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & + setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & + setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT + { + maxVertex = maxVertex_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & + setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & + setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & + setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT + { + transformData = transformData_; + return *this; + } + + operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + uint32_t maxVertex = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {}; + }; + static_assert( sizeof( AccelerationStructureGeometryTrianglesDataKHR ) == + sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryTrianglesDataKHR; + }; + + struct AccelerationStructureGeometryAabbsDataKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureGeometryAabbsDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureGeometryAabbsDataKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {} ) VULKAN_HPP_NOEXCEPT + : data( data_ ) + , stride( stride_ ) + {} + + AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryAabbsDataKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryAabbsDataKHR & + operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryAabbsDataKHR & + operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryAabbsDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + AccelerationStructureGeometryAabbsDataKHR & + setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + }; + static_assert( sizeof( AccelerationStructureGeometryAabbsDataKHR ) == + sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryAabbsDataKHR; + }; + + struct AccelerationStructureGeometryInstancesDataKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureGeometryInstancesDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {} ) + VULKAN_HPP_NOEXCEPT + : arrayOfPointers( arrayOfPointers_ ) + , data( data_ ) + {} + + AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryInstancesDataKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryInstancesDataKHR & + operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryInstancesDataKHR & + operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryInstancesDataKHR & + setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + { + arrayOfPointers = arrayOfPointers_; + return *this; + } + + AccelerationStructureGeometryInstancesDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + }; + static_assert( sizeof( AccelerationStructureGeometryInstancesDataKHR ) == + sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryInstancesDataKHR; + }; + + union AccelerationStructureGeometryDataKHR + { + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); + } + + AccelerationStructureGeometryDataKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) + : triangles( triangles_ ) + {} + + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ ) + : aabbs( aabbs_ ) + {} + + AccelerationStructureGeometryDataKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ ) + : instances( instances_ ) + {} + + AccelerationStructureGeometryDataKHR & setTriangles( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT + { + triangles = triangles_; + return *this; + } + + AccelerationStructureGeometryDataKHR & + setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT + { + aabbs = aabbs_; + return *this; + } + + AccelerationStructureGeometryDataKHR & setInstances( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT + { + instances = instances_; + return *this; + } + + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR & + operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); + return *this; + } + + operator VkAccelerationStructureGeometryDataKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryDataKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances; +#else + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct AccelerationStructureGeometryKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureGeometryKHR( + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT + : geometryType( geometryType_ ) + , geometry( geometry_ ) + , flags( flags_ ) + {} + + AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + 
AccelerationStructureGeometryKHR & + operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryKHR & + setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + AccelerationStructureGeometryKHR & + setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT + { + geometry = geometry_; + return *this; + } + + AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + }; + static_assert( sizeof( AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryKHR; + }; + + union DeviceOrHostAddressKHR + { + DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); + } + + DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) + {} + + DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {} + + DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } + + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR & + operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); + return *this; + } + + operator VkDeviceOrHostAddressKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDeviceOrHostAddressKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + void * hostAddress; +#else + VkDeviceAddress deviceAddress; + void * hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct AccelerationStructureBuildGeometryInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureBuildGeometryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureBuildGeometryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , srcAccelerationStructure( srcAccelerationStructure_ ) + , dstAccelerationStructure( dstAccelerationStructure_ ) + , geometryCount( geometryCount_ ) + , pGeometries( pGeometries_ ) + , ppGeometries( ppGeometries_ ) + , scratchData( scratchData_ ) + {} + + AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildGeometryInfoKHR( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, + 
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} ) + : type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , srcAccelerationStructure( srcAccelerationStructure_ ) + , dstAccelerationStructure( dstAccelerationStructure_ ) + , geometryCount( static_cast( !geometries_.empty() ? geometries_.size() : pGeometries_.size() ) ) + , pGeometries( geometries_.data() ) + , ppGeometries( pGeometries_.data() ) + , scratchData( scratchData_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1 ); +# else + if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureBuildGeometryInfoKHR & + operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildGeometryInfoKHR & + operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + srcAccelerationStructure = srcAccelerationStructure_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + dstAccelerationStructure = dstAccelerationStructure_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = geometryCount_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & + setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + pGeometries = pGeometries_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR & setGeometries( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( geometries_.size() ); + pGeometries = geometries_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT + { + ppGeometries = ppGeometries_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR & setPGeometries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( pGeometries_.size() ); + ppGeometries = pGeometries_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureBuildGeometryInfoKHR & + setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT + { + scratchData = scratchData_; + return *this; + } + + operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; + }; + static_assert( sizeof( AccelerationStructureBuildGeometryInfoKHR ) == + sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AccelerationStructureBuildGeometryInfoKHR; + }; + + struct AccelerationStructureBuildRangeInfoKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( uint32_t primitiveCount_ = {}, + uint32_t primitiveOffset_ = {}, + uint32_t firstVertex_ = {}, + uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : primitiveCount( primitiveCount_ ) + , primitiveOffset( primitiveOffset_ ) + , firstVertex( firstVertex_ ) + , transformOffset( transformOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildRangeInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & + operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildRangeInfoKHR & + operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT + { + primitiveCount = primitiveCount_; + return *this; + } + + AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT + { + primitiveOffset = primitiveOffset_; + return *this; + } + + AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT + { + transformOffset = transformOffset_; + return *this; + } + + operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) && + ( firstVertex == rhs.firstVertex ) && ( transformOffset == rhs.transformOffset ); + } + + bool operator!=( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t primitiveCount = {}; + uint32_t primitiveOffset = {}; + uint32_t firstVertex = {}; + uint32_t transformOffset = {}; + }; + static_assert( sizeof( AccelerationStructureBuildRangeInfoKHR ) == sizeof( VkAccelerationStructureBuildRangeInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + struct AccelerationStructureBuildSizesInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureBuildSizesInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructureSize( accelerationStructureSize_ ) + , updateScratchSize( updateScratchSize_ ) + , buildScratchSize( buildScratchSize_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildSizesInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & + operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildSizesInfoKHR & + operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureBuildSizesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureBuildSizesInfoKHR & + setAccelerationStructureSize( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureSize = accelerationStructureSize_; + return *this; + } + + AccelerationStructureBuildSizesInfoKHR & + setUpdateScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ ) VULKAN_HPP_NOEXCEPT + { + updateScratchSize = updateScratchSize_; + return *this; + } + + AccelerationStructureBuildSizesInfoKHR & + setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT + { + buildScratchSize = buildScratchSize_; + return *this; + } + + operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( accelerationStructureSize == rhs.accelerationStructureSize ) && + ( updateScratchSize == rhs.updateScratchSize ) && ( buildScratchSize == rhs.buildScratchSize ); + } + + bool operator!=( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {}; + }; + static_assert( sizeof( AccelerationStructureBuildSizesInfoKHR ) == sizeof( 
VkAccelerationStructureBuildSizesInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureBuildSizesInfoKHR; + }; + + class Buffer + { + public: + using CType = VkBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; + + public: + VULKAN_HPP_CONSTEXPR Buffer() = default; + VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT : m_buffer( buffer ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Buffer & operator=( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT + { + m_buffer = buffer; + return *this; + } +#endif + + Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_buffer = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Buffer const & ) const = default; +#else + bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer == rhs.m_buffer; + } + + bool operator!=( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer != rhs.m_buffer; + } + + bool operator<( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer < rhs.m_buffer; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_buffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_buffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_buffer == VK_NULL_HANDLE; + } + + private: + VkBuffer m_buffer = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Buffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Buffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Buffer; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct AccelerationStructureCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : createFlags( createFlags_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + , type( type_ ) + , deviceAddress( deviceAddress_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & + operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoKHR & + operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureCreateInfoKHR & + setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT + { + createFlags = createFlags_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + AccelerationStructureCreateInfoKHR & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + AccelerationStructureCreateInfoKHR & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default; +#else + bool operator==( 
AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && + ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size ) && ( type == rhs.type ) && + ( deviceAddress == rhs.deviceAddress ); + } + + bool operator!=( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + static_assert( sizeof( AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureCreateInfoKHR; + }; + + struct GeometryTrianglesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, + uint32_t vertexCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, + uint32_t indexCount_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexData( vertexData_ ) + , vertexOffset( vertexOffset_ ) + , vertexCount( vertexCount_ ) + , vertexStride( vertexStride_ ) + , vertexFormat( vertexFormat_ ) + , indexData( indexData_ ) + , indexOffset( indexOffset_ ) + , indexCount( indexCount_ ) + , indexType( indexType_ ) + , transformData( transformData_ ) + , transformOffset( transformOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeometryTrianglesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & + operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + GeometryTrianglesNV & setVertexOffset( 
VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } + + GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT + { + indexOffset = indexOffset_; + return *this; + } + + GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT + { + transformData = transformData_; + return *this; + } + + GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT + { + transformOffset = transformOffset_; + return *this; + } + + operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryTrianglesNV const & ) const = default; +#else + bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) && + ( vertexOffset == rhs.vertexOffset ) && ( vertexCount == rhs.vertexCount ) && + ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) && + ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) && + ( indexType == rhs.indexType ) && ( transformData == rhs.transformData ) && + ( transformOffset == rhs.transformOffset ); + } + + bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {}; + uint32_t vertexCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Buffer indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {}; + uint32_t indexCount = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::Buffer transformData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {}; + }; + static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType<StructureType, StructureType::eGeometryTrianglesNV> + { + using Type = GeometryTrianglesNV; + }; + + struct GeometryAABBNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, + uint32_t numAABBs_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT + : aabbData( aabbData_ ) + , numAABBs( numAABBs_ ) + , stride( stride_ ) + , offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeometryAABBNV( *reinterpret_cast<GeometryAABBNV const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>( &rhs ); + return *this; + } + + GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT + { + aabbData = aabbData_; + return *this; + } + + GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT + { + numAABBs = numAABBs_; + return *this; + } + + GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkGeometryAABBNV *>( this ); + } + + operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkGeometryAABBNV *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryAABBNV const & ) const = default; +#else + bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) && + ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) && ( offset == rhs.offset ); + } + + bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer aabbData = {}; + uint32_t numAABBs = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + }; + static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<GeometryAABBNV>::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType<StructureType, StructureType::eGeometryAabbNV> + { + using Type = GeometryAABBNV; + }; + + struct GeometryDataNV + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, + VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT + : triangles( triangles_ ) + , aabbs( aabbs_ ) + {} + + VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeometryDataNV( *reinterpret_cast<GeometryDataNV const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>( &rhs ); + return *this; + } + + GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT + { + triangles = triangles_; + return *this; + } + + GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT + { + aabbs = aabbs_; + return *this; + } + + operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkGeometryDataNV *>( this ); + } + + operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkGeometryDataNV *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryDataNV const & ) const = default; +#else + bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( triangles == rhs.triangles ) && ( aabbs == rhs.aabbs ); + } + + bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {}; + VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {}; + }; + static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<GeometryDataNV>::value, "struct wrapper is not a standard layout!" 
); + + struct GeometryNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryNV( + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT + : geometryType( geometryType_ ) + , geometry( geometry_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeometryNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT + { + geometry = geometry_; + return *this; + } + + GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryNV const & ) const = default; +#else + bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) && + ( geometry == rhs.geometry ) && ( flags == rhs.flags ); + } + + bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + }; + static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = GeometryNV; + }; + + struct AccelerationStructureInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, + uint32_t instanceCount_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , instanceCount( instanceCount_ ) + , geometryCount( geometryCount_ ) + , pGeometries( pGeometries_ ) + {} + + VULKAN_HPP_CONSTEXPR + AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureInfoNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, + uint32_t instanceCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_ ) + : type( type_ ) + , flags( flags_ ) + , instanceCount( instanceCount_ ) + , geometryCount( static_cast( geometries_.size() ) ) + , pGeometries( geometries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & + operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + AccelerationStructureInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = geometryCount_; + return *this; + } + + AccelerationStructureInfoNV & + setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + pGeometries = pGeometries_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureInfoNV & setGeometries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_ ) + VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( geometries_.size() ); + pGeometries = geometries_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) && + ( instanceCount == rhs.instanceCount ) && ( geometryCount == rhs.geometryCount ) && + ( pGeometries == rhs.pGeometries ); + } + + bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {}; + uint32_t instanceCount = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries = {}; + }; + static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureInfoNV; + }; + + struct AccelerationStructureCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {} ) VULKAN_HPP_NOEXCEPT + : compactedSize( compactedSize_ ) + , info( info_ ) + {} + + VULKAN_HPP_CONSTEXPR + AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & + operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureCreateInfoNV & + setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT + { + compactedSize = compactedSize_; + return *this; + } + + AccelerationStructureCreateInfoNV & + setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT + { + info = info_; + return *this; + } + + operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) && + ( info == rhs.info ); + } + + bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {}; + }; + static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureCreateInfoNV; + }; + + struct AccelerationStructureDeviceAddressInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureDeviceAddressInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructure( accelerationStructure_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( + AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureDeviceAddressInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & + operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureDeviceAddressInfoKHR & + operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ); + } + + bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; + }; + static_assert( sizeof( 
AccelerationStructureDeviceAddressInfoKHR ) == + sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<AccelerationStructureDeviceAddressInfoKHR>::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType<StructureType, StructureType::eAccelerationStructureDeviceAddressInfoKHR> + { + using Type = AccelerationStructureDeviceAddressInfoKHR; + }; + + struct TransformMatrixKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + TransformMatrixKHR( std::array<std::array<float, 4>, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT + : matrix( matrix_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & + operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>( &rhs ); + return *this; + } + + TransformMatrixKHR & setMatrix( std::array<std::array<float, 4>, 3> matrix_ ) VULKAN_HPP_NOEXCEPT + { + matrix = matrix_; + return *this; + } + + operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkTransformMatrixKHR *>( this ); + } + + operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkTransformMatrixKHR *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TransformMatrixKHR const & ) const = default; +#else + bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( matrix == rhs.matrix ); + } + + bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {}; + }; + static_assert( sizeof( TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<TransformMatrixKHR>::value, "struct wrapper is not a standard layout!" 
); + using TransformMatrixNV = TransformMatrixKHR; + + struct AccelerationStructureInstanceKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureInstanceKHR( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, + uint32_t instanceCustomIndex_ = {}, + uint32_t mask_ = {}, + uint32_t instanceShaderBindingTableRecordOffset_ = {}, + VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, + uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT + : transform( transform_ ) + , instanceCustomIndex( instanceCustomIndex_ ) + , mask( mask_ ) + , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) + , flags( flags_ ) + , accelerationStructureReference( accelerationStructureReference_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureInstanceKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & + operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureInstanceKHR & + setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + AccelerationStructureInstanceKHR & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureInstanceKHR & + setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast( &flags_ ); + return *this; + } + + AccelerationStructureInstanceKHR & + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } + + operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default; +#else + bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && + ( mask == rhs.mask ) && + ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && + ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference ); + } + + bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; + }; + static_assert( sizeof( AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; + + class AccelerationStructureNV + { + public: + using CType = VkAccelerationStructureNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; + + public: + VULKAN_HPP_CONSTEXPR AccelerationStructureNV() = default; + VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureNV( accelerationStructureNV ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + AccelerationStructureNV & operator=( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = accelerationStructureNV; + return *this; + } +#endif + + AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureNV const & ) const = default; +#else + bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV == rhs.m_accelerationStructureNV; + } + + bool operator!=( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV != rhs.m_accelerationStructureNV; + } + + bool operator<( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV < rhs.m_accelerationStructureNV; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV == VK_NULL_HANDLE; + } + + private: + VkAccelerationStructureNV m_accelerationStructureNV = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct AccelerationStructureMemoryRequirementsInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , accelerationStructure( accelerationStructure_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( + AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureMemoryRequirementsInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & + operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMemoryRequirementsInfoNV & + operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureMemoryRequirementsInfoNV & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && + ( accelerationStructure == rhs.accelerationStructure ); + } + + bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV 
type = + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + }; + static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == + sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureMemoryRequirementsInfoNV; + }; + + struct AccelerationStructureVersionInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureVersionInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {} ) VULKAN_HPP_NOEXCEPT + : pVersionData( pVersionData_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureVersionInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & + operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureVersionInfoKHR & + operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT + { + pVersionData = pVersionData_; + return *this; + } + + operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData ); + } + + bool operator!=( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionInfoKHR; + const void * pNext = {}; + const uint8_t * pVersionData = {}; + }; + static_assert( sizeof( AccelerationStructureVersionInfoKHR ) == sizeof( VkAccelerationStructureVersionInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AccelerationStructureVersionInfoKHR; + }; + + class SwapchainKHR + { + public: + using CType = VkSwapchainKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + public: + VULKAN_HPP_CONSTEXPR SwapchainKHR() = default; + VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT + : m_swapchainKHR( swapchainKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SwapchainKHR & operator=( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = swapchainKHR; + return *this; + } +#endif + + SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainKHR const & ) const = default; +#else + bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == rhs.m_swapchainKHR; + } + + bool operator!=( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != rhs.m_swapchainKHR; + } + + bool operator<( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR < rhs.m_swapchainKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == VK_NULL_HANDLE; + } + + private: + VkSwapchainKHR m_swapchainKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Semaphore + { + public: + using CType = VkSemaphore; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + + public: + VULKAN_HPP_CONSTEXPR Semaphore() = default; + VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT : m_semaphore( semaphore ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Semaphore & operator=( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = semaphore; + return *this; + } +#endif + + Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Semaphore const & ) const = default; +#else + bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == rhs.m_semaphore; + } + + bool operator!=( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != rhs.m_semaphore; + } + + bool operator<( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore < rhs.m_semaphore; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == VK_NULL_HANDLE; + } + + private: + VkSemaphore m_semaphore = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Fence + { + public: + using CType = VkFence; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eFence; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + + public: + VULKAN_HPP_CONSTEXPR Fence() = default; + VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT : m_fence( fence ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Fence & operator=( VkFence fence ) VULKAN_HPP_NOEXCEPT + { + m_fence = fence; + return *this; + } +#endif + + Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_fence = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Fence const & ) const = default; +#else + bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence == rhs.m_fence; + } + + bool operator!=( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence != rhs.m_fence; + } + + bool operator<( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence < rhs.m_fence; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_fence != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_fence == VK_NULL_HANDLE; + } + + private: + VkFence m_fence = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct AcquireNextImageInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, + uint64_t timeout_ = {}, + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT + : swapchain( swapchain_ ) + , timeout( timeout_ ) + , semaphore( semaphore_ ) + , fence( fence_ ) + , deviceMask( deviceMask_ ) + {} + + VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AcquireNextImageInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & + operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT + { + timeout = timeout_; + return *this; + } + + AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } + + operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireNextImageInfoKHR const & ) const = default; +#else + bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && + ( timeout == rhs.timeout ) && ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) && + ( deviceMask == rhs.deviceMask ); + } + + bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint64_t timeout = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::Fence fence 
= {}; + uint32_t deviceMask = {}; + }; + static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AcquireNextImageInfoKHR; + }; + + struct AcquireProfilingLockInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, + uint64_t timeout_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , timeout( timeout_ ) + {} + + VULKAN_HPP_CONSTEXPR + AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AcquireProfilingLockInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & + operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AcquireProfilingLockInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT + { + timeout = timeout_; + return *this; + } + + operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default; +#else + bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( timeout == rhs.timeout ); + } + + bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {}; + uint64_t timeout = {}; + }; + static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AcquireProfilingLockInfoKHR; + }; + + struct AllocationCallbacks + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {}, + PFN_vkAllocationFunction pfnAllocation_ = {}, + PFN_vkReallocationFunction pfnReallocation_ = {}, + PFN_vkFreeFunction pfnFree_ = {}, + PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, + PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT + : pUserData( pUserData_ ) + , pfnAllocation( pfnAllocation_ ) + , pfnReallocation( pfnReallocation_ ) + , pfnFree( pfnFree_ ) + , pfnInternalAllocation( pfnInternalAllocation_ ) + , pfnInternalFree( pfnInternalFree_ ) + {} + + VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT + : AllocationCallbacks( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & + operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } + + AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnAllocation = pfnAllocation_; + return *this; + } + + AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnReallocation = pfnReallocation_; + return *this; + } + + AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT + { + pfnFree = pfnFree_; + return *this; + } + + AllocationCallbacks & + setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnInternalAllocation = pfnInternalAllocation_; + return *this; + } + + AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT + { + pfnInternalFree = pfnInternalFree_; + return *this; + } + + operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AllocationCallbacks const & ) const = default; +#else + bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) && + ( pfnReallocation == rhs.pfnReallocation ) && ( pfnFree == rhs.pfnFree ) && + ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree ); + } + + bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + void * pUserData = {}; + PFN_vkAllocationFunction pfnAllocation = {}; + PFN_vkReallocationFunction pfnReallocation = {}; + PFN_vkFreeFunction pfnFree = {}; + PFN_vkInternalAllocationNotification pfnInternalAllocation = {}; + PFN_vkInternalFreeNotification pfnInternalFree = {}; + }; + static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), + "struct and wrapper have 
different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ComponentMapping + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) + VULKAN_HPP_NOEXCEPT + : r( r_ ) + , g( g_ ) + , b( b_ ) + , a( a_ ) + {} + + VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT + : ComponentMapping( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT + { + r = r_; + return *this; + } + + ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT + { + g = g_; + return *this; + } + + ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT + { + b = b_; + return *this; + } + + ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT + { + a = a_; + return *this; + } + + operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComponentMapping const & ) const = default; +#else + bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a ); + } + + bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + }; + static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferFormatPropertiesANDROID + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint64_t externalFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = + VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + , externalFormat( externalFormat_ ) + , formatFeatures( formatFeatures_ ) + , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) + , suggestedYcbcrModel( suggestedYcbcrModel_ ) + , suggestedYcbcrRange( suggestedYcbcrRange_ ) + , suggestedXChromaOffset( suggestedXChromaOffset_ ) + , suggestedYChromaOffset( suggestedYChromaOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( + AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatPropertiesANDROID( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferFormatPropertiesANDROID & + operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatPropertiesANDROID & + operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && + ( externalFormat == rhs.externalFormat ) && ( formatFeatures == rhs.formatFeatures ) && + ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && + ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && + ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && + ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); + } + + bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif 
+ + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint64_t externalFormat = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == + sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AndroidHardwareBufferFormatPropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferPropertiesANDROID + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAndroidHardwareBufferPropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : allocationSize( allocationSize_ ) + , memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferPropertiesANDROID( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferPropertiesANDROID & + operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferPropertiesANDROID & + operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && + ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eAndroidHardwareBufferPropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeBits = {}; + }; + static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AndroidHardwareBufferPropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferUsageANDROID + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAndroidHardwareBufferUsageANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT + : androidHardwareBufferUsage( androidHardwareBufferUsage_ ) + {} + + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferUsageANDROID( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferUsageANDROID & + operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); + } + + bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; + void * pNext = {}; + uint64_t androidHardwareBufferUsage = {}; + }; + static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AndroidHardwareBufferUsageANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidSurfaceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, + struct ANativeWindow * window_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , window( window_ ) + {} + + VULKAN_HPP_CONSTEXPR + AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & + operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AndroidSurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } + + operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); + } + + bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {}; + struct ANativeWindow * window = {}; + }; + static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AndroidSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct ApplicationInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_ = {}, + uint32_t applicationVersion_ = {}, + const char * pEngineName_ = {}, + uint32_t engineVersion_ = {}, + uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : pApplicationName( pApplicationName_ ) + , applicationVersion( applicationVersion_ ) + , pEngineName( pEngineName_ ) + , engineVersion( engineVersion_ ) + , apiVersion( apiVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ApplicationInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT + { + pApplicationName = pApplicationName_; + return *this; + } + + ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT + { + applicationVersion = applicationVersion_; + return *this; + } + + ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT + { + pEngineName = pEngineName_; + return *this; + } + + ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT + { + engineVersion = engineVersion_; + return *this; + } + + ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT + { + apiVersion = apiVersion_; + return *this; + } + + operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ApplicationInfo const & ) const = default; +#else + bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pApplicationName == rhs.pApplicationName ) && + ( applicationVersion == rhs.applicationVersion ) && ( pEngineName == rhs.pEngineName ) && + ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion ); + } + + bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo; + const void * pNext = {}; + const char * pApplicationName = {}; + uint32_t applicationVersion = {}; + const char * pEngineName = {}; + uint32_t engineVersion = {}; + uint32_t apiVersion = {}; + }; + static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ApplicationInfo; + }; + + struct AttachmentDescription + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescription( + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , format( format_ ) + , samples( samples_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , stencilLoadOp( stencilLoadOp_ ) + , stencilStoreOp( stencilStoreOp_ ) + , initialLayout( initialLayout_ ) + , finalLayout( finalLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescription( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & + operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + AttachmentDescription & + setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + AttachmentDescription & + setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilStoreOp = stencilStoreOp_; + return *this; + } + + AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } + + AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT + { + finalLayout = finalLayout_; + return *this; + } + + operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
+    operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentDescription *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AttachmentDescription const & ) const = default;
+#else
+    bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) &&
+             ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) &&
+             ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) &&
+             ( finalLayout == rhs.finalLayout );
+    }
+
+    bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+  static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentDescription>::value, "struct wrapper is not a standard layout!"
); + + struct AttachmentDescription2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescription2( + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , format( format_ ) + , samples( samples_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , stencilLoadOp( stencilLoadOp_ ) + , stencilStoreOp( stencilStoreOp_ ) + , initialLayout( initialLayout_ ) + , finalLayout( finalLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescription2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & + operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + AttachmentDescription2 & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + AttachmentDescription2 & + setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + AttachmentDescription2 & + setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilStoreOp = stencilStoreOp_; + return *this; + } + + AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } + + AttachmentDescription2 & setFinalLayout( 
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT + { + finalLayout = finalLayout_; + return *this; + } + + operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescription2 const & ) const = default; +#else + bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) && + ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && + ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && + ( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout ); + } + + bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + static_assert( sizeof( AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AttachmentDescription2; + }; + using AttachmentDescription2KHR = AttachmentDescription2; + + struct AttachmentDescriptionStencilLayout + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAttachmentDescriptionStencilLayout; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( + VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT + : stencilInitialLayout( stencilInitialLayout_ ) + , stencilFinalLayout( stencilFinalLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescriptionStencilLayout( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & + operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescriptionStencilLayout & + operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AttachmentDescriptionStencilLayout & + setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT + { + stencilInitialLayout = stencilInitialLayout_; + return *this; + } + + AttachmentDescriptionStencilLayout & + setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT + { + stencilFinalLayout = stencilFinalLayout_; + return *this; + } + + operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default; +#else + bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) && + ( stencilFinalLayout == rhs.stencilFinalLayout ); + } + + bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + static_assert( sizeof( AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
+  );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentDescriptionStencilLayout>
+  {
+    using Type = AttachmentDescriptionStencilLayout;
+  };
+  using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
+
+  struct AttachmentReference
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AttachmentReference(
+      uint32_t attachment_ = {},
+      VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : attachment( attachment_ )
+      , layout( layout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentReference( *reinterpret_cast<AttachmentReference const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReference &
+      operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>( &rhs );
+      return *this;
+    }
+
+    AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachment = attachment_;
+      return *this;
+    }
+
+    AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReference *>( this );
+    }
+
+    operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReference *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AttachmentReference const & ) const = default;
+#else
+    bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( attachment == rhs.attachment ) && ( layout == rhs.layout );
+    }
+
+    bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t attachment = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+  static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentReference>::value, "struct wrapper is not a standard layout!"
); + + struct AttachmentReference2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AttachmentReference2( uint32_t attachment_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT + : attachment( attachment_ ) + , layout( layout_ ) + , aspectMask( aspectMask_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentReference2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & + operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT + { + attachment = attachment_; + return *this; + } + + AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReference2 const & ) const = default; +#else + bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) && + ( layout == rhs.layout ) && ( aspectMask == rhs.aspectMask ); + } + + bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2; + const void * pNext = {}; + uint32_t attachment = {}; + VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + }; + static_assert( sizeof( AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AttachmentReference2; + }; + using AttachmentReference2KHR = AttachmentReference2; + + struct AttachmentReferenceStencilLayout + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = + VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : stencilLayout( stencilLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR + AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentReferenceStencilLayout( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & + operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AttachmentReferenceStencilLayout & + setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT + { + stencilLayout = stencilLayout_; + return *this; + } + + operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default; +#else + bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout ); + } + + bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + static_assert( sizeof( AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
+  );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
+  {
+    using Type = AttachmentReferenceStencilLayout;
+  };
+  using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
+
+  struct Extent2D
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT
+      : width( width_ )
+      , height( height_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs );
+      return *this;
+    }
+
+    Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent2D *>( this );
+    }
+
+    operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent2D *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Extent2D const & ) const = default;
+#else
+    bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( width == rhs.width ) && ( height == rhs.height );
+    }
+
+    bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t width = {};
+    uint32_t height = {};
+  };
+  static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Extent2D>::value, "struct wrapper is not a standard layout!" );
+
+  struct SampleLocationEXT
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT &
+      operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs );
+      return *this;
+    }
+
+    SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSampleLocationEXT *>( this );
+    }
+
+    operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSampleLocationEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SampleLocationEXT const & ) const = default;
+#else
+    bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x ) && ( y == rhs.y );
+    }
+
+    bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float x = {};
+    float y = {};
+  };
+  static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SampleLocationEXT>::value, "struct wrapper is not a standard layout!"
); + + struct SampleLocationsInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, + uint32_t sampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) + , sampleLocationGridSize( sampleLocationGridSize_ ) + , sampleLocationsCount( sampleLocationsCount_ ) + , pSampleLocations( pSampleLocations_ ) + {} + + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SampleLocationsInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SampleLocationsInfoEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) + : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) + , sampleLocationGridSize( sampleLocationGridSize_ ) + , sampleLocationsCount( static_cast( sampleLocations_.size() ) ) + , pSampleLocations( sampleLocations_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & + operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SampleLocationsInfoEXT & setSampleLocationsPerPixel( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsPerPixel = sampleLocationsPerPixel_; + return *this; + } + + SampleLocationsInfoEXT & + setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationGridSize = sampleLocationGridSize_; + return *this; + } + + SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsCount = sampleLocationsCount_; + return *this; + } + + SampleLocationsInfoEXT & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pSampleLocations = pSampleLocations_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SampleLocationsInfoEXT & setSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsCount = static_cast( sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SampleLocationsInfoEXT const & ) const = default; +#else + bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) && + ( sampleLocationGridSize == rhs.sampleLocationGridSize ) && + ( sampleLocationsCount == rhs.sampleLocationsCount ) && ( pSampleLocations == rhs.pSampleLocations ); + } + + bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {}; + uint32_t sampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {}; + }; + static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SampleLocationsInfoEXT; + }; + + struct AttachmentSampleLocationsEXT + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( + uint32_t attachmentIndex_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentIndex( attachmentIndex_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentSampleLocationsEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & + operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT + { + attachmentIndex = attachmentIndex_; + return *this; + } + + AttachmentSampleLocationsEXT & setSampleLocationsInfo( + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } + + operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default; +#else + bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + } + + bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t attachmentIndex = {}; + 
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + struct BaseInStructure + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + BaseInStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = + VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo ) VULKAN_HPP_NOEXCEPT : sType( sType_ ) + {} + + BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + : BaseInStructure( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BaseInStructure const & ) const = default; +#else + bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); + } + + bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo; + const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {}; + }; + static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct BaseOutStructure + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + BaseOutStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = + VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo ) VULKAN_HPP_NOEXCEPT : sType( sType_ ) + {} + + BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT + : BaseOutStructure( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BaseOutStructure const & ) const = default; +#else + bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); + } + + bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo; + struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {}; + }; + static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + class DeviceMemory + { + public: + using CType = VkDeviceMemory; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + + public: + VULKAN_HPP_CONSTEXPR DeviceMemory() = default; + VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT + : m_deviceMemory( deviceMemory ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DeviceMemory & operator=( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT + { + m_deviceMemory = deviceMemory; + return *this; + } +#endif + + DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_deviceMemory = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemory const & ) const = default; +#else + bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory == rhs.m_deviceMemory; + } + + bool operator!=( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory != rhs.m_deviceMemory; + } + + bool operator<( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory < rhs.m_deviceMemory; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory == VK_NULL_HANDLE; + } + + private: + VkDeviceMemory m_deviceMemory = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct BindAccelerationStructureMemoryInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eBindAccelerationStructureMemoryInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructure( accelerationStructure_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindAccelerationStructureMemoryInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindAccelerationStructureMemoryInfoNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) + : accelerationStructure( accelerationStructure_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , deviceIndexCount( static_cast( deviceIndices_.size() ) ) + , pDeviceIndices( deviceIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & + operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindAccelerationStructureMemoryInfoNV & + operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV & + setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + 
pDeviceIndices = pDeviceIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindAccelerationStructureMemoryInfoNV & setDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default; +#else + bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( accelerationStructure == rhs.accelerationStructure ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ); + } + + bool operator!=( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + }; + static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = BindAccelerationStructureMemoryInfoNV; + }; + + struct BindBufferMemoryDeviceGroupInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR + BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindBufferMemoryDeviceGroupInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindBufferMemoryDeviceGroupInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) + : deviceIndexCount( static_cast( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & + operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindBufferMemoryDeviceGroupInfo & setDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default; +#else + bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ); + } + + bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + }; + static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), + "struct and 
wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BindBufferMemoryDeviceGroupInfo; + }; + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; + + struct BindBufferMemoryInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindBufferMemoryInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & + operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindBufferMemoryInfo const & ) const = default; +#else + bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ); + } + + bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + }; + static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = BindBufferMemoryInfo; + }; + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; + + struct Offset2D + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + {} + + VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset2D( *reinterpret_cast( &rhs ) ) {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOffset2D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Offset2D const & ) const = default; +#else + bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( x == rhs.x ) && ( y == rhs.y ); + } + + bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + int32_t x = {}; + int32_t y = {}; + }; + static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct Rect2D + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : offset( offset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT : Rect2D( *reinterpret_cast( &rhs ) ) {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRect2D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Rect2D const & ) const = default; +#else + bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( offset == rhs.offset ) && ( extent == rhs.extent ); + } + + bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Offset2D offset = {}; + VULKAN_HPP_NAMESPACE::Extent2D extent = {}; + }; + static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct BindImageMemoryDeviceGroupInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( + uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {}, + uint32_t splitInstanceBindRegionCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ) + , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR + BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImageMemoryDeviceGroupInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + splitInstanceBindRegions_ = {} ) + : deviceIndexCount( static_cast( deviceIndices_.size() ) ) + , pDeviceIndices( deviceIndices_.data() ) + , splitInstanceBindRegionCount( static_cast( splitInstanceBindRegions_.size() ) ) + , pSplitInstanceBindRegions( splitInstanceBindRegions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & + operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo & setDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindImageMemoryDeviceGroupInfo & + setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT + { + splitInstanceBindRegionCount = splitInstanceBindRegionCount_; + return *this; + } + + BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + { + pSplitInstanceBindRegions = pSplitInstanceBindRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + 
splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + { + splitInstanceBindRegionCount = static_cast( splitInstanceBindRegions_.size() ); + pSplitInstanceBindRegions = splitInstanceBindRegions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default; +#else + bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ) && + ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) && + ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); + } + + bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + uint32_t splitInstanceBindRegionCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions = {}; + }; + static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BindImageMemoryDeviceGroupInfo; + }; + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; + + class Image + { + public: + using CType = VkImage; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eImage; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; + + public: + VULKAN_HPP_CONSTEXPR Image() = default; + VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT : m_image( image ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Image & operator=( VkImage image ) VULKAN_HPP_NOEXCEPT + { + m_image = image; + return *this; + } +#endif + + Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_image = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Image const & ) const = default; +#else + bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image == rhs.m_image; + } + + bool operator!=( Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image != rhs.m_image; + } + + bool operator<( Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image < rhs.m_image; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT + { + return m_image; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_image != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_image == VK_NULL_HANDLE; + } + + private: + VkImage m_image = {}; + }; + static_assert( sizeof( 
VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Image; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct BindImageMemoryInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImageMemoryInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & + operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemoryInfo const & ) const = default; +#else + bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ); + } + + bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + }; + static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = BindImageMemoryInfo; + }; + using BindImageMemoryInfoKHR = BindImageMemoryInfo; + + struct BindImageMemorySwapchainInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, + uint32_t imageIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : swapchain( swapchain_ ) + , imageIndex( imageIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImageMemorySwapchainInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & + operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT + { + imageIndex = imageIndex_; + return *this; + } + + operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default; +#else + bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && + ( imageIndex == rhs.imageIndex ); + } + + bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint32_t imageIndex = {}; + }; + static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = BindImageMemorySwapchainInfoKHR; + }; + + struct BindImagePlaneMemoryInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT + : planeAspect( planeAspect_ ) + {} + + VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImagePlaneMemoryInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & + operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindImagePlaneMemoryInfo & + setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } + + operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default; +#else + bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); + } + + bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; + static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = BindImagePlaneMemoryInfo; + }; + using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + + struct BindIndexBufferIndirectCommandNV + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT + : bufferAddress( bufferAddress_ ) + , size( size_ ) + , indexType( indexType_ ) + {} + + VULKAN_HPP_CONSTEXPR + BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindIndexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & + operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindIndexBufferIndirectCommandNV & + setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default; +#else + bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType ); + } + + bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + }; + static_assert( sizeof( BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);

+  struct BindShaderGroupIndirectCommandNV
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : groupIndex( groupIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindShaderGroupIndirectCommandNV( *reinterpret_cast<BindShaderGroupIndirectCommandNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV &
+      operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+    BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupIndex = groupIndex_;
+      return *this;
+    }
+
+    operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV *>( this );
+    }
+
+    operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default;
+#else
+    bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( groupIndex == rhs.groupIndex );
+    }
+
+    bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t groupIndex = {};
+  };
+  static_assert( sizeof( BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindShaderGroupIndirectCommandNV>::value,
+                 "struct wrapper is not a standard layout!"
); + + struct SparseMemoryBind + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : resourceOffset( resourceOffset_ ) + , size( size_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseMemoryBind( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT + { + resourceOffset = resourceOffset_; + return *this; + } + + SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseMemoryBind const & ) const = default; +#else + bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); + } + + bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + }; + static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct SparseBufferMemoryBindInfo + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseBufferMemoryBindInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseBufferMemoryBindInfo( + VULKAN_HPP_NAMESPACE::Buffer buffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : buffer( buffer_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & + operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseBufferMemoryBindInfo & setBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( buffer == rhs.buffer ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; + }; + static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + struct SparseImageOpaqueMemoryBindInfo + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageOpaqueMemoryBindInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageOpaqueMemoryBindInfo( + VULKAN_HPP_NAMESPACE::Image image_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & + operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + SparseImageOpaqueMemoryBindInfo & + setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageOpaqueMemoryBindInfo & setBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; + }; + static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + struct ImageSubresource + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t mipLevel_ = {}, + uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , mipLevel( mipLevel_ ) + , arrayLayer( arrayLayer_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresource( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT + { + mipLevel = mipLevel_; + return *this; + } + + ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + arrayLayer = arrayLayer_; + return *this; + } + + operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresource const & ) const = default; +#else + bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( arrayLayer == rhs.arrayLayer ); + } + + bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t mipLevel = {}; + uint32_t arrayLayer = {}; + }; + static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct Offset3D
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset3D( *reinterpret_cast<Offset3D const *>( &rhs ) ) {}
+
+    explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
+      return *this;
+    }
+
+    Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+    operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOffset3D *>( this );
+    }
+
+    operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOffset3D *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Offset3D const & ) const = default;
+#else
+    bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
+    }
+
+    bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    int32_t x = {};
+    int32_t y = {};
+    int32_t z = {};
+  };
+  static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Offset3D>::value, "struct wrapper is not a standard layout!" );
+
+  struct Extent3D
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
+      : width( width_ )
+      , height( height_ )
+      , depth( depth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) ) {}
+
+    explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} )
+      : width( extent2D.width ), height( extent2D.height ), depth( depth_ )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
+      return *this;
+    }
+
+    Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+
+    operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent3D *>( this );
+    }
+
+    operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent3D *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Extent3D const & ) const = default;
+#else
+    bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
+    }
+
+    bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t depth = {};
+  };
+  static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Extent3D>::value, "struct wrapper is not a standard layout!"
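+
+  // Illustrative usage sketch (editor's addition, not part of the generated header):
+  // Offset3D and Extent3D can be promoted from their 2D counterparts via the explicit
+  // constructors above, which is handy when filling image copy/blit regions.
+  //
+  //   vk::Offset2D origin2D{ 16, 32 };
+  //   vk::Extent2D size2D{ 256, 128 };
+  //   vk::Offset3D origin3D( origin2D, /*z_=*/0 );     // { 16, 32, 0 }
+  //   vk::Extent3D size3D( size2D, /*depth_=*/1 );     // { 256, 128, 1 }
+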
); + + struct SparseImageMemoryBind + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : subresource( subresource_ ) + , offset( offset_ ) + , extent( extent_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryBind( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & + operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseImageMemoryBind & + setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT + { + subresource = subresource_; + return *this; + } + + SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBind const & ) const = default; +#else + bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) && + ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); + } + + bool operator!=( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D offset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + }; + static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct SparseImageMemoryBindInfo + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryBindInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageMemoryBindInfo( + VULKAN_HPP_NAMESPACE::Image image_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & + operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + SparseImageMemoryBindInfo & + setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageMemoryBindInfo & setBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds = {}; + }; + static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
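+
+  // Illustrative usage sketch (editor's addition, not part of the generated header): the
+  // fluent setters return *this, so a SparseImageMemoryBind can be built in one expression
+  // and handed to SparseImageMemoryBindInfo. The handles memory and sparseImage are
+  // placeholders for this example only.
+  //
+  //   vk::SparseImageMemoryBind bind = vk::SparseImageMemoryBind{}
+  //       .setSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0 } )
+  //       .setOffset( { 0, 0, 0 } )
+  //       .setExtent( { 128, 128, 1 } )
+  //       .setMemory( memory );
+  //   vk::SparseImageMemoryBindInfo imageBindInfo( sparseImage, bind );
+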
); + + struct BindSparseInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t bufferBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {}, + uint32_t imageOpaqueBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {}, + uint32_t imageBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , bufferBindCount( bufferBindCount_ ) + , pBufferBinds( pBufferBinds_ ) + , imageOpaqueBindCount( imageOpaqueBindCount_ ) + , pImageOpaqueBinds( pImageOpaqueBinds_ ) + , imageBindCount( imageBindCount_ ) + , pImageBinds( pImageBinds_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + {} + + VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindSparseInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageOpaqueBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphores_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , bufferBindCount( static_cast( bufferBinds_.size() ) ) + , pBufferBinds( bufferBinds_.data() ) + , imageOpaqueBindCount( static_cast( imageOpaqueBinds_.size() ) ) + , pImageOpaqueBinds( imageOpaqueBinds_.data() ) + , imageBindCount( static_cast( imageBinds_.size() ) ) + , pImageBinds( imageBinds_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) + , pSignalSemaphores( signalSemaphores_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setWaitSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( 
waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferBindCount = bufferBindCount_; + return *this; + } + + BindSparseInfo & + setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBufferBinds = pBufferBinds_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setBufferBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferBinds_ ) VULKAN_HPP_NOEXCEPT + { + bufferBindCount = static_cast( bufferBinds_.size() ); + pBufferBinds = bufferBinds_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT + { + imageOpaqueBindCount = imageOpaqueBindCount_; + return *this; + } + + BindSparseInfo & setPImageOpaqueBinds( + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + { + pImageOpaqueBinds = pImageOpaqueBinds_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setImageOpaqueBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + { + imageOpaqueBindCount = static_cast( imageOpaqueBinds_.size() ); + pImageOpaqueBinds = imageOpaqueBinds_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT + { + imageBindCount = imageBindCount_; + return *this; + } + + BindSparseInfo & + setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT + { + pImageBinds = pImageBinds_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setImageBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageBinds_ ) VULKAN_HPP_NOEXCEPT + { + imageBindCount = static_cast( imageBinds_.size() ); + pImageBinds = imageBinds_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + BindSparseInfo & + setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setSignalSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindSparseInfo const & ) const = default; +#else + bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( 
bufferBindCount == rhs.bufferBindCount ) && + ( pBufferBinds == rhs.pBufferBinds ) && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) && + ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) && ( imageBindCount == rhs.imageBindCount ) && + ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphores == rhs.pSignalSemaphores ); + } + + bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t bufferBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds = {}; + uint32_t imageOpaqueBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds = {}; + uint32_t imageBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; + }; + static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BindSparseInfo; + }; + + struct BindVertexBufferIndirectCommandNV + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferAddress( bufferAddress_ ) + , size( size_ ) + , stride( stride_ ) + {} + + VULKAN_HPP_CONSTEXPR + BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindVertexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & + operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindVertexBufferIndirectCommandNV & + setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default; +#else + bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride 
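+
+  // Illustrative usage sketch (editor's addition, not part of the generated header) for the
+  // BindSparseInfo wrapper defined above, assuming the usual vk::Queue::bindSparse overload
+  // from this header; the semaphore/bind-info containers, sparseQueue and fence are
+  // placeholders. The ArrayProxy constructor fills the count/pointer pairs.
+  //
+  //   vk::BindSparseInfo bindSparseInfo( waitSemaphores,      // wait before binding
+  //                                      bufferBindInfos,     // SparseBufferMemoryBindInfo[]
+  //                                      opaqueBindInfos,     // SparseImageOpaqueMemoryBindInfo[]
+  //                                      imageBindInfos,      // SparseImageMemoryBindInfo[]
+  //                                      signalSemaphores );  // signal when done
+  //   sparseQueue.bindSparse( bindSparseInfo, fence );
+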
== rhs.stride ); + } + + bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + uint32_t stride = {}; + }; + static_assert( sizeof( BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + struct ImageSubresourceLayers + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t mipLevel_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , mipLevel( mipLevel_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresourceLayers( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & + operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT + { + mipLevel = mipLevel_; + return *this; + } + + ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresourceLayers const & ) const = default; +#else + bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && + ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); + } + + bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t mipLevel = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
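+
+  // Illustrative usage sketch (editor's addition, not part of the generated header):
+  // ImageSubresourceLayers selects one mip level and a layer range of an image, and is the
+  // subresource type used by the copy/blit region structs that follow.
+  //
+  //   vk::ImageSubresourceLayers colorLayer0( vk::ImageAspectFlagBits::eColor,
+  //                                           /*mipLevel_=*/0,
+  //                                           /*baseArrayLayer_=*/0,
+  //                                           /*layerCount_=*/1 );
+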
); + + struct ImageBlit2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ImageBlit2KHR( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffsets( srcOffsets_ ) + , dstSubresource( dstSubresource_ ) + , dstOffsets( dstOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit2KHR( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageBlit2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR & operator=( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit2KHR & operator=( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageBlit2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageBlit2KHR & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageBlit2KHR & + setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + srcOffsets = srcOffsets_; + return *this; + } + + ImageBlit2KHR & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageBlit2KHR & + setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dstOffsets = dstOffsets_; + return *this; + } + + operator VkImageBlit2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageBlit2KHR const & ) const = default; +#else + bool operator==( ImageBlit2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && + ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffsets == rhs.dstOffsets ); + } + + bool operator!=( ImageBlit2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + }; + static_assert( sizeof( ImageBlit2KHR ) == sizeof( VkImageBlit2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
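+
+  // Illustrative usage sketch (editor's addition, not part of the generated header): the two
+  // offsets of each blit region span the source/destination box, so a simple downscale blit
+  // between mip levels can be described as below. The 512/256 extents are placeholders.
+  //
+  //   vk::ImageBlit2KHR region{};
+  //   region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
+  //         .setSrcOffsets( { vk::Offset3D{ 0, 0, 0 }, vk::Offset3D{ 512, 512, 1 } } )
+  //         .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 1, 0, 1 } )
+  //         .setDstOffsets( { vk::Offset3D{ 0, 0, 0 }, vk::Offset3D{ 256, 256, 1 } } );
+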
); + + template <> + struct CppType + { + using Type = ImageBlit2KHR; + }; + + struct BlitImageInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageBlit2KHR * pRegions_ = {}, + VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest ) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + , filter( filter_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BlitImageInfo2KHR( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BlitImageInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BlitImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest ) + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + , filter( filter_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR & + operator=( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BlitImageInfo2KHR & operator=( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BlitImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BlitImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + BlitImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + BlitImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + BlitImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + BlitImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + BlitImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BlitImageInfo2KHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + 
regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BlitImageInfo2KHR & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT + { + filter = filter_; + return *this; + } + + operator VkBlitImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBlitImageInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BlitImageInfo2KHR const & ) const = default; +#else + bool operator==( BlitImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && + ( pRegions == rhs.pRegions ) && ( filter == rhs.filter ); + } + + bool operator!=( BlitImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageBlit2KHR * pRegions = {}; + VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + }; + static_assert( sizeof( BlitImageInfo2KHR ) == sizeof( VkBlitImageInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
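+
+  // Illustrative usage sketch (editor's addition, not part of the generated header), assuming
+  // the VK_KHR_copy_commands2 wrapper vk::CommandBuffer::blitImage2KHR is available; the
+  // command buffer, images and the `region` from the previous example are placeholders.
+  //
+  //   vk::BlitImageInfo2KHR blitInfo( srcImage, vk::ImageLayout::eTransferSrcOptimal,
+  //                                   dstImage, vk::ImageLayout::eTransferDstOptimal,
+  //                                   region, vk::Filter::eLinear );
+  //   cmd.blitImage2KHR( blitInfo );
+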
); + + template <> + struct CppType + { + using Type = BlitImageInfo2KHR; + }; + + struct BufferCopy + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcOffset( srcOffset_ ) + , dstOffset( dstOffset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCopy( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCopy const & ) const = default; +#else + bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size ); + } + + bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
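+
+  // Illustrative usage sketch (editor's addition, not part of the generated header): a plain
+  // buffer-to-buffer copy region; cmd, src, dst and dataSize are placeholders, and the
+  // generated vk::CommandBuffer::copyBuffer overload taking an ArrayProxy of regions is assumed.
+  //
+  //   vk::BufferCopy wholeStagingRange( /*srcOffset_=*/0, /*dstOffset_=*/0, /*size_=*/dataSize );
+  //   cmd.copyBuffer( src, dst, wholeStagingRange );
+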
); + + struct BufferCopy2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCopy2KHR( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcOffset( srcOffset_ ) + , dstOffset( dstOffset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferCopy2KHR( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy2KHR( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCopy2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferCopy2KHR & operator=( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy2KHR & operator=( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferCopy2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + BufferCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + BufferCopy2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + operator VkBufferCopy2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCopy2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCopy2KHR const & ) const = default; +#else + bool operator==( BufferCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) && + ( dstOffset == rhs.dstOffset ) && ( size == rhs.size ); + } + + bool operator!=( BufferCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + static_assert( sizeof( BufferCopy2KHR ) == sizeof( VkBufferCopy2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = BufferCopy2KHR; + }; + + struct BufferCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , size( size_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, + VULKAN_HPP_NAMESPACE::DeviceSize size_, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) + : flags( flags_ ) + , size( size_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BufferCreateInfo & setQueueFamilyIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkBufferCreateInfo const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCreateInfo const & ) const = default; +#else + bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) && + ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); + } + + bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + }; + static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BufferCreateInfo; + }; + + struct BufferDeviceAddressCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceAddress( deviceAddress_ ) + {} + + VULKAN_HPP_CONSTEXPR + BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & + operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferDeviceAddressCreateInfoEXT & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default; +#else + bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress 
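+
+  // Illustrative usage sketch (editor's addition, not part of the generated header) for the
+  // BufferCreateInfo wrapper above, assuming the usual vk::Device::createBuffer wrapper;
+  // `device` and the 64 KiB size are placeholders. With eExclusive sharing the
+  // queue-family-index list can be left empty.
+  //
+  //   vk::BufferCreateInfo stagingInfo( {}, 64 * 1024,
+  //                                     vk::BufferUsageFlagBits::eTransferSrc,
+  //                                     vk::SharingMode::eExclusive );
+  //   vk::Buffer staging = device.createBuffer( stagingInfo );
+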
== rhs.deviceAddress ); + } + + bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BufferDeviceAddressCreateInfoEXT; + }; + + struct BufferDeviceAddressInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferDeviceAddressInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & + operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressInfo const & ) const = default; +#else + bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); + } + + bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + static_assert( sizeof( BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
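+
+  // Illustrative usage sketch (editor's addition, not part of the generated header), assuming
+  // the core Vulkan 1.2 wrapper vk::Device::getBufferAddress; `device` and `buffer` are
+  // placeholders, and the buffer must have been created with the eShaderDeviceAddress usage.
+  //
+  //   vk::DeviceAddress addr = device.getBufferAddress( vk::BufferDeviceAddressInfo( buffer ) );
+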
); + + template <> + struct CppType + { + using Type = BufferDeviceAddressInfo; + }; + using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; + using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; + + struct BufferImageCopy + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferOffset( bufferOffset_ ) + , bufferRowLength( bufferRowLength_ ) + , bufferImageHeight( bufferImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferImageCopy( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + bufferOffset = bufferOffset_; + return *this; + } + + BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + BufferImageCopy & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferImageCopy const & ) const = default; +#else + bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && + ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && + ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + 
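+
+  // Illustrative usage sketch (editor's addition, not part of the generated header): uploading
+  // a tightly packed image from a staging buffer; bufferRowLength/bufferImageHeight of 0 mean
+  // "tightly packed". The handles and the 256x256 extent are placeholders, and the generated
+  // vk::CommandBuffer::copyBufferToImage overload taking an ArrayProxy of regions is assumed.
+  //
+  //   vk::BufferImageCopy upload( /*bufferOffset_=*/0, /*bufferRowLength_=*/0, /*bufferImageHeight_=*/0,
+  //                               { vk::ImageAspectFlagBits::eColor, 0, 0, 1 },
+  //                               { 0, 0, 0 }, { 256, 256, 1 } );
+  //   cmd.copyBufferToImage( staging, image, vk::ImageLayout::eTransferDstOptimal, upload );
+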
static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct BufferImageCopy2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferOffset( bufferOffset_ ) + , bufferRowLength( bufferRowLength_ ) + , bufferImageHeight( bufferImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy2KHR( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferImageCopy2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2KHR & + operator=( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy2KHR & operator=( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferImageCopy2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferImageCopy2KHR & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + bufferOffset = bufferOffset_; + return *this; + } + + BufferImageCopy2KHR & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + BufferImageCopy2KHR & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + BufferImageCopy2KHR & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + BufferImageCopy2KHR & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + BufferImageCopy2KHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + operator VkBufferImageCopy2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferImageCopy2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferImageCopy2KHR const & ) const = default; +#else + bool operator==( BufferImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) && + ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && + ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( 
BufferImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + static_assert( sizeof( BufferImageCopy2KHR ) == sizeof( VkBufferImageCopy2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BufferImageCopy2KHR; + }; + + struct BufferMemoryBarrier + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferMemoryBarrier( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & + operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + operator VkBufferMemoryBarrier const &() const 
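+
+  // Illustrative usage sketch (editor's addition, not part of the generated header): guarding a
+  // transfer write before a shader read with the classic vk::CommandBuffer::pipelineBarrier
+  // overload taking ArrayProxies of barriers; VK_WHOLE_SIZE covers the entire buffer, and
+  // cmd/buffer are placeholders.
+  //
+  //   vk::BufferMemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite,
+  //                                    vk::AccessFlagBits::eShaderRead,
+  //                                    VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
+  //                                    buffer, 0, VK_WHOLE_SIZE );
+  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
+  //                        vk::PipelineStageFlagBits::eFragmentShader,
+  //                        {}, nullptr, barrier, nullptr );
+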
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryBarrier const & ) const = default; +#else + bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && + ( offset == rhs.offset ) && ( size == rhs.size ); + } + + bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BufferMemoryBarrier; + }; + + struct BufferMemoryBarrier2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcStageMask( srcStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstStageMask( dstStageMask_ ) + , dstAccessMask( dstAccessMask_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2KHR( BufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier2KHR( VkBufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferMemoryBarrier2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2KHR & + operator=( BufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier2KHR & operator=( VkBufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferMemoryBarrier2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferMemoryBarrier2KHR & + setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT 
+ { + srcStageMask = srcStageMask_; + return *this; + } + + BufferMemoryBarrier2KHR & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + BufferMemoryBarrier2KHR & + setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + BufferMemoryBarrier2KHR & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + BufferMemoryBarrier2KHR & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier2KHR & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier2KHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + BufferMemoryBarrier2KHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + BufferMemoryBarrier2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + operator VkBufferMemoryBarrier2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryBarrier2KHR const & ) const = default; +#else + bool operator==( BufferMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && + ( offset == rhs.offset ) && ( size == rhs.size ); + } + + bool operator!=( BufferMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {}; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + static_assert( sizeof( BufferMemoryBarrier2KHR ) == sizeof( VkBufferMemoryBarrier2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = BufferMemoryBarrier2KHR; + }; + + struct BufferMemoryRequirementsInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR + BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & + operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default; +#else + bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); + } + + bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = BufferMemoryRequirementsInfo2; + }; + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; + + struct BufferOpaqueCaptureAddressCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eBufferOpaqueCaptureAddressCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : opaqueCaptureAddress( opaqueCaptureAddress_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & + operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferOpaqueCaptureAddressCreateInfo & + operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT + { + opaqueCaptureAddress = opaqueCaptureAddress_; + return *this; + } + + operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default; +#else + bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); + } + + bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; + const void * pNext = {}; + uint64_t opaqueCaptureAddress = {}; + }; + static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = BufferOpaqueCaptureAddressCreateInfo; + }; + using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; + + struct BufferViewCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , buffer( buffer_ ) + , format( format_ ) + , offset( offset_ ) + , range( range_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferViewCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & + operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT + { + range = range_; + return *this; + } + + operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferViewCreateInfo const & ) const = default; +#else + bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) && + ( format == rhs.format ) && ( offset == rhs.offset ) && ( range == rhs.range ); + } + + bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + }; + static_assert( sizeof( BufferViewCreateInfo ) == sizeof( 
VkBufferViewCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BufferViewCreateInfo; + }; + + struct CalibratedTimestampInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = + VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT + : timeDomain( timeDomain_ ) + {} + + VULKAN_HPP_CONSTEXPR + CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CalibratedTimestampInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & + operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CalibratedTimestampInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CalibratedTimestampInfoEXT & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT + { + timeDomain = timeDomain_; + return *this; + } + + operator VkCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CalibratedTimestampInfoEXT const & ) const = default; +#else + bool operator==( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain ); + } + + bool operator!=( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice; + }; + static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = CalibratedTimestampInfoEXT; + }; + + struct CheckpointData2NV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2Nv; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage_ = {}, + void * pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT + : stage( stage_ ) + , pCheckpointMarker( pCheckpointMarker_ ) + {} + + VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT + : CheckpointData2NV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CheckpointData2NV & + operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CheckpointData2NV const & ) const = default; +#else + bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && + ( pCheckpointMarker == rhs.pCheckpointMarker ); + } + + bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2Nv; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage = {}; + void * pCheckpointMarker = {}; + }; + static_assert( sizeof( CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = CheckpointData2NV; + }; + + struct CheckpointDataNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CheckpointDataNV( + VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, + void * pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT + : stage( stage_ ) + , pCheckpointMarker( pCheckpointMarker_ ) + {} + + VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CheckpointDataNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CheckpointDataNV const & ) const = default; +#else + bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && + ( pCheckpointMarker == rhs.pCheckpointMarker ); + } + + bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe; + void * pCheckpointMarker = {}; + }; + static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = CheckpointDataNV; + }; + + union ClearColorValue + { + ClearColorValue( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) ); + } + + ClearColorValue( const std::array & float32_ = {} ) : float32( float32_ ) {} + + ClearColorValue( const std::array & int32_ ) : int32( int32_ ) {} + + ClearColorValue( const std::array & uint32_ ) : uint32( uint32_ ) {} + + ClearColorValue & setFloat32( std::array float32_ ) VULKAN_HPP_NOEXCEPT + { + float32 = float32_; + return *this; + } + + ClearColorValue & setInt32( std::array int32_ ) VULKAN_HPP_NOEXCEPT + { + int32 = int32_; + return *this; + } + + ClearColorValue & setUint32( std::array uint32_ ) VULKAN_HPP_NOEXCEPT + { + uint32 = uint32_; + return *this; + } + + VULKAN_HPP_NAMESPACE::ClearColorValue & + operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) ); + return *this; + } + + operator VkClearColorValue const &() const + { + return *reinterpret_cast( this ); + } + + operator VkClearColorValue &() + { + return *reinterpret_cast( this ); + } + + VULKAN_HPP_NAMESPACE::ArrayWrapper1D float32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D int32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uint32; + }; + + struct ClearDepthStencilValue + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT + : depth( depth_ ) + , stencil( stencil_ ) + {} + + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + : ClearDepthStencilValue( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & + operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT + { + depth = depth_; + return *this; + } + + ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT + { + stencil = stencil_; + return *this; + } + + operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClearDepthStencilValue const & ) const = default; +#else + bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( depth == rhs.depth ) && ( stencil == rhs.stencil ); + } + + bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float depth = {}; + uint32_t stencil = {}; + }; + static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  union ClearValue
+  {
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
+    }
+
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) : color( color_ ) {}
+
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {}
+
+    ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+
+    ClearValue &
+      setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthStencil = depthStencil_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::ClearValue & operator=( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
+      return *this;
+    }
+
+    operator VkClearValue const &() const
+    {
+      return *reinterpret_cast<const VkClearValue *>( this );
+    }
+
+    operator VkClearValue &()
+    {
+      return *reinterpret_cast<VkClearValue *>( this );
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::ClearColorValue        color;
+    VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
+#else
+    VkClearColorValue        color;
+    VkClearDepthStencilValue depthStencil;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct ClearAttachment
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_      = {},
+                     uint32_t                               colorAttachment_ = {},
+                     VULKAN_HPP_NAMESPACE::ClearValue       clearValue_      = {} ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( aspectMask_ )
+      , colorAttachment( colorAttachment_ )
+      , clearValue( clearValue_ )
+    {}
+
+    ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>( &rhs );
+      return *this;
+    }
+
+    ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachment = colorAttachment_;
+      return *this;
+    }
+
+    ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValue = clearValue_;
+      return *this;
+    }
+
+    operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearAttachment *>( this );
+    }
+
+    operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearAttachment *>( this );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask      = {};
+    uint32_t                               colorAttachment = {};
+    VULKAN_HPP_NAMESPACE::ClearValue       clearValue      = {};
+  };
+  static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ClearAttachment>::value, "struct wrapper is not a standard layout!" );
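Editorial note, not part of the generated header: a minimal usage sketch for the clear-value wrappers defined above. It fills one color and one depth/stencil clear value and plugs them into a vk::RenderPassBeginInfo (declared elsewhere in this header). The render pass, framebuffer, and extent parameters are placeholders supplied by the caller, and the clearValues array must outlive the returned struct because only a pointer to it is stored.

#include <array>
#include <vulkan/vulkan.hpp>

// Builds a RenderPassBeginInfo for a pass with one color and one depth/stencil attachment.
vk::RenderPassBeginInfo makeRenderPassBegin( vk::RenderPass renderPass, vk::Framebuffer framebuffer,
                                             vk::Extent2D extent, std::array<vk::ClearValue, 2> & clearValues )
{
  clearValues[0].setColor( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) );
  clearValues[1].setDepthStencil( vk::ClearDepthStencilValue( 1.0f, 0 ) );

  vk::RenderPassBeginInfo beginInfo;
  beginInfo.setRenderPass( renderPass )
    .setFramebuffer( framebuffer )
    .setRenderArea( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) )
    .setClearValueCount( static_cast<uint32_t>( clearValues.size() ) )
    .setPClearValues( clearValues.data() );
  return beginInfo;
}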
+
+  struct ClearRect
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_           = {},
+                                    uint32_t                     baseArrayLayer_ = {},
+                                    uint32_t                     layerCount_     = {} ) VULKAN_HPP_NOEXCEPT
+      : rect( rect_ )
+      , baseArrayLayer( baseArrayLayer_ )
+      , layerCount( layerCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast<ClearRect const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>( &rhs );
+      return *this;
+    }
+
+    ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rect = rect_;
+      return *this;
+    }
+
+    ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearRect *>( this );
+    }
+
+    operator VkClearRect &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearRect *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ClearRect const & ) const = default;
+#else
+    bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Rect2D rect           = {};
+    uint32_t                     baseArrayLayer = {};
+    uint32_t                     layerCount     = {};
+  };
+  static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ClearRect>::value, "struct wrapper is not a standard layout!"
);
+
+  struct CoarseSampleLocationNV
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pixelX( pixelX_ )
+      , pixelY( pixelY_ )
+      , sample( sample_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CoarseSampleLocationNV( *reinterpret_cast<CoarseSampleLocationNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV &
+      operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>( &rhs );
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pixelX = pixelX_;
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pixelY = pixelY_;
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sample = sample_;
+      return *this;
+    }
+
+    operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCoarseSampleLocationNV *>( this );
+    }
+
+    operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCoarseSampleLocationNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CoarseSampleLocationNV const & ) const = default;
+#else
+    bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample );
+    }
+
+    bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t pixelX = {};
+    uint32_t pixelY = {};
+    uint32_t sample = {};
+  };
+  static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!"
); + + struct CoarseSampleOrderCustomNV + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, + uint32_t sampleCount_ = {}, + uint32_t sampleLocationCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRate( shadingRate_ ) + , sampleCount( sampleCount_ ) + , sampleLocationCount( sampleLocationCount_ ) + , pSampleLocations( pSampleLocations_ ) + {} + + VULKAN_HPP_CONSTEXPR + CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CoarseSampleOrderCustomNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV( + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, + uint32_t sampleCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) + : shadingRate( shadingRate_ ) + , sampleCount( sampleCount_ ) + , sampleLocationCount( static_cast( sampleLocations_.size() ) ) + , pSampleLocations( sampleLocations_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & + operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CoarseSampleOrderCustomNV & + setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + { + shadingRate = shadingRate_; + return *this; + } + + CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleCount = sampleCount_; + return *this; + } + + CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationCount = sampleLocationCount_; + return *this; + } + + CoarseSampleOrderCustomNV & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pSampleLocations = pSampleLocations_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV & setSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationCount = static_cast( sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default; +#else + bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && + ( sampleLocationCount == rhs.sampleLocationCount ) && ( pSampleLocations == rhs.pSampleLocations ); + } + + bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + 
return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; + uint32_t sampleCount = {}; + uint32_t sampleLocationCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {}; + }; + static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + class CommandPool + { + public: + using CType = VkCommandPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + + public: + VULKAN_HPP_CONSTEXPR CommandPool() = default; + VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT + : m_commandPool( commandPool ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CommandPool & operator=( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = commandPool; + return *this; + } +#endif + + CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandPool const & ) const = default; +#else + bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool == rhs.m_commandPool; + } + + bool operator!=( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool != rhs.m_commandPool; + } + + bool operator<( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool < rhs.m_commandPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool == VK_NULL_HANDLE; + } + + private: + VkCommandPool m_commandPool = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct CommandBufferAllocateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( + VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, + VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, + uint32_t commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT + : commandPool( commandPool_ ) + , level( level_ ) + , commandBufferCount( commandBufferCount_ ) + {} + + VULKAN_HPP_CONSTEXPR + CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferAllocateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & + operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT + { + commandPool = commandPool_; + return *this; + } + + CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT + { + level = level_; + return *this; + } + + CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferAllocateInfo const & ) const = default; +#else + bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && + ( level == rhs.level ) && ( commandBufferCount == rhs.commandBufferCount ); + } + + bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; + uint32_t commandBufferCount = {}; + }; + static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CommandBufferAllocateInfo; + }; + + class RenderPass + { + public: + using CType = VkRenderPass; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + + public: + VULKAN_HPP_CONSTEXPR RenderPass() = default; + VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT : m_renderPass( renderPass ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + RenderPass & operator=( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = renderPass; + return *this; + } +#endif + + RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPass const & ) const = default; +#else + bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass == rhs.m_renderPass; + } + + bool operator!=( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass != rhs.m_renderPass; + } + + bool operator<( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass < rhs.m_renderPass; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass == VK_NULL_HANDLE; + } + + private: + VkRenderPass m_renderPass = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Framebuffer + { + public: + using CType = VkFramebuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + + public: + VULKAN_HPP_CONSTEXPR Framebuffer() = default; + VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT + : m_framebuffer( framebuffer ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Framebuffer & operator=( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = framebuffer; + return *this; + } +#endif + + Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Framebuffer const & ) const = default; +#else + bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer == rhs.m_framebuffer; + } + + bool operator!=( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != rhs.m_framebuffer; + } + + bool operator<( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer < rhs.m_framebuffer; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer == VK_NULL_HANDLE; + } + + private: + VkFramebuffer m_framebuffer = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct CommandBufferInheritanceInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, + VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT + : renderPass( renderPass_ ) + , subpass( subpass_ ) + , framebuffer( framebuffer_ ) + , occlusionQueryEnable( occlusionQueryEnable_ ) + , queryFlags( queryFlags_ ) + , pipelineStatistics( pipelineStatistics_ ) + {} + + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & + operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + { + subpass = subpass_; + return *this; + } + + CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT + { + framebuffer = framebuffer_; + return *this; + } + + CommandBufferInheritanceInfo & + setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT + { + occlusionQueryEnable = occlusionQueryEnable_; + return *this; + } + + CommandBufferInheritanceInfo & + setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT + { + queryFlags = queryFlags_; + return *this; + } + + CommandBufferInheritanceInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatistics = pipelineStatistics_; + return *this; + } + + operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceInfo const & ) const = default; +#else + bool operator==( CommandBufferInheritanceInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) &&
+             ( subpass == rhs.subpass ) && ( framebuffer == rhs.framebuffer ) &&
+             ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) &&
+             ( pipelineStatistics == rhs.pipelineStatistics );
+    }
+
+    bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType               sType                = StructureType::eCommandBufferInheritanceInfo;
+    const void *                                      pNext                = {};
+    VULKAN_HPP_NAMESPACE::RenderPass                  renderPass           = {};
+    uint32_t                                          subpass              = {};
+    VULKAN_HPP_NAMESPACE::Framebuffer                 framebuffer          = {};
+    VULKAN_HPP_NAMESPACE::Bool32                      occlusionQueryEnable = {};
+    VULKAN_HPP_NAMESPACE::QueryControlFlags           queryFlags           = {};
+    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics   = {};
+  };
+  static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferInheritanceInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
+  {
+    using Type = CommandBufferInheritanceInfo;
+  };
+
+  struct CommandBufferBeginInfo
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(
+      VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags               flags_            = {},
+      const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo *  pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pInheritanceInfo( pInheritanceInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferBeginInfo( *reinterpret_cast<CommandBufferBeginInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo &
+      operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+    CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    CommandBufferBeginInfo & setPInheritanceInfo(
+      const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInheritanceInfo = pInheritanceInfo_;
+      return *this;
+    }
+
+    operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferBeginInfo *>( this );
+    }
+
+    operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferBeginInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CommandBufferBeginInfo const & ) const = default;
+#else
+    bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( pInheritanceInfo == rhs.pInheritanceInfo );
+    }
+
+    bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                        sType            = StructureType::eCommandBufferBeginInfo;
+    const void *                                               pNext            = {};
+    VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags              flags            = {};
+    const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {};
+  };
+  static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
+  {
+    using Type = CommandBufferBeginInfo;
+  };
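Editorial note, not part of the generated header: a short sketch of how CommandBufferAllocateInfo and CommandBufferBeginInfo from this section are typically combined. It assumes exceptions are enabled and uses the enhanced-mode member functions (Device::allocateCommandBuffers, CommandBuffer::begin) defined later in this header; `device` and `pool` are assumed to be valid handles created elsewhere, and error handling is omitted.

#include <vulkan/vulkan.hpp>

// Allocates one primary command buffer from `pool` and begins recording it for a single submission.
vk::CommandBuffer beginOneTimeCommands( vk::Device device, vk::CommandPool pool )
{
  vk::CommandBufferAllocateInfo allocInfo( pool, vk::CommandBufferLevel::ePrimary, 1 );
  vk::CommandBuffer commandBuffer = device.allocateCommandBuffers( allocInfo ).front();

  vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
  commandBuffer.begin( beginInfo );
  return commandBuffer;
}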
+
+  struct CommandBufferInheritanceConditionalRenderingInfoEXT
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+      : conditionalRenderingEnable( conditionalRenderingEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(
+      CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT(
+      VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferInheritanceConditionalRenderingInfoEXT(
+          *reinterpret_cast<CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT &
+      operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT &
+      operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT &
+      setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conditionalRenderingEnable = conditionalRenderingEnable_;
+      return *this;
+    }
+
+    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
+    }
+
+    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
+    }
+
+    bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+    const void *                        pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32        conditionalRenderingEnable = {};
+  };
+  static_assert( sizeof(
CommandBufferInheritanceConditionalRenderingInfoEXT ) == + sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CommandBufferInheritanceConditionalRenderingInfoEXT; + }; + + struct CommandBufferInheritanceRenderPassTransformInfoQCOM + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {} ) VULKAN_HPP_NOEXCEPT + : transform( transform_ ) + , renderArea( renderArea_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( + CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderPassTransformInfoQCOM( + VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceRenderPassTransformInfoQCOM( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & + operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderPassTransformInfoQCOM & + operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CommandBufferInheritanceRenderPassTransformInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + CommandBufferInheritanceRenderPassTransformInfoQCOM & + setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + { + renderArea = renderArea_; + return *this; + } + + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default; +#else + bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && + ( renderArea == rhs.renderArea ); + } + + bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + 
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + }; + static_assert( sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) == + sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM; + }; + + struct Viewport + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Viewport( float x_ = {}, + float y_ = {}, + float width_ = {}, + float height_ = {}, + float minDepth_ = {}, + float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + , width( width_ ) + , height( height_ ) + , minDepth( minDepth_ ) + , maxDepth( maxDepth_ ) + {} + + VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast( &rhs ) ) {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT + { + minDepth = minDepth_; + return *this; + } + + Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxDepth = maxDepth_; + return *this; + } + + operator VkViewport const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViewport &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Viewport const & ) const = default; +#else + bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( minDepth == rhs.minDepth ) && ( maxDepth == rhs.maxDepth ); + } + + bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float x = {}; + float y = {}; + float width = {}; + float height = {}; + float minDepth = {}; + float maxDepth = {}; + }; + static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct CommandBufferInheritanceViewportScissorInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCommandBufferInheritanceViewportScissorInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ = {}, + uint32_t viewportDepthCount_ = {}, + const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {} ) VULKAN_HPP_NOEXCEPT + : viewportScissor2D( viewportScissor2D_ ) + , viewportDepthCount( viewportDepthCount_ ) + , pViewportDepths( pViewportDepths_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( + CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceViewportScissorInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & + operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceViewportScissorInfoNV & + operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CommandBufferInheritanceViewportScissorInfoNV & + setViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT + { + viewportScissor2D = viewportScissor2D_; + return *this; + } + + CommandBufferInheritanceViewportScissorInfoNV & + setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportDepthCount = viewportDepthCount_; + return *this; + } + + CommandBufferInheritanceViewportScissorInfoNV & + setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT + { + pViewportDepths = pViewportDepths_; + return *this; + } + + operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default; +#else + bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) && + ( viewportDepthCount == rhs.viewportDepthCount ) && ( pViewportDepths == rhs.pViewportDepths ); + } + + bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D = {}; + uint32_t viewportDepthCount = {}; + const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths = {}; + }; + static_assert( sizeof( CommandBufferInheritanceViewportScissorInfoNV ) == + sizeof( 
VkCommandBufferInheritanceViewportScissorInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CommandBufferInheritanceViewportScissorInfoNV; + }; + + struct ConditionalRenderingBeginInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , offset( offset_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ConditionalRenderingBeginInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & + operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + ConditionalRenderingBeginInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default; +#else + bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && + ( flags == rhs.flags ); + } + + bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; + }; + static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ConditionalRenderingBeginInfoEXT; + }; + + struct DebugUtilsLabelEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char * pLabelName_ = {}, + std::array const & color_ = {} ) VULKAN_HPP_NOEXCEPT + : pLabelName( pLabelName_ ) + , color( color_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsLabelEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & + operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT + { + pLabelName = pLabelName_; + return *this; + } + + DebugUtilsLabelEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT + { + color = color_; + return *this; + } + + operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsLabelEXT const & ) const = default; +#else + bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pLabelName == rhs.pLabelName ) && + ( color == rhs.color ); + } + + bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT; + const void * pNext = {}; + const char * pLabelName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + }; + static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
  );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
+  {
+    using Type = DebugUtilsLabelEXT;
+  };
+
+  class QueryPool
+  {
+  public:
+    using CType = VkQueryPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR QueryPool() = default;
+    VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT : m_queryPool( queryPool ) {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    QueryPool & operator=( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
+    {
+      m_queryPool = queryPool;
+      return *this;
+    }
+#endif
+
+    QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_queryPool = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( QueryPool const & ) const = default;
+#else
+    bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool == rhs.m_queryPool;
+    }
+
+    bool operator!=( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool != rhs.m_queryPool;
+    }
+
+    bool operator<( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool < rhs.m_queryPool;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueryPool m_queryPool = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ),
+                 "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eQueryPool>
+  {
+    using type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::QueryPool>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct RenderPassBeginInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+                           VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
+                           VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
+                           uint32_t clearValueCount_ = {},
+                           const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {} ) VULKAN_HPP_NOEXCEPT
+      : renderPass( renderPass_ )
+      , framebuffer( framebuffer_ )
+      , renderArea( renderArea_ )
+      , clearValueCount( clearValueCount_ )
+      , pClearValues( pClearValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassBeginInfo(
+      VULKAN_HPP_NAMESPACE::RenderPass renderPass_,
+      VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_,
+      VULKAN_HPP_NAMESPACE::Rect2D renderArea_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
+      : renderPass( renderPass_ )
+      , framebuffer( framebuffer_ )
+      , renderArea( renderArea_ )
+      , clearValueCount( static_cast<uint32_t>( clearValues_.size() ) )
+      , pClearValues( clearValues_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &
+      operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<RenderPassBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderArea = renderArea_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValueCount = clearValueCount_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pClearValues = pClearValues_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassBeginInfo & setClearValues(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      clearValueCount = static_cast<uint32_t>( clearValues_.size() );
+      pClearValues = clearValues_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassBeginInfo *>( this );
+    }
+
+    operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassBeginInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassBeginInfo const & ) const = default;
+#else
+    bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) &&
+             ( framebuffer == rhs.framebuffer ) && ( renderArea == rhs.renderArea ) &&
+             ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues );
+    }
+
+    bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+    VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+    uint32_t clearValueCount = {};
+    const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {};
+  };
+  static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
+  {
+    using Type = RenderPassBeginInfo;
+  };
+
+  struct SubpassBeginInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ =
+                                             VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT
+      : contents( contents_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<SubpassBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+    SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
+    {
+      contents = contents_;
+      return *this;
+    }
+
+    operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassBeginInfo *>( this );
+    }
+
+    operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassBeginInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SubpassBeginInfo const & ) const = default;
+#else
+    bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == rhs.contents );
+    }
+
+    bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
+  };
static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SubpassBeginInfo; + }; + using SubpassBeginInfoKHR = SubpassBeginInfo; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class VideoSessionKHR + { + public: + using CType = VkVideoSessionKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR VideoSessionKHR() = default; + VULKAN_HPP_CONSTEXPR VideoSessionKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionKHR( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT + : m_videoSessionKHR( videoSessionKHR ) + {} + +# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + VideoSessionKHR & operator=( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionKHR = videoSessionKHR; + return *this; + } +# endif + + VideoSessionKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionKHR = {}; + return *this; + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionKHR const & ) const = default; +# else + bool operator==( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR == rhs.m_videoSessionKHR; + } + + bool operator!=( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR != rhs.m_videoSessionKHR; + } + + bool operator<( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR < rhs.m_videoSessionKHR; + } +# endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR == VK_NULL_HANDLE; + } + + private: + VkVideoSessionKHR m_videoSessionKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionKHR ) == sizeof( VkVideoSessionKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class VideoSessionParametersKHR + { + public: + using CType = VkVideoSessionParametersKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR() = default; + VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + VideoSessionParametersKHR( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + : m_videoSessionParametersKHR( videoSessionParametersKHR ) + {} + +# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + VideoSessionParametersKHR & operator=( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = videoSessionParametersKHR; + return *this; + } +# endif + + VideoSessionParametersKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = {}; + return *this; + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionParametersKHR const & ) const = default; +# else + bool operator==( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR == rhs.m_videoSessionParametersKHR; + } + + bool operator!=( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR != rhs.m_videoSessionParametersKHR; + } + + bool operator<( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR < rhs.m_videoSessionParametersKHR; + } +# endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR == VK_NULL_HANDLE; + } + + private: + VkVideoSessionParametersKHR m_videoSessionParametersKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) == sizeof( VkVideoSessionParametersKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class ImageView + { + public: + using CType = VkImageView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eImageView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + + public: + VULKAN_HPP_CONSTEXPR ImageView() = default; + VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT : m_imageView( imageView ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ImageView & operator=( VkImageView imageView ) VULKAN_HPP_NOEXCEPT + { + m_imageView = imageView; + return *this; + } +#endif + + ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_imageView = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageView const & ) const = default; +#else + bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView == rhs.m_imageView; + } + + bool operator!=( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView != rhs.m_imageView; + } + + bool operator<( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView < rhs.m_imageView; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_imageView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_imageView == VK_NULL_HANDLE; + } + + private: + VkImageView m_imageView = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoPictureResourceKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoPictureResourceKHR( VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + uint32_t baseArrayLayer_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {} ) VULKAN_HPP_NOEXCEPT + : codedOffset( codedOffset_ ) + , codedExtent( codedExtent_ ) + , baseArrayLayer( baseArrayLayer_ ) + , imageViewBinding( imageViewBinding_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoPictureResourceKHR( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoPictureResourceKHR( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoPictureResourceKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & + operator=( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoPictureResourceKHR & operator=( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoPictureResourceKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoPictureResourceKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT + { + codedOffset = codedOffset_; + return *this; + } + + VideoPictureResourceKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT + { + codedExtent = codedExtent_; + return *this; + } + + VideoPictureResourceKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VideoPictureResourceKHR & + setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT + { + imageViewBinding = imageViewBinding_; + return *this; + } + + operator VkVideoPictureResourceKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoPictureResourceKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoPictureResourceKHR const & ) const = default; +# else + bool operator==( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( codedOffset == rhs.codedOffset ) && + ( codedExtent == rhs.codedExtent ) && ( baseArrayLayer == rhs.baseArrayLayer ) && + ( imageViewBinding == rhs.imageViewBinding ); + } + + bool operator!=( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + uint32_t 
baseArrayLayer = {}; + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {}; + }; + static_assert( sizeof( VideoPictureResourceKHR ) == sizeof( VkVideoPictureResourceKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoPictureResourceKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoReferenceSlotKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR( + int8_t slotIndex_ = {}, + const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ = {} ) VULKAN_HPP_NOEXCEPT + : slotIndex( slotIndex_ ) + , pPictureResource( pPictureResource_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoReferenceSlotKHR( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoReferenceSlotKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR & + operator=( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoReferenceSlotKHR & operator=( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoReferenceSlotKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoReferenceSlotKHR & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT + { + slotIndex = slotIndex_; + return *this; + } + + VideoReferenceSlotKHR & + setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + pPictureResource = pPictureResource_; + return *this; + } + + operator VkVideoReferenceSlotKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoReferenceSlotKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoReferenceSlotKHR const & ) const = default; +# else + bool operator==( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && + ( pPictureResource == rhs.pPictureResource ); + } + + bool operator!=( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotKHR; + const void * pNext = {}; + int8_t slotIndex = {}; + const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource = {}; + }; + static_assert( sizeof( VideoReferenceSlotKHR ) == sizeof( VkVideoReferenceSlotKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoReferenceSlotKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoBeginCodingInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , codecQualityPreset( codecQualityPreset_ ) + , videoSession( videoSession_ ) + , videoSessionParameters( videoSessionParameters_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoBeginCodingInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoBeginCodingInfoKHR( + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) + : flags( flags_ ) + , codecQualityPreset( codecQualityPreset_ ) + , videoSession( videoSession_ ) + , videoSessionParameters( videoSessionParameters_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoBeginCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoBeginCodingInfoKHR & setCodecQualityPreset( + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ ) VULKAN_HPP_NOEXCEPT + { + codecQualityPreset = codecQualityPreset_; + return *this; + } + + VideoBeginCodingInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT + { + videoSession = videoSession_; + return *this; + } + + VideoBeginCodingInfoKHR & setVideoSessionParameters( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParameters = videoSessionParameters_; + return *this; + } + + VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } 
+ + VideoBeginCodingInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoBeginCodingInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default; +# else + bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( codecQualityPreset == rhs.codecQualityPreset ) && ( videoSession == rhs.videoSession ) && + ( videoSessionParameters == rhs.videoSessionParameters ) && + ( referenceSlotCount == rhs.referenceSlotCount ) && ( pReferenceSlots == rhs.pReferenceSlots ); + } + + bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {}; + }; + static_assert( sizeof( VideoBeginCodingInfoKHR ) == sizeof( VkVideoBeginCodingInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoBeginCodingInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class PipelineLayout + { + public: + using CType = VkPipelineLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + + public: + VULKAN_HPP_CONSTEXPR PipelineLayout() = default; + VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT + : m_pipelineLayout( pipelineLayout ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PipelineLayout & operator=( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = pipelineLayout; + return *this; + } +#endif + + PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLayout const & ) const = default; +#else + bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == rhs.m_pipelineLayout; + } + + bool operator!=( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != rhs.m_pipelineLayout; + } + + bool operator<( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout < rhs.m_pipelineLayout; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == VK_NULL_HANDLE; + } + + private: + VkPipelineLayout m_pipelineLayout = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorSet + { + public: + using CType = VkDescriptorSet; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + + public: + VULKAN_HPP_CONSTEXPR DescriptorSet() = default; + VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT + : m_descriptorSet( descriptorSet ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorSet & operator=( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = descriptorSet; + return *this; + } +#endif + + DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSet const & ) const = default; +#else + bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == rhs.m_descriptorSet; + } + + bool operator!=( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet != rhs.m_descriptorSet; + } + + bool operator<( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet < rhs.m_descriptorSet; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == VK_NULL_HANDLE; + } + + private: + VkDescriptorSet m_descriptorSet = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Pipeline + { + public: + using CType = VkPipeline; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + + public: + VULKAN_HPP_CONSTEXPR Pipeline() = default; + VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT : m_pipeline( pipeline ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Pipeline & operator=( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = pipeline; + return *this; + } +#endif + + Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Pipeline const & ) const = default; +#else + bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == rhs.m_pipeline; + } + + bool operator!=( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != rhs.m_pipeline; + } + + bool operator<( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline < rhs.m_pipeline; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == VK_NULL_HANDLE; + } + + private: + VkPipeline m_pipeline = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct ImageBlit + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffsets( srcOffsets_ ) + , dstSubresource( dstSubresource_ ) + , dstOffsets( dstOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageBlit & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageBlit & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + srcOffsets = srcOffsets_; + return *this; + } + + ImageBlit & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageBlit & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dstOffsets = dstOffsets_; + return *this; + } + + operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageBlit const & ) const = default; +#else + bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets ); + } + + bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + }; + static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ImageSubresourceRange + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t baseMipLevel_ = {}, + uint32_t levelCount_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , baseMipLevel( baseMipLevel_ ) + , levelCount( levelCount_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresourceRange( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & + operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT + { + baseMipLevel = baseMipLevel_; + return *this; + } + + ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT + { + levelCount = levelCount_; + return *this; + } + + ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresourceRange const & ) const = default; +#else + bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && + ( levelCount == rhs.levelCount ) && ( baseArrayLayer == rhs.baseArrayLayer ) && + ( layerCount == rhs.layerCount ); + } + + bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t baseMipLevel = {}; + uint32_t levelCount = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoCodingControlInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      VideoCodingControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoCodingControlInfoKHR( *reinterpret_cast<VideoCodingControlInfoKHR const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR &
+      operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoCodingControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoCodingControlInfoKHR *>( this );
+    }
+
+    operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoCodingControlInfoKHR *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoCodingControlInfoKHR const & ) const = default;
+#  else
+    bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+    }
+
+    bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCodingControlInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {};
+  };
+  static_assert( sizeof( VideoCodingControlInfoKHR ) == sizeof( VkVideoCodingControlInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoCodingControlInfoKHR>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = VideoCodingControlInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct CopyAccelerationStructureInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + VULKAN_HPP_CONSTEXPR + CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & + operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + CopyAccelerationStructureInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default; +#else + bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && + ( mode == rhs.mode ); + } + + bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + static_assert( sizeof( CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = CopyAccelerationStructureInfoKHR; + }; + + struct CopyAccelerationStructureToMemoryInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + CopyAccelerationStructureToMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) + VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureToMemoryInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyAccelerationStructureToMemoryInfoKHR & + operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureToMemoryInfoKHR & + operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & + setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + static_assert( sizeof( CopyAccelerationStructureToMemoryInfoKHR ) == + sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = CopyAccelerationStructureToMemoryInfoKHR; + }; + + struct CopyBufferInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CopyBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferCopy2KHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcBuffer( srcBuffer_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyBufferInfo2KHR( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferInfo2KHR( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyBufferInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2KHR( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcBuffer( srcBuffer_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2KHR & + operator=( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferInfo2KHR & operator=( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyBufferInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyBufferInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + CopyBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + CopyBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2KHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkCopyBufferInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyBufferInfo2KHR const & ) const = default; +#else + bool operator==( CopyBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && + ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eCopyBufferInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferCopy2KHR * pRegions = {}; + }; + static_assert( sizeof( CopyBufferInfo2KHR ) == sizeof( VkCopyBufferInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyBufferInfo2KHR; + }; + + struct CopyBufferToImageInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcBuffer( srcBuffer_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR + CopyBufferToImageInfo2KHR( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferToImageInfo2KHR( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyBufferToImageInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcBuffer( srcBuffer_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2KHR & + operator=( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferToImageInfo2KHR & operator=( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyBufferToImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyBufferToImageInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + CopyBufferToImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + CopyBufferToImageInfo2KHR & + setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + CopyBufferToImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyBufferToImageInfo2KHR & + setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2KHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries 
const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkCopyBufferToImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferToImageInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyBufferToImageInfo2KHR const & ) const = default; +#else + bool operator==( CopyBufferToImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && + ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && + ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyBufferToImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions = {}; + }; + static_assert( sizeof( CopyBufferToImageInfo2KHR ) == sizeof( VkCopyBufferToImageInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyBufferToImageInfo2KHR; + }; + + struct ImageCopy + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageCopy & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageCopy & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageCopy & setExtent( 
VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCopy const & ) const = default; +#else + bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); + } + + bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ImageCopy2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy2KHR( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageCopy2KHR( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy2KHR( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCopy2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2KHR & operator=( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy2KHR & operator=( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageCopy2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageCopy2KHR & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageCopy2KHR & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageCopy2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + operator VkImageCopy2KHR const &() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCopy2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCopy2KHR const & ) const = default; +#else + bool operator==( ImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && + ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); + } + + bool operator!=( ImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + static_assert( sizeof( ImageCopy2KHR ) == sizeof( VkImageCopy2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageCopy2KHR; + }; + + struct CopyImageInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageCopy2KHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageInfo2KHR( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2KHR & + operator=( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageInfo2KHR & operator=( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return 
*this; + } + + CopyImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + CopyImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + CopyImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + CopyImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + CopyImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2KHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkCopyImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageInfo2KHR const & ) const = default; +#else + bool operator==( CopyImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && + ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2KHR * pRegions = {}; + }; + static_assert( sizeof( CopyImageInfo2KHR ) == sizeof( VkCopyImageInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = CopyImageInfo2KHR; + }; + + struct CopyImageToBufferInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR + CopyImageToBufferInfo2KHR( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToBufferInfo2KHR( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToBufferInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2KHR & + operator=( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToBufferInfo2KHR & operator=( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyImageToBufferInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyImageToBufferInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + CopyImageToBufferInfo2KHR & + setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + CopyImageToBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + CopyImageToBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyImageToBufferInfo2KHR & + setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2KHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkCopyImageToBufferInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
auto operator<=>( CopyImageToBufferInfo2KHR const & ) const = default; +#else + bool operator==( CopyImageToBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstBuffer == rhs.dstBuffer ) && + ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyImageToBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions = {}; + }; + static_assert( sizeof( CopyImageToBufferInfo2KHR ) == sizeof( VkCopyImageToBufferInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyImageToBufferInfo2KHR; + }; + + struct CopyMemoryToAccelerationStructureInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + CopyMemoryToAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) + VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : CopyMemoryToAccelerationStructureInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMemoryToAccelerationStructureInfoKHR & + operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToAccelerationStructureInfoKHR & + operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & + setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToAccelerationStructureInfoKHR &() 
VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR *>( this );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
+    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode =
+      VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+  };
+  static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) ==
+                   sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyMemoryToAccelerationStructureInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
+  {
+    using Type = CopyMemoryToAccelerationStructureInfoKHR;
+  };
+
+  class CuFunctionNVX
+  {
+  public:
+    using CType = VkCuFunctionNVX;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX;
+
+  public:
+    VULKAN_HPP_CONSTEXPR CuFunctionNVX() = default;
+    VULKAN_HPP_CONSTEXPR CuFunctionNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT CuFunctionNVX( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT
+      : m_cuFunctionNVX( cuFunctionNVX )
+    {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    CuFunctionNVX & operator=( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT
+    {
+      m_cuFunctionNVX = cuFunctionNVX;
+      return *this;
+    }
+#endif
+
+    CuFunctionNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_cuFunctionNVX = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CuFunctionNVX const & ) const = default;
+#else
+    bool operator==( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX == rhs.m_cuFunctionNVX;
+    }
+
+    bool operator!=( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX != rhs.m_cuFunctionNVX;
+    }
+
+    bool operator<( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX < rhs.m_cuFunctionNVX;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuFunctionNVX() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCuFunctionNVX m_cuFunctionNVX = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ),
+                 "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead."
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct CuLaunchInfoNVX + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ = {}, + uint32_t gridDimX_ = {}, + uint32_t gridDimY_ = {}, + uint32_t gridDimZ_ = {}, + uint32_t blockDimX_ = {}, + uint32_t blockDimY_ = {}, + uint32_t blockDimZ_ = {}, + uint32_t sharedMemBytes_ = {}, + size_t paramCount_ = {}, + const void * const * pParams_ = {}, + size_t extraCount_ = {}, + const void * const * pExtras_ = {} ) VULKAN_HPP_NOEXCEPT + : function( function_ ) + , gridDimX( gridDimX_ ) + , gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( paramCount_ ) + , pParams( pParams_ ) + , extraCount( extraCount_ ) + , pExtras( pExtras_ ) + {} + + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuLaunchInfoNVX( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CuLaunchInfoNVX & setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT + { + function = function_; + return *this; + } + + CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT + { + gridDimX = gridDimX_; + return *this; + } + + CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT + { + gridDimY = gridDimY_; + return *this; + } + + CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT + { + gridDimZ = gridDimZ_; + return *this; + } + + CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT + { + blockDimX = blockDimX_; + return *this; + } + + CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT + { + blockDimY = blockDimY_; + return *this; + } + + CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT + { + blockDimZ = blockDimZ_; + return *this; + } + + CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT + { + sharedMemBytes = sharedMemBytes_; + return *this; + } + + CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = paramCount_; + return *this; + } + + CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT + { + pParams = pParams_; + return *this; + } + + CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extraCount_; + return *this; + } + + CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) 
VULKAN_HPP_NOEXCEPT + { + pExtras = pExtras_; + return *this; + } + + operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuLaunchInfoNVX const & ) const = default; +#else + bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && + ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && ( gridDimZ == rhs.gridDimZ ) && + ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && + ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && + ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && ( pExtras == rhs.pExtras ); + } + + bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuLaunchInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX function = {}; + uint32_t gridDimX = {}; + uint32_t gridDimY = {}; + uint32_t gridDimZ = {}; + uint32_t blockDimX = {}; + uint32_t blockDimY = {}; + uint32_t blockDimZ = {}; + uint32_t sharedMemBytes = {}; + size_t paramCount = {}; + const void * const * pParams = {}; + size_t extraCount = {}; + const void * const * pExtras = {}; + }; + static_assert( sizeof( CuLaunchInfoNVX ) == sizeof( VkCuLaunchInfoNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eCuLaunchInfoNVX>
+  {
+    using Type = CuLaunchInfoNVX;
+  };
+
+  struct DebugMarkerMarkerInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {},
+                                                      std::array<float, 4> const & color_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pMarkerName( pMarkerName_ )
+      , color( color_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT &
+      operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMarkerName = pMarkerName_;
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setColor( std::array<float, 4> color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+
+    operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( this );
+    }
+
+    operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DebugMarkerMarkerInfoEXT const & ) const = default;
+#else
+    bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pMarkerName == rhs.pMarkerName ) &&
+             ( color == rhs.color );
+    }
+
+    bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
+    const void * pNext = {};
+    const char * pMarkerName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+  };
+  static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = DebugMarkerMarkerInfoEXT; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , codedOffset( codedOffset_ ) + , codedExtent( codedExtent_ ) + , srcBuffer( srcBuffer_ ) + , srcBufferOffset( srcBufferOffset_ ) + , srcBufferRange( srcBufferRange_ ) + , dstPictureResource( dstPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR( + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::Offset2D codedOffset_, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) + : flags( flags_ ) + , codedOffset( codedOffset_ ) + , codedExtent( codedExtent_ ) + , srcBuffer( srcBuffer_ ) + , srcBufferOffset( srcBufferOffset_ ) + , srcBufferRange( srcBufferRange_ ) + , dstPictureResource( dstPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoDecodeInfoKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT + { + codedOffset = codedOffset_; + return *this; + } + + VideoDecodeInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT + 
{ + codedExtent = codedExtent_; + return *this; + } + + VideoDecodeInfoKHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + VideoDecodeInfoKHR & setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcBufferOffset = srcBufferOffset_; + return *this; + } + + VideoDecodeInfoKHR & setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT + { + srcBufferRange = srcBufferRange_; + return *this; + } + + VideoDecodeInfoKHR & setDstPictureResource( + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + dstPictureResource = dstPictureResource_; + return *this; + } + + VideoDecodeInfoKHR & setPSetupReferenceSlot( + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT + { + pSetupReferenceSlot = pSetupReferenceSlot_; + return *this; + } + + VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VideoDecodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeInfoKHR const & ) const = default; +# else + bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( codedOffset == rhs.codedOffset ) && ( codedExtent == rhs.codedExtent ) && + ( srcBuffer == rhs.srcBuffer ) && ( srcBufferOffset == rhs.srcBufferOffset ) && + ( srcBufferRange == rhs.srcBufferRange ) && ( dstPictureResource == rhs.dstPictureResource ) && + ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ); + } + + bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {}; + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {}; + }; + static_assert( sizeof( VideoDecodeInfoKHR ) == sizeof( 
VkVideoDecodeInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {}, + uint32_t qualityLevel_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , qualityLevel( qualityLevel_ ) + , codedExtent( codedExtent_ ) + , dstBitstreamBuffer( dstBitstreamBuffer_ ) + , dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ) + , dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ) + , srcPictureResource( srcPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_, + uint32_t qualityLevel_, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_, + VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) + : flags( flags_ ) + , qualityLevel( qualityLevel_ ) + , codedExtent( codedExtent_ ) + , dstBitstreamBuffer( dstBitstreamBuffer_ ) + , dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ) + , dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ) + , srcPictureResource( srcPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) 
VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoEncodeInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT + { + qualityLevel = qualityLevel_; + return *this; + } + + VideoEncodeInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT + { + codedExtent = codedExtent_; + return *this; + } + + VideoEncodeInfoKHR & setDstBitstreamBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBitstreamBuffer = dstBitstreamBuffer_; + return *this; + } + + VideoEncodeInfoKHR & + setDstBitstreamBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstBitstreamBufferOffset = dstBitstreamBufferOffset_; + return *this; + } + + VideoEncodeInfoKHR & + setDstBitstreamBufferMaxRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ ) VULKAN_HPP_NOEXCEPT + { + dstBitstreamBufferMaxRange = dstBitstreamBufferMaxRange_; + return *this; + } + + VideoEncodeInfoKHR & setSrcPictureResource( + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + srcPictureResource = srcPictureResource_; + return *this; + } + + VideoEncodeInfoKHR & setPSetupReferenceSlot( + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT + { + pSetupReferenceSlot = pSetupReferenceSlot_; + return *this; + } + + VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VideoEncodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeInfoKHR const & ) const = default; +# else + bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( qualityLevel == rhs.qualityLevel ) && ( codedExtent == rhs.codedExtent ) && + ( dstBitstreamBuffer == rhs.dstBitstreamBuffer ) && + ( dstBitstreamBufferOffset == rhs.dstBitstreamBufferOffset ) && + ( dstBitstreamBufferMaxRange == rhs.dstBitstreamBufferMaxRange ) && + ( srcPictureResource == rhs.srcPictureResource ) && ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && + ( referenceSlotCount == rhs.referenceSlotCount ) && ( pReferenceSlots == rhs.pReferenceSlots ); + } + + bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {}; + uint32_t qualityLevel = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + 
VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange = {}; + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {}; + }; + static_assert( sizeof( VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoEncodeInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct SubpassEndInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassEndInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassEndInfo const & ) const = default; +#else + bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); + } + + bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo; + const void * pNext = {}; + }; + static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SubpassEndInfo; + }; + using SubpassEndInfoKHR = SubpassEndInfo; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEndCodingInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEndCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEndCodingInfoKHR( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEndCodingInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & + operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEndCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEndCodingInfoKHR const & ) const = default; +# else + bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + } + + bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEndCodingInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags = {}; + }; + static_assert( sizeof( VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoEndCodingInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class IndirectCommandsLayoutNV + { + public: + using CType = VkIndirectCommandsLayoutNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() = default; + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + IndirectCommandsLayoutNV & operator=( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = indirectCommandsLayoutNV; + return *this; + } +#endif + + IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutNV const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV; + } + + bool operator!=( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV; + } + + bool operator<( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV == VK_NULL_HANDLE; + } + + private: + VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct IndirectCommandsStreamNV + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsStreamNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & + operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsStreamNV const & ) const = default; +#else + bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( buffer == rhs.buffer ) && ( offset == rhs.offset ); + } + + bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + }; + static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct GeneratedCommandsInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t streamCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {}, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , streamCount( streamCount_ ) + , pStreams( pStreams_ ) + , sequencesCount( sequencesCount_ ) + , preprocessBuffer( preprocessBuffer_ ) + , preprocessOffset( preprocessOffset_ ) + , preprocessSize( preprocessSize_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + streams_, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) + : pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , streamCount( static_cast( streams_.size() ) ) + , pStreams( streams_.data() ) + , sequencesCount( sequencesCount_ ) + , preprocessBuffer( preprocessBuffer_ ) + , preprocessOffset( preprocessOffset_ ) + , preprocessSize( preprocessSize_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsInfoNV & operator=( 
VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GeneratedCommandsInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + GeneratedCommandsInfoNV & setIndirectCommandsLayout( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + GeneratedCommandsInfoNV & + setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT + { + pStreams = pStreams_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV & setStreams( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + streams_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = static_cast( streams_.size() ); + pStreams = streams_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCount = sequencesCount_; + return *this; + } + + GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT + { + preprocessBuffer = preprocessBuffer_; + return *this; + } + + GeneratedCommandsInfoNV & + setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT + { + preprocessOffset = preprocessOffset_; + return *this; + } + + GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT + { + preprocessSize = preprocessSize_; + return *this; + } + + GeneratedCommandsInfoNV & + setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountBuffer = sequencesCountBuffer_; + return *this; + } + + GeneratedCommandsInfoNV & + setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountOffset = sequencesCountOffset_; + return *this; + } + + GeneratedCommandsInfoNV & + setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexBuffer = sequencesIndexBuffer_; + return *this; + } + + GeneratedCommandsInfoNV & + setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexOffset = sequencesIndexOffset_; + return *this; + } + + operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsInfoNV const & ) const = default; +#else + bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint 
) && + ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && + ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) && + ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == rhs.preprocessBuffer ) && + ( preprocessOffset == rhs.preprocessOffset ) && ( preprocessSize == rhs.preprocessSize ) && + ( sequencesCountBuffer == rhs.sequencesCountBuffer ) && + ( sequencesCountOffset == rhs.sequencesCountOffset ) && + ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) && + ( sequencesIndexOffset == rhs.sequencesIndexOffset ); + } + + bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t streamCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {}; + uint32_t sequencesCount = {}; + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; + }; + static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV> + { + using Type = GeneratedCommandsInfoNV; + }; + + struct MemoryBarrier + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs ); + return *this; + } + + MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkMemoryBarrier *>( this ); + } + + operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkMemoryBarrier *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryBarrier const & ) const = default; +#else + bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ); + } + + bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + }; + static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = MemoryBarrier; + }; + + struct ImageMemoryBarrier + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , oldLayout( oldLayout_ ) + , newLayout( newLayout_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , image( image_ ) + , subresourceRange( subresourceRange_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageMemoryBarrier( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & + operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + { + oldLayout = oldLayout_; + return *this; + } + + ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + { + newLayout = newLayout_; + return *this; + } + + ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + ImageMemoryBarrier & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } + + operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryBarrier const & ) const = default; +#else + bool operator==( 
ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && + ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && + ( subresourceRange == rhs.subresourceRange ); + } + + bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageMemoryBarrier; + }; + + struct MemoryBarrier2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MemoryBarrier2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT + : srcStageMask( srcStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstStageMask( dstStageMask_ ) + , dstAccessMask( dstAccessMask_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryBarrier2KHR( MemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier2KHR( VkMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryBarrier2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2KHR & + operator=( MemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier2KHR & operator=( VkMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryBarrier2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryBarrier2KHR & + setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + MemoryBarrier2KHR & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + MemoryBarrier2KHR & + setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + MemoryBarrier2KHR & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask 
= dstAccessMask_; + return *this; + } + + operator VkMemoryBarrier2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryBarrier2KHR const & ) const = default; +#else + bool operator==( MemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( dstAccessMask == rhs.dstAccessMask ); + } + + bool operator!=( MemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {}; + }; + static_assert( sizeof( MemoryBarrier2KHR ) == sizeof( VkMemoryBarrier2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = MemoryBarrier2KHR; + }; + + struct ImageMemoryBarrier2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2KHR( + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT + : srcStageMask( srcStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstStageMask( dstStageMask_ ) + , dstAccessMask( dstAccessMask_ ) + , oldLayout( oldLayout_ ) + , newLayout( newLayout_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , image( image_ ) + , subresourceRange( subresourceRange_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2KHR( ImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier2KHR( VkImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageMemoryBarrier2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2KHR & + operator=( ImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier2KHR & operator=( VkImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageMemoryBarrier2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageMemoryBarrier2KHR & + 
setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + ImageMemoryBarrier2KHR & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + ImageMemoryBarrier2KHR & + setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + ImageMemoryBarrier2KHR & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + ImageMemoryBarrier2KHR & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + { + oldLayout = oldLayout_; + return *this; + } + + ImageMemoryBarrier2KHR & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + { + newLayout = newLayout_; + return *this; + } + + ImageMemoryBarrier2KHR & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier2KHR & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier2KHR & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + ImageMemoryBarrier2KHR & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } + + operator VkImageMemoryBarrier2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryBarrier2KHR const & ) const = default; +#else + bool operator==( ImageMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && + ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && + ( subresourceRange == rhs.subresourceRange ); + } + + bool operator!=( ImageMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + static_assert( 
sizeof( ImageMemoryBarrier2KHR ) == sizeof( VkImageMemoryBarrier2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageMemoryBarrier2KHR; + }; + + struct DependencyInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DependencyInfoKHR( + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + uint32_t memoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR * pMemoryBarriers_ = {}, + uint32_t bufferMemoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR * pBufferMemoryBarriers_ = {}, + uint32_t imageMemoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR * pImageMemoryBarriers_ = {} ) VULKAN_HPP_NOEXCEPT + : dependencyFlags( dependencyFlags_ ) + , memoryBarrierCount( memoryBarrierCount_ ) + , pMemoryBarriers( pMemoryBarriers_ ) + , bufferMemoryBarrierCount( bufferMemoryBarrierCount_ ) + , pBufferMemoryBarriers( pBufferMemoryBarriers_ ) + , imageMemoryBarrierCount( imageMemoryBarrierCount_ ) + , pImageMemoryBarriers( pImageMemoryBarriers_ ) + {} + + VULKAN_HPP_CONSTEXPR DependencyInfoKHR( DependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DependencyInfoKHR( VkDependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DependencyInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfoKHR( + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + memoryBarriers_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferMemoryBarriers_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageMemoryBarriers_ = {} ) + : dependencyFlags( dependencyFlags_ ) + , memoryBarrierCount( static_cast( memoryBarriers_.size() ) ) + , pMemoryBarriers( memoryBarriers_.data() ) + , bufferMemoryBarrierCount( static_cast( bufferMemoryBarriers_.size() ) ) + , pBufferMemoryBarriers( bufferMemoryBarriers_.data() ) + , imageMemoryBarrierCount( static_cast( imageMemoryBarriers_.size() ) ) + , pImageMemoryBarriers( imageMemoryBarriers_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DependencyInfoKHR & + operator=( DependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DependencyInfoKHR & operator=( VkDependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DependencyInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DependencyInfoKHR & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + DependencyInfoKHR & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT + { + memoryBarrierCount = memoryBarrierCount_; + return *this; + } + + DependencyInfoKHR & + setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + pMemoryBarriers = pMemoryBarriers_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfoKHR & setMemoryBarriers( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + memoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + memoryBarrierCount = static_cast( memoryBarriers_.size() ); + pMemoryBarriers = memoryBarriers_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DependencyInfoKHR & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferMemoryBarrierCount = bufferMemoryBarrierCount_; + return *this; + } + + DependencyInfoKHR & setPBufferMemoryBarriers( + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + pBufferMemoryBarriers = pBufferMemoryBarriers_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfoKHR & setBufferMemoryBarriers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + bufferMemoryBarrierCount = static_cast( bufferMemoryBarriers_.size() ); + pBufferMemoryBarriers = bufferMemoryBarriers_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DependencyInfoKHR & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT + { + imageMemoryBarrierCount = imageMemoryBarrierCount_; + return *this; + } + + DependencyInfoKHR & setPImageMemoryBarriers( + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + pImageMemoryBarriers = pImageMemoryBarriers_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfoKHR & setImageMemoryBarriers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + imageMemoryBarrierCount = static_cast( imageMemoryBarriers_.size() ); + pImageMemoryBarriers = imageMemoryBarriers_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDependencyInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDependencyInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DependencyInfoKHR const & ) const = default; +#else + bool operator==( DependencyInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) && + ( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) && + ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) && + ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) && + ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) && + ( pImageMemoryBarriers == rhs.pImageMemoryBarriers ); + } + + bool operator!=( DependencyInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + uint32_t memoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR * pMemoryBarriers = {}; + uint32_t bufferMemoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR * pBufferMemoryBarriers = {}; + uint32_t imageMemoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR * pImageMemoryBarriers = {}; + }; + static_assert( sizeof( DependencyInfoKHR ) == sizeof( VkDependencyInfoKHR ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout<DependencyInfoKHR>::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType<StructureType, StructureType::eDependencyInfoKHR> + { + using Type = DependencyInfoKHR; + }; + + class Sampler + { + public: + using CType = VkSampler; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSampler; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + + public: + VULKAN_HPP_CONSTEXPR Sampler() = default; + VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT : m_sampler( sampler ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Sampler & operator=( VkSampler sampler ) VULKAN_HPP_NOEXCEPT + { + m_sampler = sampler; + return *this; + } +#endif + + Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_sampler = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Sampler const & ) const = default; +#else + bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler == rhs.m_sampler; + } + + bool operator!=( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler != rhs.m_sampler; + } + + bool operator<( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler < rhs.m_sampler; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT + { + return m_sampler; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_sampler != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_sampler == VK_NULL_HANDLE; + } + + private: + VkSampler m_sampler = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead."
) cpp_type<ObjectType::eSampler> + { + using type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + template <> + struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler> + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + template <> + struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler> + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + template <> + struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Sampler> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct DescriptorImageInfo + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = + VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : sampler( sampler_ ) + , imageView( imageView_ ) + , imageLayout( imageLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & + operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs ); + return *this; + } + + DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT + { + sampler = sampler_; + return *this; + } + + DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT + { + imageLayout = imageLayout_; + return *this; + } + + operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDescriptorImageInfo *>( this ); + } + + operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDescriptorImageInfo *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorImageInfo const & ) const = default; +#else + bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ); + } + + bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!"
); + + struct DescriptorBufferInfo + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , offset( offset_ ) + , range( range_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorBufferInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & + operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT + { + range = range_; + return *this; + } + + operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorBufferInfo const & ) const = default; +#else + bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range ); + } + + bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + }; + static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + class BufferView + { + public: + using CType = VkBufferView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + + public: + VULKAN_HPP_CONSTEXPR BufferView() = default; + VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT : m_bufferView( bufferView ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + BufferView & operator=( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = bufferView; + return *this; + } +#endif + + BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferView const & ) const = default; +#else + bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == rhs.m_bufferView; + } + + bool operator!=( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView != rhs.m_bufferView; + } + + bool operator<( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView < rhs.m_bufferView; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == VK_NULL_HANDLE; + } + + private: + VkBufferView m_bufferView = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type<ObjectType::eBufferView> + { + using type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferView> + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView> + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::BufferView> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct WriteDescriptorSet + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT + : dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + , descriptorType( descriptorType_ ) + , pImageInfo( pImageInfo_ ) + , pBufferInfo( pBufferInfo_ ) + , pTexelBufferView( pTexelBufferView_ ) + {} + + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet( + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, + uint32_t dstBinding_, + uint32_t dstArrayElement_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & + bufferInfo_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & + texelBufferView_ = {} ) + : dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size() + : !bufferInfo_.empty() ?
bufferInfo_.size() + : texelBufferView_.size() ) ) + , descriptorType( descriptorType_ ) + , pImageInfo( imageInfo_.data() ) + , pBufferInfo( bufferInfo_.data() ) + , pTexelBufferView( texelBufferView_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1 ); +# else + if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & + operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT + { + dstSet = dstSet_; + return *this; + } + + WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + WriteDescriptorSet & + setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT + { + pImageInfo = pImageInfo_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & setImageInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( imageInfo_.size() ); + pImageInfo = imageInfo_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + WriteDescriptorSet & + setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + pBufferInfo = pBufferInfo_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & setBufferInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( bufferInfo_.size() ); + pBufferInfo = bufferInfo_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + WriteDescriptorSet & + setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT + { + pTexelBufferView = pTexelBufferView_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & setTexelBufferView( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ ) + VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( texelBufferView_.size() ); + pTexelBufferView = texelBufferView_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator 
VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSet const & ) const = default; +#else + bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) && + ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && + ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && + ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) && + ( pTexelBufferView == rhs.pTexelBufferView ); + } + + bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {}; + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {}; + }; + static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = WriteDescriptorSet; + }; + + class DescriptorUpdateTemplate + { + public: + using CType = VkDescriptorUpdateTemplate; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; + + public: + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() = default; + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorUpdateTemplate & operator=( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = descriptorUpdateTemplate; + return *this; + } +#endif + + DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplate const & ) const = default; +#else + bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; + } + + bool operator!=( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; + } + + bool operator<( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate == VK_NULL_HANDLE; + } + + private: + VkDescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; + + class Event + { + public: + using CType = VkEvent; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eEvent; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + + public: + VULKAN_HPP_CONSTEXPR Event() = default; + VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT : m_event( event ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Event & operator=( VkEvent event ) VULKAN_HPP_NOEXCEPT + { + m_event = event; + return *this; + } +#endif + + Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_event = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Event const & ) const = default; +#else + bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event == rhs.m_event; + } + + bool operator!=( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event != rhs.m_event; + } + + bool operator<( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event < rhs.m_event; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_event != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_event == VK_NULL_HANDLE; + } + + private: + VkEvent m_event = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Event; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct ImageResolve + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageResolve( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageResolve & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageResolve & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageResolve &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageResolve const & ) const = default; +#else + bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); + } + + bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ImageResolve2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve2KHR( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageResolve2KHR( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve2KHR( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageResolve2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2KHR & operator=( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve2KHR & operator=( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageResolve2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageResolve2KHR & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageResolve2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageResolve2KHR & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageResolve2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageResolve2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + operator VkImageResolve2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageResolve2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageResolve2KHR const & ) const = default; +#else + bool operator==( ImageResolve2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && + ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); + } + + bool operator!=( ImageResolve2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + static_assert( sizeof( ImageResolve2KHR ) == sizeof( VkImageResolve2KHR ), + "struct and wrapper 
have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageResolve2KHR; + }; + + struct ResolveImageInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageResolve2KHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ResolveImageInfo2KHR( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ResolveImageInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ResolveImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2KHR & + operator=( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ResolveImageInfo2KHR & operator=( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ResolveImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ResolveImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + ResolveImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + ResolveImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + ResolveImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + ResolveImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + ResolveImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ResolveImageInfo2KHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( 
regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkResolveImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkResolveImageInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ResolveImageInfo2KHR const & ) const = default; +#else + bool operator==( ResolveImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && + ( pRegions == rhs.pRegions ); + } + + bool operator!=( ResolveImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageResolve2KHR * pRegions = {}; + }; + static_assert( sizeof( ResolveImageInfo2KHR ) == sizeof( VkResolveImageInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ResolveImageInfo2KHR; + }; + + struct PerformanceMarkerInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT : marker( marker_ ) {} + + VULKAN_HPP_CONSTEXPR + PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & + operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } + + operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( marker == rhs.marker ); + } + + bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; + const void * pNext = {}; + uint64_t marker = {}; + }; + static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PerformanceMarkerInfoINTEL; + }; + + struct PerformanceOverrideInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, + VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, + uint64_t parameter_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , enable( enable_ ) + , parameter( parameter_ ) + {} + + VULKAN_HPP_CONSTEXPR + PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceOverrideInfoINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & + operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PerformanceOverrideInfoINTEL & + setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT + { + enable = enable_; + return *this; + } + + PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT + { + parameter = parameter_; + return *this; + } + + operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) && + ( parameter == rhs.parameter ); + } + + bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; + VULKAN_HPP_NAMESPACE::Bool32 
enable = {}; + uint64_t parameter = {}; + }; + static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL> + { + using Type = PerformanceOverrideInfoINTEL; + }; + + struct PerformanceStreamMarkerInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {} ) VULKAN_HPP_NOEXCEPT + : marker( marker_ ) + {} + + VULKAN_HPP_CONSTEXPR + PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & + operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs ); + return *this; + } + + PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } + + operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( this ); + } + + operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); + } + + bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; + const void * pNext = {}; + uint32_t marker = {}; + }; + static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, + "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PerformanceStreamMarkerInfoINTEL; + }; + + struct VertexInputBindingDescription2EXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVertexInputBindingDescription2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( + uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, + uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT + : binding( binding_ ) + , stride( stride_ ) + , inputRate( inputRate_ ) + , divisor( divisor_ ) + {} + + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription2EXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & + operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VertexInputBindingDescription2EXT & + setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + { + inputRate = inputRate_; + return *this; + } + + VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT + { + divisor = divisor_; + return *this; + } + + operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default; +#else + bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) && + ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) && ( divisor == rhs.divisor ); + } + + bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT; + void * pNext = {}; + uint32_t binding = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; + uint32_t divisor = {}; + }; + static_assert( sizeof( VertexInputBindingDescription2EXT ) == sizeof( VkVertexInputBindingDescription2EXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VertexInputBindingDescription2EXT; + }; + + struct VertexInputAttributeDescription2EXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVertexInputAttributeDescription2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( + uint32_t location_ = {}, + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT + : location( location_ ) + , binding( binding_ ) + , format( format_ ) + , offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputAttributeDescription2EXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & + operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription2EXT & + operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + { + location = location_; + return *this; + } + + VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default; +#else + bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) && + ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset ); + } + + bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT; + void * pNext = {}; + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; + }; + static_assert( sizeof( VertexInputAttributeDescription2EXT ) == sizeof( VkVertexInputAttributeDescription2EXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VertexInputAttributeDescription2EXT; + }; + + struct ShadingRatePaletteNV + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( + uint32_t shadingRatePaletteEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ) + , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ ) + {} + + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ShadingRatePaletteNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + shadingRatePaletteEntries_ ) + : shadingRatePaletteEntryCount( static_cast( shadingRatePaletteEntries_.size() ) ) + , pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & + operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; + return *this; + } + + ShadingRatePaletteNV & setPShadingRatePaletteEntries( + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT + { + pShadingRatePaletteEntries = pShadingRatePaletteEntries_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV & setShadingRatePaletteEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = static_cast( shadingRatePaletteEntries_.size() ); + pShadingRatePaletteEntries = shadingRatePaletteEntries_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShadingRatePaletteNV const & ) const = default; +#else + bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) && + ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); + } + + bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t shadingRatePaletteEntryCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries = {}; + }; + static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ViewportWScalingNV + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT + : xcoeff( xcoeff_ ) + , ycoeff( ycoeff_ ) + {} + + VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & + operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs ); + return *this; + } + + ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT + { + xcoeff = xcoeff_; + return *this; + } + + ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT + { + ycoeff = ycoeff_; + return *this; + } + + operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkViewportWScalingNV *>( this ); + } + + operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkViewportWScalingNV *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViewportWScalingNV const & ) const = default; +#else + bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( xcoeff == rhs.xcoeff ) && ( ycoeff == rhs.ycoeff ); + } + + bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float xcoeff = {}; + float ycoeff = {}; + }; + static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!"
); + + struct StridedDeviceAddressRegionKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + StridedDeviceAddressRegionKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceAddress( deviceAddress_ ) + , stride( stride_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR + StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & + operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const *>( &rhs ); + return *this; + } + + StridedDeviceAddressRegionKHR & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( this ); + } + + operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkStridedDeviceAddressRegionKHR *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default; +#else + bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( deviceAddress == rhs.deviceAddress ) && ( stride == rhs.stride ) && ( size == rhs.size ); + } + + bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + static_assert( sizeof( StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<StridedDeviceAddressRegionKHR>::value, + "struct wrapper is not a standard layout!"
); + + class CommandBuffer + { + public: + using CType = VkCommandBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + + public: + VULKAN_HPP_CONSTEXPR CommandBuffer() = default; + VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT + : m_commandBuffer( commandBuffer ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CommandBuffer & operator=( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = commandBuffer; + return *this; + } +#endif + + CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBuffer const & ) const = default; +#else + bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == rhs.m_commandBuffer; + } + + bool operator!=( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != rhs.m_commandBuffer; + } + + bool operator<( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer < rhs.m_commandBuffer; + } +#endif + + template + VULKAN_HPP_NODISCARD Result + begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + begin( const CommandBufferBeginInfo & beginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void beginRenderPass( const 
VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + 
ArrayProxy const & descriptorSets, + ArrayProxy const & dynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void + bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindTransformFeedbackBuffersEXT( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindVertexBuffers2EXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers2EXT( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image 
dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR * pBlitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void buildAccelerationStructuresIndirectKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructuresIndirectKHR( + ArrayProxy const & infos, + ArrayProxy const & indirectDeviceAddresses, + ArrayProxy const & indirectStrides, + ArrayProxy const & pMaxPrimitiveCounts, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void buildAccelerationStructuresKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void 
buildAccelerationStructuresKHR( + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearAttachments( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, + uint32_t rectCount, + const VULKAN_HPP_NAMESPACE::ClearRect * pRects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearAttachments( ArrayProxy const & attachments, + ArrayProxy const & rects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearColorValue & color, + ArrayProxy const & ranges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearDepthStencilValue & depthStencil, + ArrayProxy const & ranges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR 
mode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR * pCopyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR * pCopyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR * pCopyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR * pCopyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void decodeVideoKHR( const VideoDecodeInfoKHR & frameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + 
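+    // Editorial sketch, not part of the upstream Vulkan-Hpp header: the drawIndirectCount /
+    // drawIndexedIndirectCount family reads the actual draw count from a second GPU buffer instead
+    // of a CPU-side constant. Assuming `cmd` is a vk::CommandBuffer in the recording state and
+    // `argBuffer` / `countBuffer` are hypothetical buffers filled elsewhere, usage looks roughly like:
+    //
+    //   cmd.drawIndexedIndirectCount( argBuffer, 0, countBuffer, 0, maxDraws,
+    //                                 sizeof( VkDrawIndexedIndirectCommand ) );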
template + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void + drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawMeshTasksNV( uint32_t taskCount, + uint32_t firstTask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void endDebugUtilsLabelEXT( Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2( const SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void executeCommands( uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeCommands( ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeGeneratedCommandsNV( 
VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + ArrayProxy const & values, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + ArrayProxy const & descriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + 
template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR * pResolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setBlendConstants( const float blendConstants[4], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setCheckpointNV( const void * pCheckpointMarker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCoarseSampleOrderNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + ArrayProxy const & customSampleOrders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + setColorWriteEnableEXT( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setColorWriteEnableEXT( ArrayProxy const & colorWriteEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBounds( float minDepthBounds, + float maxDepthBounds, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; + + template + void + setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDeviceMask( uint32_t deviceMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDeviceMaskKHR( uint32_t deviceMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + ArrayProxy const & discardRectangles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const DependencyInfoKHR & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + ArrayProxy const & exclusiveScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setFragmentShadingRateKHR( const Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setLineStippleEXT( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const VULKAN_HPP_NOEXCEPT; + + template + void setLineWidth( float lineWidth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setPatchControlPointsEXT( uint32_t patchControlPoints, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void + setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D 
* pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setScissor( uint32_t firstScissor, + ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + setScissorWithCountEXT( uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setScissorWithCountEXT( ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setVertexInputEXT( uint32_t vertexBindingDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setVertexInputEXT( + ArrayProxy const & vertexBindingDescriptions, + ArrayProxy const & vertexAttributeDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setViewport( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewport( uint32_t firstViewport, + ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void 
setViewportShadingRatePaletteNV( + uint32_t firstViewport, + ArrayProxy const & shadingRatePalettes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportWScalingNV( uint32_t firstViewport, + ArrayProxy const & viewportWScalings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + setViewportWithCountEXT( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setViewportWithCountEXT( ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const StridedDeviceAddressRegionKHR & missShaderBindingTable, + const StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const StridedDeviceAddressRegionKHR & missShaderBindingTable, + const StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + 
VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize dataSize, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void waitEvents( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents( ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void waitEvents2KHR( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents2KHR( ArrayProxy const & events, + ArrayProxy const & dependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + 
VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeAccelerationStructuresPropertiesNV( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesNV( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == VK_NULL_HANDLE; + } + + private: + VkCommandBuffer m_commandBuffer = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), + "handle and wrapper have different size!" 
);
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eCommandBuffer>
+  {
+    using type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandBuffer>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct CommandBufferSubmitInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfoKHR( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {},
+                                                     uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : commandBuffer( commandBuffer_ )
+      , deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      CommandBufferSubmitInfoKHR( CommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferSubmitInfoKHR( VkCommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferSubmitInfoKHR( *reinterpret_cast<CommandBufferSubmitInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfoKHR &
+      operator=( CommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferSubmitInfoKHR & operator=( VkCommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    CommandBufferSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferSubmitInfoKHR &
+      setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBuffer = commandBuffer_;
+      return *this;
+    }
+
+    CommandBufferSubmitInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    operator VkCommandBufferSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferSubmitInfoKHR *>( this );
+    }
+
+    operator VkCommandBufferSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferSubmitInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CommandBufferSubmitInfoKHR const & ) const = default;
+#else
+    bool operator==( CommandBufferSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) &&
+             ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( CommandBufferSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {};
+    uint32_t deviceMask = {};
+  };
+  static_assert( sizeof( CommandBufferSubmitInfoKHR ) == sizeof( VkCommandBufferSubmitInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferSubmitInfoKHR>::value,
+                 "struct wrapper is not a standard layout!"
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferSubmitInfoKHR>
+  {
+    using Type = CommandBufferSubmitInfoKHR;
+  };
+
+  struct CommandPoolCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {},
+                                                uint32_t queueFamilyIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queueFamilyIndex( queueFamilyIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo &
+      operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandPoolCreateInfo *>( this );
+    }
+
+    operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandPoolCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CommandPoolCreateInfo const & ) const = default;
+#else
+    bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( queueFamilyIndex == rhs.queueFamilyIndex );
+    }
+
+    bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+  };
+  static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = CommandPoolCreateInfo; + }; + + class ShaderModule + { + public: + using CType = VkShaderModule; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + + public: + VULKAN_HPP_CONSTEXPR ShaderModule() = default; + VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT + : m_shaderModule( shaderModule ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ShaderModule & operator=( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = shaderModule; + return *this; + } +#endif + + ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModule const & ) const = default; +#else + bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == rhs.m_shaderModule; + } + + bool operator!=( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != rhs.m_shaderModule; + } + + bool operator<( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule < rhs.m_shaderModule; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == VK_NULL_HANDLE; + } + + private: + VkShaderModule m_shaderModule = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type<ObjectType::eShaderModule>
+  {
+    using type = VULKAN_HPP_NAMESPACE::ShaderModule;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderModule>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct SpecializationMapEntry
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
+      : constantID( constantID_ )
+      , offset( offset_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry &
+      operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
+      return *this;
+    }
+
+    SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      constantID = constantID_;
+      return *this;
+    }
+
+    SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSpecializationMapEntry *>( this );
+    }
+
+    operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSpecializationMapEntry *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SpecializationMapEntry const & ) const = default;
+#else
+    bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size );
+    }
+
+    bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t constantID = {};
+    uint32_t offset     = {};
+    size_t   size       = {};
+  };
+  static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!"
); + + struct SpecializationInfo + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, + size_t dataSize_ = {}, + const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : mapEntryCount( mapEntryCount_ ) + , pMapEntries( pMapEntries_ ) + , dataSize( dataSize_ ) + , pData( pData_ ) + {} + + VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SpecializationInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ = {} ) + : mapEntryCount( static_cast( mapEntries_.size() ) ) + , pMapEntries( mapEntries_.data() ) + , dataSize( data_.size() * sizeof( T ) ) + , pData( data_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & + operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + mapEntryCount = mapEntryCount_; + return *this; + } + + SpecializationInfo & + setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT + { + pMapEntries = pMapEntries_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SpecializationInfo & setMapEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + mapEntries_ ) VULKAN_HPP_NOEXCEPT + { + mapEntryCount = static_cast( mapEntries_.size() ); + pMapEntries = mapEntries_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + SpecializationInfo & + setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SpecializationInfo const & ) const = default; +#else + bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && + ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); + } + + bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t mapEntryCount = {}; + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {}; + size_t dataSize = {}; 
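+    // pData points to the raw specialization-constant data and dataSize gives its size in bytes;
+    // each SpecializationMapEntry above identifies one constant ID plus its offset and size within that buffer.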
+ const void * pData = {}; + }; + static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PipelineShaderStageCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, + VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, + const char * pName_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stage( stage_ ) + , module( module_ ) + , pName( pName_ ) + , pSpecializationInfo( pSpecializationInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & + operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineShaderStageCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT + { + module = module_; + return *this; + } + + PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } + + PipelineShaderStageCreateInfo & setPSpecializationInfo( + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pSpecializationInfo = pSpecializationInfo_; + return *this; + } + + operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineShaderStageCreateInfo const & ) const = default; +#else + bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && + ( module == rhs.module ) && ( pName == rhs.pName ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); + } + + bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
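+    // Raw fields mirroring VkPipelineShaderStageCreateInfo: 'stage' selects the pipeline stage,
+    // 'module' is the ShaderModule containing the SPIR-V code, and 'pName' names the shader entry point.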
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; + VULKAN_HPP_NAMESPACE::ShaderModule module = {}; + const char * pName = {}; + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; + }; + static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineShaderStageCreateInfo; + }; + + struct ComputePipelineCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stage( stage_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ComputePipelineCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & + operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ComputePipelineCreateInfo & + setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + ComputePipelineCreateInfo & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComputePipelineCreateInfo const & ) const = default; +#else + bool operator==( ComputePipelineCreateInfo const 
& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && + ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ComputePipelineCreateInfo; + }; + + struct ConformanceVersion + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, + uint8_t minor_ = {}, + uint8_t subminor_ = {}, + uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT + : major( major_ ) + , minor( minor_ ) + , subminor( subminor_ ) + , patch( patch_ ) + {} + + VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + : ConformanceVersion( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & + operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT + { + major = major_; + return *this; + } + + ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT + { + minor = minor_; + return *this; + } + + ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT + { + subminor = subminor_; + return *this; + } + + ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT + { + patch = patch_; + return *this; + } + + operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConformanceVersion const & ) const = default; +#else + bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch ); + } + + bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint8_t major = {}; + uint8_t minor = {}; + uint8_t subminor = {}; + uint8_t patch = {}; + }; + static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using ConformanceVersionKHR = ConformanceVersion; + + struct CooperativeMatrixPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( + uint32_t MSize_ = {}, + uint32_t NSize_ = {}, + uint32_t KSize_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT + : MSize( MSize_ ) + , NSize( NSize_ ) + , KSize( KSize_ ) + , AType( AType_ ) + , BType( BType_ ) + , CType( CType_ ) + , DType( DType_ ) + , scope( scope_ ) + {} + + VULKAN_HPP_CONSTEXPR + CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & + operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CooperativeMatrixPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT + { + MSize = MSize_; + return *this; + } + + CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT + { + NSize = NSize_; + return *this; + } + + CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT + { + KSize = KSize_; + return *this; + } + + CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT + { + AType = AType_; + return *this; + } + + CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT + { + BType = BType_; + return *this; + } + + CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT + { + CType = CType_; + return *this; + } + + CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT + { + DType = DType_; + return *this; + } + + CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT + { + scope = scope_; + return *this; + } + + operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default; +#else + bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && + ( KSize == rhs.KSize ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && + ( DType == rhs.DType ) && ( scope == rhs.scope ); + } + + bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; + void * pNext = {}; + uint32_t MSize = {}; + uint32_t NSize = {}; + uint32_t KSize = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice; + }; + static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CooperativeMatrixPropertiesNV; + }; + + struct CopyCommandTransformInfoQCOM + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CopyCommandTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT + : transform( transform_ ) + {} + + VULKAN_HPP_CONSTEXPR + CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyCommandTransformInfoQCOM( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & + operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyCommandTransformInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default; +#else + bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); + } + + bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + }; + static_assert( sizeof( CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyCommandTransformInfoQCOM; + }; + + struct CopyDescriptorSet + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, + uint32_t srcBinding_ = {}, + uint32_t srcArrayElement_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSet( srcSet_ ) + , srcBinding( srcBinding_ ) + , srcArrayElement( srcArrayElement_ ) + , dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyDescriptorSet( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & + operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT + { + srcSet = srcSet_; + return *this; + } + + CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT + { + srcBinding = srcBinding_; + return *this; + } + + CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + srcArrayElement = srcArrayElement_; + return *this; + } + + CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT + { + dstSet = dstSet_; + return *this; + } + + CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyDescriptorSet const & ) const = default; +#else + bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
srcSet == rhs.srcSet ) && + ( srcBinding == rhs.srcBinding ) && ( srcArrayElement == rhs.srcArrayElement ) && + ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && + ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ); + } + + bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {}; + uint32_t srcBinding = {}; + uint32_t srcArrayElement = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + }; + static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyDescriptorSet; + }; + + class CuModuleNVX + { + public: + using CType = VkCuModuleNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + + public: + VULKAN_HPP_CONSTEXPR CuModuleNVX() = default; + VULKAN_HPP_CONSTEXPR CuModuleNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT CuModuleNVX( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT + : m_cuModuleNVX( cuModuleNVX ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CuModuleNVX & operator=( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = cuModuleNVX; + return *this; + } +#endif + + CuModuleNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuModuleNVX const & ) const = default; +#else + bool operator==( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX == rhs.m_cuModuleNVX; + } + + bool operator!=( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX != rhs.m_cuModuleNVX; + } + + bool operator<( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX < rhs.m_cuModuleNVX; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuModuleNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX == VK_NULL_HANDLE; + } + + private: + VkCuModuleNVX m_cuModuleNVX = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::CuModuleNVX ) == sizeof( VkCuModuleNVX ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct CuFunctionCreateInfoNVX + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, + const char * pName_ = {} ) VULKAN_HPP_NOEXCEPT + : module( module_ ) + , pName( pName_ ) + {} + + VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuFunctionCreateInfoNVX( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & + operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT + { + module = module_; + return *this; + } + + CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } + + operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuFunctionCreateInfoNVX const & ) const = default; +#else + bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( pName == rhs.pName ); + } + + bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuFunctionCreateInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX module = {}; + const char * pName = {}; + }; + static_assert( sizeof( CuFunctionCreateInfoNVX ) == sizeof( VkCuFunctionCreateInfoNVX ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
 );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCuFunctionCreateInfoNVX>
+  {
+    using Type = CuFunctionCreateInfoNVX;
+  };
+
+  struct CuModuleCreateInfoNVX
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : dataSize( dataSize_ )
+      , pData( pData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CuModuleCreateInfoNVX( *reinterpret_cast<CuModuleCreateInfoNVX const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX &
+      operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const *>( &rhs );
+      return *this;
+    }
+
+    CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+    operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCuModuleCreateInfoNVX *>( this );
+    }
+
+    operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCuModuleCreateInfoNVX *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CuModuleCreateInfoNVX const & ) const = default;
+#else
+    bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
+    }
+
+    bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::eCuModuleCreateInfoNVX;
+    const void *                        pNext    = {};
+    size_t                              dataSize = {};
+    const void *                        pData    = {};
+  };
+  static_assert( sizeof( CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CuModuleCreateInfoNVX>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = CuModuleCreateInfoNVX; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct D3D12FenceSubmitInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValuesCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ) + , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) + , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ) + , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + {} + + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : D3D12FenceSubmitInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {} ) + : waitSemaphoreValuesCount( static_cast( waitSemaphoreValues_.size() ) ) + , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) + , signalSemaphoreValuesCount( static_cast( signalSemaphoreValues_.size() ) ) + , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & + operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValuesCount = waitSemaphoreValuesCount_; + return *this; + } + + D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreValues = pWaitSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValuesCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = waitSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValuesCount = signalSemaphoreValuesCount_; + return *this; + } + + D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreValues = pSignalSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + 
signalSemaphoreValuesCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default; +# else + bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && + ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) && + ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); + } + + bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreValuesCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValuesCount = {}; + const uint64_t * pSignalSemaphoreValues = {}; + }; + static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = D3D12FenceSubmitInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct DebugMarkerObjectNameInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + const char * pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ) + , object( object_ ) + , pObjectName( pObjectName_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & + operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DebugMarkerObjectNameInfoEXT & + setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT + { + objectType = objectType_; + return *this; + } + + DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT + { + object = object_; + return *this; + } + + DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) 
VULKAN_HPP_NOEXCEPT + { + pObjectName = pObjectName_; + return *this; + } + + operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugMarkerObjectNameInfoEXT const & ) const = default; +#else + bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( object == rhs.object ) && ( pObjectName == rhs.pObjectName ); + } + + bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + uint64_t object = {}; + const char * pObjectName = {}; + }; + static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DebugMarkerObjectNameInfoEXT; + }; + + struct DebugMarkerObjectTagInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {} ) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ) + , object( object_ ) + , tagName( tagName_ ) + , tagSize( tagSize_ ) + , pTag( pTag_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object_, + uint64_t tagName_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) + : objectType( objectType_ ) + , object( object_ ) + , tagName( tagName_ ) + , tagSize( tag_.size() * sizeof( T ) ) + , pTag( tag_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & + operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DebugMarkerObjectTagInfoEXT & + setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT + { + objectType = objectType_; 
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    DebugMarkerObjectTagInfoEXT &
+      setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tag_.size() * sizeof( T );
+      pTag    = tag_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( this );
+    }
+
+    operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default;
+#else
+    bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) &&
+             ( object == rhs.object ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) &&
+             ( pTag == rhs.pTag );
+    }
+
+    bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
+    const void *                        pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    uint64_t     object  = {};
+    uint64_t     tagName = {};
+    size_t       tagSize = {};
+    const void * pTag    = {};
+  };
+  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerObjectTagInfoEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = DebugMarkerObjectTagInfoEXT; + }; + + struct DebugReportCallbackCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, + PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, + void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pfnCallback( pfnCallback_ ) + , pUserData( pUserData_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugReportCallbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & + operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT + { + pfnCallback = pfnCallback_; + return *this; + } + + DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } + + operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugReportCallbackCreateInfoEXT const & ) const = default; +#else + bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData ); + } + + bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; + PFN_vkDebugReportCallbackEXT pfnCallback = {}; + void * pUserData = {}; + }; + static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DebugReportCallbackCreateInfoEXT; + }; + + struct DebugUtilsObjectNameInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + const char * pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , pObjectName( pObjectName_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & + operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT + { + objectType = objectType_; + return *this; + } + + DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT + { + objectHandle = objectHandle_; + return *this; + } + + DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT + { + pObjectName = pObjectName_; + return *this; + } + + operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsObjectNameInfoEXT const & ) const = default; +#else + bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( objectHandle == rhs.objectHandle ) && ( pObjectName == rhs.pObjectName ); + } + + bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + const char * pObjectName = {}; + }; + static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DebugUtilsObjectNameInfoEXT; + }; + + struct DebugUtilsMessengerCallbackDataEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDebugUtilsMessengerCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, + const char * pMessageIdName_ = {}, + int32_t messageIdNumber_ = {}, + const char * pMessage_ = {}, + uint32_t queueLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {}, + uint32_t cmdBufLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {}, + uint32_t objectCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pMessageIdName( pMessageIdName_ ) + , messageIdNumber( messageIdNumber_ ) + , pMessage( pMessage_ ) + , queueLabelCount( queueLabelCount_ ) + , pQueueLabels( pQueueLabels_ ) + , cmdBufLabelCount( cmdBufLabelCount_ ) + , pCmdBufLabels( pCmdBufLabels_ ) + , objectCount( objectCount_ ) + , pObjects( pObjects_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, + const char * pMessageIdName_, + int32_t messageIdNumber_, + const char * pMessage_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + queueLabels_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + cmdBufLabels_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + objects_ = {} ) + : flags( flags_ ) + , pMessageIdName( pMessageIdName_ ) + , messageIdNumber( messageIdNumber_ ) + , pMessage( pMessage_ ) + , queueLabelCount( static_cast( queueLabels_.size() ) ) + , pQueueLabels( queueLabels_.data() ) + , cmdBufLabelCount( static_cast( cmdBufLabels_.size() ) ) + , pCmdBufLabels( cmdBufLabels_.data() ) + , objectCount( static_cast( objects_.size() ) ) + , pObjects( objects_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCallbackDataEXT & + operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT & + setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT + { + pMessageIdName = pMessageIdName_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT + { + messageIdNumber = 
messageIdNumber_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT + { + pMessage = pMessage_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT + { + queueLabelCount = queueLabelCount_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT & + setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT + { + pQueueLabels = pQueueLabels_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & setQueueLabels( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + queueLabels_ ) VULKAN_HPP_NOEXCEPT + { + queueLabelCount = static_cast( queueLabels_.size() ); + pQueueLabels = queueLabels_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT + { + cmdBufLabelCount = cmdBufLabelCount_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT & + setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT + { + pCmdBufLabels = pCmdBufLabels_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT + { + cmdBufLabelCount = static_cast( cmdBufLabels_.size() ); + pCmdBufLabels = cmdBufLabels_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT + { + objectCount = objectCount_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT & + setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT + { + pObjects = pObjects_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & setObjects( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + objects_ ) VULKAN_HPP_NOEXCEPT + { + objectCount = static_cast( objects_.size() ); + pObjects = objects_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsMessengerCallbackDataEXT const & ) const = default; +#else + bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pMessageIdName == rhs.pMessageIdName ) && ( messageIdNumber == rhs.messageIdNumber ) && + ( pMessage == rhs.pMessage ) && ( queueLabelCount == rhs.queueLabelCount ) && + ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) && + ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && + ( pObjects == rhs.pObjects ); + } + + bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eDebugUtilsMessengerCallbackDataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {}; + const char * pMessageIdName = {}; + int32_t messageIdNumber = {}; + const char * pMessage = {}; + uint32_t queueLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {}; + uint32_t cmdBufLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {}; + uint32_t objectCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {}; + }; + static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DebugUtilsMessengerCallbackDataEXT; + }; + + struct DebugUtilsMessengerCreateInfoEXT + { + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , messageSeverity( messageSeverity_ ) + , messageType( messageType_ ) + , pfnUserCallback( pfnUserCallback_ ) + , pUserData( pUserData_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT & + setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT + { + messageSeverity = messageSeverity_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT & + setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT + { + messageType = messageType_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT & + setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + { + pfnUserCallback = pfnUserCallback_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } + + operator VkDebugUtilsMessengerCreateInfoEXT 
const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsMessengerCreateInfoEXT const & ) const = default; +#else + bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( messageSeverity == rhs.messageSeverity ) && ( messageType == rhs.messageType ) && + ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); + } + + bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; + }; + static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DebugUtilsMessengerCreateInfoEXT; + }; + + struct DebugUtilsObjectTagInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {} ) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , tagName( tagName_ ) + , tagSize( tagSize_ ) + , pTag( pTag_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle_, + uint64_t tagName_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) + : objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , tagName( tagName_ ) + , tagSize( tag_.size() * sizeof( T ) ) + , pTag( tag_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & + operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DebugUtilsObjectTagInfoEXT & setObjectType( 
VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT + { + objectType = objectType_; + return *this; + } + + DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT + { + objectHandle = objectHandle_; + return *this; + } + + DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT + { + tagName = tagName_; + return *this; + } + + DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tagSize_; + return *this; + } + + DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + { + pTag = pTag_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugUtilsObjectTagInfoEXT & + setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default; +#else + bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( objectHandle == rhs.objectHandle ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && + ( pTag == rhs.pTag ); + } + + bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; + }; + static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DebugUtilsObjectTagInfoEXT; + }; + + struct DedicatedAllocationBufferCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDedicatedAllocationBufferCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) + VULKAN_HPP_NOEXCEPT : dedicatedAllocation( dedicatedAllocation_ ) + {} + + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DedicatedAllocationBufferCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & + operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationBufferCreateInfoNV & + operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DedicatedAllocationBufferCreateInfoNV & + setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocation = dedicatedAllocation_; + return *this; + } + + operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default; +#else + bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); + } + + bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + }; + static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);

  template <>
  struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
  {
    using Type = DedicatedAllocationBufferCreateInfoNV;
  };

  struct DedicatedAllocationImageCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDedicatedAllocationImageCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
      : dedicatedAllocation( dedicatedAllocation_ )
    {}

    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV &
      operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DedicatedAllocationImageCreateInfoNV &
      operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
      return *this;
    }

    DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DedicatedAllocationImageCreateInfoNV &
      setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      dedicatedAllocation = dedicatedAllocation_;
      return *this;
    }

    operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV *>( this );
    }

    operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default;
#else
    bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation );
    }

    bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::eDedicatedAllocationImageCreateInfoNV;
    const void *                        pNext               = {};
    VULKAN_HPP_NAMESPACE::Bool32        dedicatedAllocation = {};
  };
  static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DedicatedAllocationImageCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = DedicatedAllocationImageCreateInfoNV; + }; + + struct DedicatedAllocationMemoryAllocateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DedicatedAllocationMemoryAllocateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & + operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationMemoryAllocateInfoNV & + operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default; +#else + bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); + } + + bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == + sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);

  template <>
  struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
  {
    using Type = DedicatedAllocationMemoryAllocateInfoNV;
  };

  struct DescriptorPoolSize
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
                          uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , descriptorCount( descriptorCount_ )
    {}

    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize &
      operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
      return *this;
    }

    DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }

    operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorPoolSize *>( this );
    }

    operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorPoolSize *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorPoolSize const & ) const = default;
#else
    bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount );
    }

    bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DescriptorType type            = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
    uint32_t                             descriptorCount = {};
  };
  static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorPoolSize>::value, "struct wrapper is not a standard layout!"
); + + struct DescriptorPoolCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, + uint32_t maxSets_ = {}, + uint32_t poolSizeCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , maxSets( maxSets_ ) + , poolSizeCount( poolSizeCount_ ) + , pPoolSizes( pPoolSizes_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorPoolCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorPoolCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, + uint32_t maxSets_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_ ) + : flags( flags_ ) + , maxSets( maxSets_ ) + , poolSizeCount( static_cast( poolSizes_.size() ) ) + , pPoolSizes( poolSizes_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & + operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT + { + maxSets = maxSets_; + return *this; + } + + DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT + { + poolSizeCount = poolSizeCount_; + return *this; + } + + DescriptorPoolCreateInfo & + setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT + { + pPoolSizes = pPoolSizes_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorPoolCreateInfo & setPoolSizes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_ ) + VULKAN_HPP_NOEXCEPT + { + poolSizeCount = static_cast( poolSizes_.size() ); + pPoolSizes = poolSizes_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolCreateInfo const & ) const = default; +#else + bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) && + ( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == rhs.pPoolSizes ); + } + + bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {}; + uint32_t maxSets = {}; + uint32_t poolSizeCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {}; + }; + static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DescriptorPoolCreateInfo; + }; + + struct DescriptorPoolInlineUniformBlockCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT + : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( + DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorPoolInlineUniformBlockCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfoEXT & + operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolInlineUniformBlockCreateInfoEXT & + operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DescriptorPoolInlineUniformBlockCreateInfoEXT & + setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT + { + maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; + return *this; + } + + operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const & ) const = default; +#else + bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); + } + + bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT; + const void * pNext = {}; + uint32_t maxInlineUniformBlockBindings = {}; + }; + static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == + sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), + "struct and wrapper have different size!" 
);
  static_assert( std::is_standard_layout<DescriptorPoolInlineUniformBlockCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT>
  {
    using Type = DescriptorPoolInlineUniformBlockCreateInfoEXT;
  };

  class DescriptorPool
  {
  public:
    using CType = VkDescriptorPool;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;

  public:
    VULKAN_HPP_CONSTEXPR DescriptorPool() = default;
    VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
      : m_descriptorPool( descriptorPool )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    DescriptorPool & operator=( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
    {
      m_descriptorPool = descriptorPool;
      return *this;
    }
#endif

    DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_descriptorPool = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorPool const & ) const = default;
#else
    bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool == rhs.m_descriptorPool;
    }

    bool operator!=( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool != rhs.m_descriptorPool;
    }

    bool operator<( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool < rhs.m_descriptorPool;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool == VK_NULL_HANDLE;
    }

  private:
    VkDescriptorPool m_descriptorPool = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead."
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorSetLayout + { + public: + using CType = VkDescriptorSetLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + + public: + VULKAN_HPP_CONSTEXPR DescriptorSetLayout() = default; + VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout( descriptorSetLayout ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorSetLayout & operator=( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = descriptorSetLayout; + return *this; + } +#endif + + DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayout const & ) const = default; +#else + bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == rhs.m_descriptorSetLayout; + } + + bool operator!=( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != rhs.m_descriptorSetLayout; + } + + bool operator<( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout < rhs.m_descriptorSetLayout; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == VK_NULL_HANDLE; + } + + private: + VkDescriptorSetLayout m_descriptorSetLayout = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct DescriptorSetAllocateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, + uint32_t descriptorSetCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {} ) VULKAN_HPP_NOEXCEPT + : descriptorPool( descriptorPool_ ) + , descriptorSetCount( descriptorSetCount_ ) + , pSetLayouts( pSetLayouts_ ) + {} + + VULKAN_HPP_CONSTEXPR + DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetAllocateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo( + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_ ) + : descriptorPool( descriptorPool_ ) + , descriptorSetCount( static_cast( setLayouts_.size() ) ) + , pSetLayouts( setLayouts_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & + operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DescriptorSetAllocateInfo & + setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT + { + descriptorPool = descriptorPool_; + return *this; + } + + DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + DescriptorSetAllocateInfo & + setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayouts = pSetLayouts_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo & setSetLayouts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetAllocateInfo const & ) const = default; +#else + bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext 
== rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && + ( descriptorSetCount == rhs.descriptorSetCount ) && ( pSetLayouts == rhs.pSetLayouts ); + } + + bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; + uint32_t descriptorSetCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + }; + static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DescriptorSetAllocateInfo; + }; + + struct DescriptorSetLayoutBinding + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT + : binding( binding_ ) + , descriptorType( descriptorType_ ) + , descriptorCount( descriptorCount_ ) + , stageFlags( stageFlags_ ) + , pImmutableSamplers( pImmutableSamplers_ ) + {} + + VULKAN_HPP_CONSTEXPR + DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBinding( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding( + uint32_t binding_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) + : binding( binding_ ) + , descriptorType( descriptorType_ ) + , descriptorCount( static_cast( immutableSamplers_.size() ) ) + , stageFlags( stageFlags_ ) + , pImmutableSamplers( immutableSamplers_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & + operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + DescriptorSetLayoutBinding & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + DescriptorSetLayoutBinding & + setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT + { + 
pImmutableSamplers = pImmutableSamplers_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding & setImmutableSamplers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) + VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( immutableSamplers_.size() ); + pImmutableSamplers = immutableSamplers_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBinding const & ) const = default; +#else + bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && + ( descriptorCount == rhs.descriptorCount ) && ( stageFlags == rhs.stageFlags ) && + ( pImmutableSamplers == rhs.pImmutableSamplers ); + } + + bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {}; + }; + static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + struct DescriptorSetLayoutBindingFlagsCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( + uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : bindingCount( bindingCount_ ) + , pBindingFlags( pBindingFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( + DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBindingFlagsCreateInfo( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bindingFlags_ ) + : bindingCount( static_cast( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & + operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBindingFlagsCreateInfo & + operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = bindingCount_; + return *this; + } + + DescriptorSetLayoutBindingFlagsCreateInfo & + setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT + { + pBindingFlags = pBindingFlags_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bindingFlags_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = static_cast( bindingFlags_.size() ); + pBindingFlags = bindingFlags_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default; +#else + bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && + ( pBindingFlags == rhs.pBindingFlags ); + } + + bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + const void * pNext = {}; + uint32_t bindingCount = {}; + const 
VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {}; + }; + static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == + sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DescriptorSetLayoutBindingFlagsCreateInfo; + }; + using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; + + struct DescriptorSetLayoutCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, + uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , bindingCount( bindingCount_ ) + , pBindings( pBindings_ ) + {} + + VULKAN_HPP_CONSTEXPR + DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bindings_ ) + : flags( flags_ ), bindingCount( static_cast( bindings_.size() ) ), pBindings( bindings_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & + operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DescriptorSetLayoutCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = bindingCount_; + return *this; + } + + DescriptorSetLayoutCreateInfo & + setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT + { + pBindings = pBindings_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo & setBindings( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bindings_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = static_cast( bindings_.size() ); + pBindings = bindings_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default; +#else + bool 
operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( bindingCount == rhs.bindingCount ) && ( pBindings == rhs.pBindings ); + } + + bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {}; + }; + static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DescriptorSetLayoutCreateInfo; + }; + + struct DescriptorSetLayoutSupport + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {} ) VULKAN_HPP_NOEXCEPT + : supported( supported_ ) + {} + + VULKAN_HPP_CONSTEXPR + DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutSupport( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutSupport & + operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutSupport const & ) const = default; +#else + bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported ); + } + + bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supported = {}; + }; + static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DescriptorSetLayoutSupport; + }; + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + + struct DescriptorSetVariableDescriptorCountAllocateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, + const uint32_t * pDescriptorCounts_ = {} ) VULKAN_HPP_NOEXCEPT + : descriptorSetCount( descriptorSetCount_ ) + , pDescriptorCounts( pDescriptorCounts_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( + DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountAllocateInfo( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) + : descriptorSetCount( static_cast( descriptorCounts_.size() ) ) + , pDescriptorCounts( descriptorCounts_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & + operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountAllocateInfo & + operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DescriptorSetVariableDescriptorCountAllocateInfo & + setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + DescriptorSetVariableDescriptorCountAllocateInfo & + setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorCounts = pDescriptorCounts_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( descriptorCounts_.size() ); + pDescriptorCounts = descriptorCounts_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default; +#else + bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) && + ( pDescriptorCounts == 
rhs.pDescriptorCounts ); + } + + bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + const void * pNext = {}; + uint32_t descriptorSetCount = {}; + const uint32_t * pDescriptorCounts = {}; + }; + static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == + sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DescriptorSetVariableDescriptorCountAllocateInfo; + }; + using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; + + struct DescriptorSetVariableDescriptorCountLayoutSupport + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT + : maxVariableDescriptorCount( maxVariableDescriptorCount_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( + DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountLayoutSupport( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountLayoutSupport & + operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountLayoutSupport & + operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default; +#else + bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); + } + + bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + void * pNext = {}; + uint32_t maxVariableDescriptorCount = {}; + }; + static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) == + sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DescriptorSetVariableDescriptorCountLayoutSupport; + }; + using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; + + struct DescriptorUpdateTemplateEntry + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + size_t offset_ = {}, + size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + , descriptorType( descriptorType_ ) + , offset( offset_ ) + , stride( stride_ ) + {} + + VULKAN_HPP_CONSTEXPR + DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorUpdateTemplateEntry( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & + operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + DescriptorUpdateTemplateEntry & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default; +#else + bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && + ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && + ( offset == rhs.offset ) && ( stride == rhs.stride ); + } + + bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = 
VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + size_t offset = {}; + size_t stride = {}; + }; + static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; + + struct DescriptorUpdateTemplateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorUpdateTemplateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, + uint32_t descriptorUpdateEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ) + , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ) + , templateType( templateType_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipelineLayout( pipelineLayout_ ) + , set( set_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + descriptorUpdateEntries_, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {} ) + : flags( flags_ ) + , descriptorUpdateEntryCount( static_cast( descriptorUpdateEntries_.size() ) ) + , pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ) + , templateType( templateType_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipelineLayout( pipelineLayout_ ) + , set( set_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateCreateInfo & + operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo & + setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorUpdateEntryCount = descriptorUpdateEntryCount_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorUpdateEntries = pDescriptorUpdateEntries_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT + { + descriptorUpdateEntryCount = static_cast( descriptorUpdateEntries_.size() ); + pDescriptorUpdateEntries = descriptorUpdateEntries_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorUpdateTemplateCreateInfo & + setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT + { + templateType = templateType_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo & + setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetLayout = descriptorSetLayout_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo & + setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pipelineLayout = pipelineLayout_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + { + set = set_; + return *this; + } + + operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default; +#else + bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) && + ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) && + ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( pipelineLayout == rhs.pipelineLayout ) && ( set == rhs.set ); + } + + bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {}; + uint32_t descriptorUpdateEntryCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {}; + 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + uint32_t set = {}; + }; + static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DescriptorUpdateTemplateCreateInfo; + }; + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + + struct DeviceQueueCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueCount_ = {}, + const float * pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueCount( queueCount_ ) + , pQueuePriorities( pQueuePriorities_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, + uint32_t queueFamilyIndex_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_ ) + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueCount( static_cast( queuePriorities_.size() ) ) + , pQueuePriorities( queuePriorities_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & + operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT + { + queueCount = queueCount_; + return *this; + } + + DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT + { + pQueuePriorities = pQueuePriorities_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceQueueCreateInfo & setQueuePriorities( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT + { + queueCount = static_cast( queuePriorities_.size() ); + pQueuePriorities = queuePriorities_.data(); + return 
*this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueCreateInfo const & ) const = default; +#else + bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueCount == rhs.queueCount ) && + ( pQueuePriorities == rhs.pQueuePriorities ); + } + + bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueCount = {}; + const float * pQueuePriorities = {}; + }; + static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceQueueCreateInfo; + }; + + struct PhysicalDeviceFeatures + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
shaderUniformBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT + : robustBufferAccess( robustBufferAccess_ ) + , fullDrawIndexUint32( fullDrawIndexUint32_ ) + , imageCubeArray( imageCubeArray_ ) + , independentBlend( independentBlend_ ) + , geometryShader( geometryShader_ ) + , tessellationShader( tessellationShader_ ) + , sampleRateShading( sampleRateShading_ ) + , dualSrcBlend( dualSrcBlend_ ) + , logicOp( logicOp_ ) + , multiDrawIndirect( multiDrawIndirect_ ) + , drawIndirectFirstInstance( drawIndirectFirstInstance_ ) + , depthClamp( depthClamp_ ) + , depthBiasClamp( depthBiasClamp_ ) + , fillModeNonSolid( fillModeNonSolid_ ) + , depthBounds( depthBounds_ ) + , wideLines( wideLines_ ) + , largePoints( largePoints_ ) + , alphaToOne( alphaToOne_ ) + , multiViewport( multiViewport_ ) + , samplerAnisotropy( samplerAnisotropy_ ) + , textureCompressionETC2( textureCompressionETC2_ ) + , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ) + , textureCompressionBC( textureCompressionBC_ ) + , occlusionQueryPrecise( occlusionQueryPrecise_ ) + , pipelineStatisticsQuery( pipelineStatisticsQuery_ ) + , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ) + , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ) + , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ) + , shaderImageGatherExtended( shaderImageGatherExtended_ ) + , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ) + , shaderStorageImageMultisample( shaderStorageImageMultisample_ ) + , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ) + , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ) + , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ) + , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ) + , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ) + , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ) + , shaderClipDistance( shaderClipDistance_ ) + , shaderCullDistance( shaderCullDistance_ ) + , shaderFloat64( shaderFloat64_ ) + , shaderInt64( shaderInt64_ ) + , shaderInt16( shaderInt16_ ) + , shaderResourceResidency( 
shaderResourceResidency_ ) + , shaderResourceMinLod( shaderResourceMinLod_ ) + , sparseBinding( sparseBinding_ ) + , sparseResidencyBuffer( sparseResidencyBuffer_ ) + , sparseResidencyImage2D( sparseResidencyImage2D_ ) + , sparseResidencyImage3D( sparseResidencyImage3D_ ) + , sparseResidency2Samples( sparseResidency2Samples_ ) + , sparseResidency4Samples( sparseResidency4Samples_ ) + , sparseResidency8Samples( sparseResidency8Samples_ ) + , sparseResidency16Samples( sparseResidency16Samples_ ) + , sparseResidencyAliased( sparseResidencyAliased_ ) + , variableMultisampleRate( variableMultisampleRate_ ) + , inheritedQueries( inheritedQueries_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFeatures & + setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustBufferAccess = robustBufferAccess_; + return *this; + } + + PhysicalDeviceFeatures & + setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT + { + fullDrawIndexUint32 = fullDrawIndexUint32_; + return *this; + } + + PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT + { + imageCubeArray = imageCubeArray_; + return *this; + } + + PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT + { + independentBlend = independentBlend_; + return *this; + } + + PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT + { + geometryShader = geometryShader_; + return *this; + } + + PhysicalDeviceFeatures & + setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + tessellationShader = tessellationShader_; + return *this; + } + + PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT + { + sampleRateShading = sampleRateShading_; + return *this; + } + + PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT + { + dualSrcBlend = dualSrcBlend_; + return *this; + } + + PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT + { + logicOp = logicOp_; + return *this; + } + + PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT + { + multiDrawIndirect = multiDrawIndirect_; + return *this; + } + + PhysicalDeviceFeatures & + setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT + { + drawIndirectFirstInstance = drawIndirectFirstInstance_; + return *this; + } + + PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthClamp = depthClamp_; + return *this; + } + + PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) 
VULKAN_HPP_NOEXCEPT + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT + { + fillModeNonSolid = fillModeNonSolid_; + return *this; + } + + PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT + { + depthBounds = depthBounds_; + return *this; + } + + PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT + { + wideLines = wideLines_; + return *this; + } + + PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT + { + largePoints = largePoints_; + return *this; + } + + PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT + { + alphaToOne = alphaToOne_; + return *this; + } + + PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT + { + multiViewport = multiViewport_; + return *this; + } + + PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT + { + samplerAnisotropy = samplerAnisotropy_; + return *this; + } + + PhysicalDeviceFeatures & + setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionETC2 = textureCompressionETC2_; + return *this; + } + + PhysicalDeviceFeatures & + setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionASTC_LDR = textureCompressionASTC_LDR_; + return *this; + } + + PhysicalDeviceFeatures & + setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionBC = textureCompressionBC_; + return *this; + } + + PhysicalDeviceFeatures & + setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT + { + occlusionQueryPrecise = occlusionQueryPrecise_; + return *this; + } + + PhysicalDeviceFeatures & + setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatisticsQuery = pipelineStatisticsQuery_; + return *this; + } + + PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + { + vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; + return *this; + } + + PhysicalDeviceFeatures & + setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + { + fragmentStoresAndAtomics = fragmentStoresAndAtomics_; + return *this; + } + + PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT + { + shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; + return *this; + } + + PhysicalDeviceFeatures & + setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageGatherExtended = shaderImageGatherExtended_; + return *this; + } + + PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; + return *this; + } + + PhysicalDeviceFeatures & 
setShaderStorageImageMultisample( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageMultisample = shaderStorageImageMultisample_; + return *this; + } + + PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; + return *this; + } + + PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; + return *this; + } + + PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures & + setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT + { + shaderClipDistance = shaderClipDistance_; + return *this; + } + + PhysicalDeviceFeatures & + setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT + { + shaderCullDistance = shaderCullDistance_; + return *this; + } + + PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat64 = shaderFloat64_; + return *this; + } + + PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt64 = shaderInt64_; + return *this; + } + + PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt16 = shaderInt16_; + return *this; + } + + PhysicalDeviceFeatures & + setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT + { + shaderResourceResidency = shaderResourceResidency_; + return *this; + } + + PhysicalDeviceFeatures & + setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT + { + shaderResourceMinLod = shaderResourceMinLod_; + return *this; + } + + PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT + { + sparseBinding = sparseBinding_; + return *this; + } + + PhysicalDeviceFeatures & + setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyBuffer = sparseResidencyBuffer_; + return *this; + } + + PhysicalDeviceFeatures & + 
setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyImage2D = sparseResidencyImage2D_; + return *this; + } + + PhysicalDeviceFeatures & + setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyImage3D = sparseResidencyImage3D_; + return *this; + } + + PhysicalDeviceFeatures & + setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency2Samples = sparseResidency2Samples_; + return *this; + } + + PhysicalDeviceFeatures & + setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency4Samples = sparseResidency4Samples_; + return *this; + } + + PhysicalDeviceFeatures & + setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency8Samples = sparseResidency8Samples_; + return *this; + } + + PhysicalDeviceFeatures & + setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency16Samples = sparseResidency16Samples_; + return *this; + } + + PhysicalDeviceFeatures & + setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyAliased = sparseResidencyAliased_; + return *this; + } + + PhysicalDeviceFeatures & + setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT + { + variableMultisampleRate = variableMultisampleRate_; + return *this; + } + + PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT + { + inheritedQueries = inheritedQueries_; + return *this; + } + + operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) && + ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) && + ( geometryShader == rhs.geometryShader ) && ( tessellationShader == rhs.tessellationShader ) && + ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) && + ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) && + ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) && ( depthClamp == rhs.depthClamp ) && + ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) && + ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) && + ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) && + ( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) && + ( textureCompressionETC2 == rhs.textureCompressionETC2 ) && + ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) && + ( textureCompressionBC == rhs.textureCompressionBC ) && + ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) && + ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) && + ( 
vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) && + ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) && + ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) && + ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) && + ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) && + ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) && + ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) && + ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) && + ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) && + ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) && + ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) && + ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) && + ( shaderClipDistance == rhs.shaderClipDistance ) && ( shaderCullDistance == rhs.shaderCullDistance ) && + ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) && + ( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) && + ( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) && + ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) && + ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) && + ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) && + ( sparseResidency2Samples == rhs.sparseResidency2Samples ) && + ( sparseResidency4Samples == rhs.sparseResidency4Samples ) && + ( sparseResidency8Samples == rhs.sparseResidency8Samples ) && + ( sparseResidency16Samples == rhs.sparseResidency16Samples ) && + ( sparseResidencyAliased == rhs.sparseResidencyAliased ) && + ( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries ); + } + + bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; + VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 
vertexPipelineStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; + }; + static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct DeviceCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, + uint32_t queueCreateInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {} ) + VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , queueCreateInfoCount( queueCreateInfoCount_ ) + , pQueueCreateInfos( pQueueCreateInfos_ ) + , enabledLayerCount( enabledLayerCount_ ) + , ppEnabledLayerNames( ppEnabledLayerNames_ ) + , enabledExtensionCount( enabledExtensionCount_ ) + , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + , pEnabledFeatures( pEnabledFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo( + VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + queueCreateInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {} ) + : flags( flags_ ) + , queueCreateInfoCount( static_cast( queueCreateInfos_.size() ) ) + , pQueueCreateInfos( queueCreateInfos_.data() ) + , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) + , ppEnabledLayerNames( pEnabledLayerNames_.data() ) + , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) + , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) + , pEnabledFeatures( pEnabledFeatures_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + queueCreateInfoCount = queueCreateInfoCount_; + return *this; + } + + DeviceCreateInfo & + setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + { + pQueueCreateInfos = pQueueCreateInfos_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setQueueCreateInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + { + queueCreateInfoCount = static_cast( queueCreateInfos_.size() ); + pQueueCreateInfos = queueCreateInfos_.data(); + return *this; + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = enabledLayerCount_; + return *this; + } + + DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setPEnabledExtensionNames( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) + VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceCreateInfo & + setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT + { + pEnabledFeatures = pEnabledFeatures_; + return *this; + } + + operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceCreateInfo const & ) const = default; +#else + bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) && + ( enabledLayerCount == rhs.enabledLayerCount ) && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) && + ( enabledExtensionCount == rhs.enabledExtensionCount ) && + ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ) && ( pEnabledFeatures == rhs.pEnabledFeatures ); + } + + bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {}; + uint32_t queueCreateInfoCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {}; + uint32_t enabledLayerCount = {}; + const char * const * ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char * const * ppEnabledExtensionNames = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {}; + }; + static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType<StructureType, StructureType::eDeviceCreateInfo> + { + using Type = DeviceCreateInfo; + }; + + struct DeviceDeviceMemoryReportCreateInfoEXT + { + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pfnUserCallback( pfnUserCallback_ ) + , pUserData( pUserData_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDeviceMemoryReportCreateInfoEXT( + *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & + operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceDeviceMemoryReportCreateInfoEXT & + operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ); + return *this; + } + + DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceDeviceMemoryReportCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DeviceDeviceMemoryReportCreateInfoEXT & + setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + { + pfnUserCallback = pfnUserCallback_; + return *this; + } + + DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } + + operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT *>( this ); + } + + operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceDeviceMemoryReportCreateInfoEXT const & ) const = default; +#else + bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); + } + + bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; + }; + static_assert( sizeof( DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DeviceDeviceMemoryReportCreateInfoEXT>::value, + "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = DeviceDeviceMemoryReportCreateInfoEXT; + }; + + struct DeviceDiagnosticsConfigCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( + VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & + operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceDiagnosticsConfigCreateInfoNV & + operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceDiagnosticsConfigCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default; +#else + bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + } + + bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {}; + }; + static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceDiagnosticsConfigCreateInfoNV; + }; + + struct DeviceEventInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = + VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT + : deviceEvent( deviceEvent_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceEventInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & + operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT + { + deviceEvent = deviceEvent_; + return *this; + } + + operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceEventInfoEXT const & ) const = default; +#else + bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent ); + } + + bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug; + }; + static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceEventInfoEXT; + }; + + struct DeviceGroupBindSparseInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, + uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : resourceDeviceIndex( resourceDeviceIndex_ ) + , memoryDeviceIndex( memoryDeviceIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupBindSparseInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & + operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + resourceDeviceIndex = resourceDeviceIndex_; + return *this; + } + + DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryDeviceIndex = memoryDeviceIndex_; + return *this; + } + + operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default; +#else + bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) && + ( memoryDeviceIndex == rhs.memoryDeviceIndex ); + } + + bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; + const void * pNext = {}; + uint32_t resourceDeviceIndex = {}; + uint32_t memoryDeviceIndex = {}; + }; + static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
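+  // Usage sketch: DeviceGroupBindSparseInfo extends BindSparseInfo via pNext so the resource and its
+  // backing memory can come from different devices in a device group. `bindSparseInfo` is assumed.
+  //   vk::DeviceGroupBindSparseInfo groupBind;
+  //   groupBind.setResourceDeviceIndex( 0 ).setMemoryDeviceIndex( 1 );
+  //   bindSparseInfo.setPNext( &groupBind );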
); + + template <> + struct CppType + { + using Type = DeviceGroupBindSparseInfo; + }; + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; + + struct DeviceGroupCommandBufferBeginInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceGroupCommandBufferBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceMask( deviceMask_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & + operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } + + operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default; +#else + bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ); + } + + bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; + }; + static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
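+  // Usage sketch: chained into an assumed vk::CommandBufferBeginInfo `beginInfo`; deviceMask selects
+  // which physical devices of the group execute the recorded commands.
+  //   vk::DeviceGroupCommandBufferBeginInfo groupBegin;
+  //   groupBegin.setDeviceMask( 0x1 );
+  //   beginInfo.setPNext( &groupBegin );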
); + + template <> + struct CppType + { + using Type = DeviceGroupCommandBufferBeginInfo; + }; + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; + + class DisplayKHR + { + public: + using CType = VkDisplayKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + + public: + VULKAN_HPP_CONSTEXPR DisplayKHR() = default; + VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT : m_displayKHR( displayKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DisplayKHR & operator=( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = displayKHR; + return *this; + } +#endif + + DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayKHR const & ) const = default; +#else + bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR == rhs.m_displayKHR; + } + + bool operator!=( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR != rhs.m_displayKHR; + } + + bool operator<( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR < rhs.m_displayKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayKHR m_displayKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct PerformanceConfigurationAcquireInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePerformanceConfigurationAcquireInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) + VULKAN_HPP_NOEXCEPT : type( type_ ) + {} + + VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( + PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) + VULKAN_HPP_NOEXCEPT + : PerformanceConfigurationAcquireInfoINTEL( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & + operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceConfigurationAcquireInfoINTEL & + operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PerformanceConfigurationAcquireInfoINTEL & + setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ); + } + + bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; + }; + static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == + sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PerformanceConfigurationAcquireInfoINTEL; + }; + + class PerformanceConfigurationINTEL + { + public: + using CType = VkPerformanceConfigurationINTEL; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() = default; + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PerformanceConfigurationINTEL & + operator=( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = performanceConfigurationINTEL; + return *this; + } +#endif + + PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceConfigurationINTEL const & ) const = default; +#else + bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; + } + + bool operator!=( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; + } + + bool operator<( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == VK_NULL_HANDLE; + } + + private: + VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == + sizeof( VkPerformanceConfigurationINTEL ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct MemoryAllocateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : allocationSize( allocationSize_ ) + , memoryTypeIndex( memoryTypeIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryAllocateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & + operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT + { + allocationSize = allocationSize_; + return *this; + } + + MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryTypeIndex = memoryTypeIndex_; + return *this; + } + + operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryAllocateInfo const & ) const = default; +#else + bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && + ( memoryTypeIndex == rhs.memoryTypeIndex ); + } + + bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeIndex = {}; + }; + static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
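+  // Usage sketch, assuming a `device`, a vk::MemoryRequirements `requirements`, and a previously
+  // selected `memoryTypeIndex`:
+  //   vk::MemoryAllocateInfo allocInfo;
+  //   allocInfo.setAllocationSize( requirements.size ).setMemoryTypeIndex( memoryTypeIndex );
+  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );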
); + + template <> + struct CppType + { + using Type = MemoryAllocateInfo; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoBindMemoryKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBindMemoryKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR( uint32_t memoryBindIndex_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryBindIndex( memoryBindIndex_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , memorySize( memorySize_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBindMemoryKHR( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoBindMemoryKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & + operator=( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBindMemoryKHR & operator=( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoBindMemoryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoBindMemoryKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryBindIndex = memoryBindIndex_; + return *this; + } + + VideoBindMemoryKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VideoBindMemoryKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VideoBindMemoryKHR & setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT + { + memorySize = memorySize_; + return *this; + } + + operator VkVideoBindMemoryKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoBindMemoryKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoBindMemoryKHR const & ) const = default; +# else + bool operator==( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && + ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize ); + } + + bool operator!=( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBindMemoryKHR; + const void * pNext = {}; + uint32_t memoryBindIndex = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memorySize = {}; + }; + static_assert( sizeof( VideoBindMemoryKHR ) == sizeof( VkVideoBindMemoryKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoBindMemoryKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class DeferredOperationKHR + { + public: + using CType = VkDeferredOperationKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR DeferredOperationKHR() = default; + VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR( deferredOperationKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DeferredOperationKHR & operator=( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = deferredOperationKHR; + return *this; + } +#endif + + DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeferredOperationKHR const & ) const = default; +#else + bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == rhs.m_deferredOperationKHR; + } + + bool operator!=( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != rhs.m_deferredOperationKHR; + } + + bool operator<( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR < rhs.m_deferredOperationKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == VK_NULL_HANDLE; + } + + private: + VkDeferredOperationKHR m_deferredOperationKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PipelineCache + { + public: + using CType = VkPipelineCache; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + + public: + VULKAN_HPP_CONSTEXPR PipelineCache() = default; + VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT + : m_pipelineCache( pipelineCache ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PipelineCache & operator=( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = pipelineCache; + return *this; + } +#endif + + PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCache const & ) const = default; +#else + bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == rhs.m_pipelineCache; + } + + bool operator!=( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != rhs.m_pipelineCache; + } + + bool operator<( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache < rhs.m_pipelineCache; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == VK_NULL_HANDLE; + } + + private: + VkPipelineCache m_pipelineCache = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct EventCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : EventCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( EventCreateInfo const & ) const = default; +#else + bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + } + + bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; + }; + static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
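+  // Usage sketch, assuming a `device`: an event carries no state beyond its flags, so a
+  // default-constructed create info is usually all that is needed.
+  //   vk::Event event = device.createEvent( vk::EventCreateInfo{} );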
); + + template <> + struct CppType + { + using Type = EventCreateInfo; + }; + + struct FenceCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FenceCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceCreateInfo const & ) const = default; +#else + bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + } + + bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {}; + }; + static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
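+  // Usage sketch, assuming a `device`: creating the fence already signaled avoids special-casing
+  // the first wait in a render loop.
+  //   vk::FenceCreateInfo fenceInfo( vk::FenceCreateFlagBits::eSignaled );
+  //   vk::Fence inFlightFence = device.createFence( fenceInfo );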
); + + template <> + struct CppType + { + using Type = FenceCreateInfo; + }; + + struct FramebufferCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layers_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , renderPass( renderPass_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , width( width_ ) + , height( height_ ) + , layers( layers_ ) + {} + + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferCreateInfo( + VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layers_ = {} ) + : flags( flags_ ) + , renderPass( renderPass_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , width( width_ ) + , height( height_ ) + , layers( layers_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & + operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) + VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT + { + layers = layers_; + return *this; + } + + operator 
VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferCreateInfo const & ) const = default; +#else + bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( renderPass == rhs.renderPass ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( layers == rhs.layers ); + } + + bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layers = {}; + }; + static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = FramebufferCreateInfo; + }; + + struct VertexInputBindingDescription + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription( uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = + VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT + : binding( binding_ ) + , stride( stride_ ) + , inputRate( inputRate_ ) + {} + + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & + operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + { + inputRate = inputRate_; + return *this; + } + + operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDescription const & ) const = default; +#else + bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( 
binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ); + } + + bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t binding = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; + }; + static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + struct VertexInputAttributeDescription + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VertexInputAttributeDescription( uint32_t location_ = {}, + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT + : location( location_ ) + , binding( binding_ ) + , format( format_ ) + , offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR + VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputAttributeDescription( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & + operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + { + location = location_; + return *this; + } + + VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputAttributeDescription const & ) const = default; +#else + bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && + ( offset == rhs.offset ); + } + + bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; + }; + static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
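+  // Usage sketch, assuming a tightly packed `struct Vertex { float pos[2]; float color[3]; };`
+  // one binding with two attributes, matching the shader's layout(location = N) qualifiers:
+  //   vk::VertexInputBindingDescription binding( 0, sizeof( Vertex ), vk::VertexInputRate::eVertex );
+  //   std::array<vk::VertexInputAttributeDescription, 2> attributes = {
+  //     vk::VertexInputAttributeDescription( 0, 0, vk::Format::eR32G32Sfloat, offsetof( Vertex, pos ) ),
+  //     vk::VertexInputAttributeDescription( 1, 0, vk::Format::eR32G32B32Sfloat, offsetof( Vertex, color ) )
+  //   };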
); + + struct PipelineVertexInputStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineVertexInputStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, + uint32_t vertexBindingDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {}, + uint32_t vertexAttributeDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {} ) + VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ) + , pVertexBindingDescriptions( pVertexBindingDescriptions_ ) + , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ) + , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineVertexInputStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexBindingDescriptions_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexAttributeDescriptions_ = {} ) + : flags( flags_ ) + , vertexBindingDescriptionCount( static_cast( vertexBindingDescriptions_.size() ) ) + , pVertexBindingDescriptions( vertexBindingDescriptions_.data() ) + , vertexAttributeDescriptionCount( static_cast( vertexAttributeDescriptions_.size() ) ) + , pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputStateCreateInfo & + operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineVertexInputStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineVertexInputStateCreateInfo & + setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDescriptionCount = vertexBindingDescriptionCount_; + return *this; + } + + PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + pVertexBindingDescriptions = pVertexBindingDescriptions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDescriptionCount = static_cast( 
vertexBindingDescriptions_.size() ); + pVertexBindingDescriptions = vertexBindingDescriptions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineVertexInputStateCreateInfo & + setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; + return *this; + } + + PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + pVertexAttributeDescriptions = pVertexAttributeDescriptions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = static_cast( vertexAttributeDescriptions_.size() ); + pVertexAttributeDescriptions = vertexAttributeDescriptions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && + ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) && + ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && + ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); + } + + bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; + uint32_t vertexBindingDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {}; + uint32_t vertexAttributeDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {}; + }; + static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
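+  // Usage sketch, continuing the `binding` / `attributes` sketch above: the enhanced-mode
+  // constructor fills both count/pointer pairs from the array proxies in one go.
+  //   vk::PipelineVertexInputStateCreateInfo vertexInput( {}, binding, attributes );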
); + + template <> + struct CppType + { + using Type = PipelineVertexInputStateCreateInfo; + }; + + struct PipelineInputAssemblyStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineInputAssemblyStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , topology( topology_ ) + , primitiveRestartEnable( primitiveRestartEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & + operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInputAssemblyStateCreateInfo & + operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo & + setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT + { + topology = topology_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo & + setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT + { + primitiveRestartEnable = primitiveRestartEnable_; + return *this; + } + + operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( topology == rhs.topology ) && ( primitiveRestartEnable == rhs.primitiveRestartEnable ); + } + + bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList; + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; + }; + static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( 
VkPipelineInputAssemblyStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineInputAssemblyStateCreateInfo; + }; + + struct PipelineTessellationStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineTessellationStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, + uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , patchControlPoints( patchControlPoints_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineTessellationStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & + operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationStateCreateInfo & + operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineTessellationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT + { + patchControlPoints = patchControlPoints_; + return *this; + } + + operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( patchControlPoints == rhs.patchControlPoints ); + } + + bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; + uint32_t patchControlPoints = {}; + }; + static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
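+  // Usage sketch: only relevant when the pipeline contains tessellation shaders; patchControlPoints
+  // is the number of control points per input patch.
+  //   vk::PipelineTessellationStateCreateInfo tessellation( {}, 3 );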
); + + template <> + struct CppType + { + using Type = PipelineTessellationStateCreateInfo; + }; + + struct PipelineViewportStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {}, + uint32_t scissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , viewportCount( viewportCount_ ) + , pViewports( pViewports_ ) + , scissorCount( scissorCount_ ) + , pScissors( pScissors_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ = {} ) + : flags( flags_ ) + , viewportCount( static_cast( viewports_.size() ) ) + , pViewports( viewports_.data() ) + , scissorCount( static_cast( scissors_.size() ) ) + , pScissors( scissors_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & + operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineViewportStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + PipelineViewportStateCreateInfo & + setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT + { + pViewports = pViewports_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & setViewports( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_ ) + VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewports_.size() ); + pViewports = viewports_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT + { + scissorCount = scissorCount_; + return *this; + } + + PipelineViewportStateCreateInfo & + setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT + { + pScissors = pScissors_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & + setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
scissors_ ) + VULKAN_HPP_NOEXCEPT + { + scissorCount = static_cast( scissors_.size() ); + pScissors = scissors_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( viewportCount == rhs.viewportCount ) && ( pViewports == rhs.pViewports ) && + ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors ); + } + + bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {}; + uint32_t scissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {}; + }; + static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineViewportStateCreateInfo; + }; + + struct PipelineRasterizationStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, + VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, + float depthBiasConstantFactor_ = {}, + float depthBiasClamp_ = {}, + float depthBiasSlopeFactor_ = {}, + float lineWidth_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , depthClampEnable( depthClampEnable_ ) + , rasterizerDiscardEnable( rasterizerDiscardEnable_ ) + , polygonMode( polygonMode_ ) + , cullMode( cullMode_ ) + , frontFace( frontFace_ ) + , depthBiasEnable( depthBiasEnable_ ) + , depthBiasConstantFactor( depthBiasConstantFactor_ ) + , depthBiasClamp( depthBiasClamp_ ) + , depthBiasSlopeFactor( depthBiasSlopeFactor_ ) + , lineWidth( lineWidth_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + 
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateCreateInfo & + operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineRasterizationStateCreateInfo & + setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClampEnable = depthClampEnable_; + return *this; + } + + PipelineRasterizationStateCreateInfo & + setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT + { + rasterizerDiscardEnable = rasterizerDiscardEnable_; + return *this; + } + + PipelineRasterizationStateCreateInfo & + setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT + { + polygonMode = polygonMode_; + return *this; + } + + PipelineRasterizationStateCreateInfo & + setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT + { + cullMode = cullMode_; + return *this; + } + + PipelineRasterizationStateCreateInfo & + setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT + { + frontFace = frontFace_; + return *this; + } + + PipelineRasterizationStateCreateInfo & + setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasEnable = depthBiasEnable_; + return *this; + } + + PipelineRasterizationStateCreateInfo & + setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasConstantFactor = depthBiasConstantFactor_; + return *this; + } + + PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasSlopeFactor = depthBiasSlopeFactor_; + return *this; + } + + PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT + { + lineWidth = lineWidth_; + return *this; + } + + operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( depthClampEnable == rhs.depthClampEnable ) && + ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) && + ( cullMode == rhs.cullMode ) && ( frontFace == rhs.frontFace ) && + ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) && + ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) && + ( 
lineWidth == rhs.lineWidth ); + } + + bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {}; + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill; + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {}; + VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {}; + float depthBiasConstantFactor = {}; + float depthBiasClamp = {}; + float depthBiasSlopeFactor = {}; + float lineWidth = {}; + }; + static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineRasterizationStateCreateInfo; + }; + + struct PipelineMultisampleStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineMultisampleStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, + float minSampleShading_ = {}, + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , rasterizationSamples( rasterizationSamples_ ) + , sampleShadingEnable( sampleShadingEnable_ ) + , minSampleShading( minSampleShading_ ) + , pSampleMask( pSampleMask_ ) + , alphaToCoverageEnable( alphaToCoverageEnable_ ) + , alphaToOneEnable( alphaToOneEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineMultisampleStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineMultisampleStateCreateInfo & + operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineMultisampleStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineMultisampleStateCreateInfo & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) 
VULKAN_HPP_NOEXCEPT + { + rasterizationSamples = rasterizationSamples_; + return *this; + } + + PipelineMultisampleStateCreateInfo & + setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT + { + sampleShadingEnable = sampleShadingEnable_; + return *this; + } + + PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT + { + minSampleShading = minSampleShading_; + return *this; + } + + PipelineMultisampleStateCreateInfo & + setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT + { + pSampleMask = pSampleMask_; + return *this; + } + + PipelineMultisampleStateCreateInfo & + setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT + { + alphaToCoverageEnable = alphaToCoverageEnable_; + return *this; + } + + PipelineMultisampleStateCreateInfo & + setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT + { + alphaToOneEnable = alphaToOneEnable_; + return *this; + } + + operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( rasterizationSamples == rhs.rasterizationSamples ) && + ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) && + ( pSampleMask == rhs.pSampleMask ) && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) && + ( alphaToOneEnable == rhs.alphaToOneEnable ); + } + + bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {}; + float minSampleShading = {}; + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {}; + }; + static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineMultisampleStateCreateInfo; + }; + + struct StencilOpState + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + uint32_t compareMask_ = {}, + uint32_t writeMask_ = {}, + uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT + : failOp( failOp_ ) + , passOp( passOp_ ) + , depthFailOp( depthFailOp_ ) + , compareOp( compareOp_ ) + , compareMask( compareMask_ ) + , writeMask( writeMask_ ) + , reference( reference_ ) + {} + + VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + : StencilOpState( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT + { + failOp = failOp_; + return *this; + } + + StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT + { + passOp = passOp_; + return *this; + } + + StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT + { + depthFailOp = depthFailOp_; + return *this; + } + + StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT + { + compareOp = compareOp_; + return *this; + } + + StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT + { + compareMask = compareMask_; + return *this; + } + + StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT + { + writeMask = writeMask_; + return *this; + } + + StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT + { + reference = reference_; + return *this; + } + + operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StencilOpState const & ) const = default; +#else + bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) && + ( compareOp == rhs.compareOp ) && ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) && + ( reference == rhs.reference ); + } + + bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + uint32_t compareMask = {}; + uint32_t writeMask = {}; 
+ uint32_t reference = {}; + }; + static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PipelineDepthStencilStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineDepthStencilStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, + VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, + VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, + float minDepthBounds_ = {}, + float maxDepthBounds_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , depthTestEnable( depthTestEnable_ ) + , depthWriteEnable( depthWriteEnable_ ) + , depthCompareOp( depthCompareOp_ ) + , depthBoundsTestEnable( depthBoundsTestEnable_ ) + , stencilTestEnable( stencilTestEnable_ ) + , front( front_ ) + , back( back_ ) + , minDepthBounds( minDepthBounds_ ) + , maxDepthBounds( maxDepthBounds_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineDepthStencilStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDepthStencilStateCreateInfo & + operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineDepthStencilStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineDepthStencilStateCreateInfo & + setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthTestEnable = depthTestEnable_; + return *this; + } + + PipelineDepthStencilStateCreateInfo & + setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthWriteEnable = depthWriteEnable_; + return *this; + } + + PipelineDepthStencilStateCreateInfo & + setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT + { + depthCompareOp = depthCompareOp_; + return *this; + } + + PipelineDepthStencilStateCreateInfo & + setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthBoundsTestEnable = depthBoundsTestEnable_; + return *this; + } + + PipelineDepthStencilStateCreateInfo & + setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + stencilTestEnable = stencilTestEnable_; + return *this; 
+    }
+
+    PipelineDepthStencilStateCreateInfo &
+      setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
+    {
+      front = front_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo &
+      setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
+    {
+      back = back_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minDepthBounds = minDepthBounds_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDepthBounds = maxDepthBounds_;
+      return *this;
+    }
+
+    operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>( this );
+    }
+
+    operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( depthTestEnable == rhs.depthTestEnable ) && ( depthWriteEnable == rhs.depthWriteEnable ) &&
+             ( depthCompareOp == rhs.depthCompareOp ) && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) &&
+             ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) && ( back == rhs.back ) &&
+             ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds );
+    }
+
+    bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
+    VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
+    VULKAN_HPP_NAMESPACE::StencilOpState front = {};
+    VULKAN_HPP_NAMESPACE::StencilOpState back = {};
+    float minDepthBounds = {};
+    float maxDepthBounds = {};
+  };
+  static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineDepthStencilStateCreateInfo>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PipelineDepthStencilStateCreateInfo; + }; + + struct PipelineColorBlendAttachmentState + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( + VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT + : blendEnable( blendEnable_ ) + , srcColorBlendFactor( srcColorBlendFactor_ ) + , dstColorBlendFactor( dstColorBlendFactor_ ) + , colorBlendOp( colorBlendOp_ ) + , srcAlphaBlendFactor( srcAlphaBlendFactor_ ) + , dstAlphaBlendFactor( dstAlphaBlendFactor_ ) + , alphaBlendOp( alphaBlendOp_ ) + , colorWriteMask( colorWriteMask_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendAttachmentState( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT + { + blendEnable = blendEnable_; + return *this; + } + + PipelineColorBlendAttachmentState & + setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcColorBlendFactor = srcColorBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState & + setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstColorBlendFactor = dstColorBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState & + setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + colorBlendOp = colorBlendOp_; + return *this; + } + + PipelineColorBlendAttachmentState & + setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcAlphaBlendFactor = srcAlphaBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState & + setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstAlphaBlendFactor = dstAlphaBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState & + setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + alphaBlendOp = alphaBlendOp_; + return *this; + } + + PipelineColorBlendAttachmentState & + setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT + { + 
colorWriteMask = colorWriteMask_;
+      return *this;
+    }
+
+    operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState *>( this );
+    }
+
+    operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendAttachmentState *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default;
+#else
+    bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) &&
+             ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) &&
+             ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) &&
+             ( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask );
+    }
+
+    bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
+    VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
+  };
+  static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendAttachmentState>::value,
+                 "struct wrapper is not a standard layout!"
); + + struct PipelineColorBlendStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineColorBlendStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, + VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {}, + std::array const & blendConstants_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , logicOpEnable( logicOpEnable_ ) + , logicOp( logicOp_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , blendConstants( blendConstants_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorBlendStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, + VULKAN_HPP_NAMESPACE::LogicOp logicOp_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_, + std::array const & blendConstants_ = {} ) + : flags( flags_ ) + , logicOpEnable( logicOpEnable_ ) + , logicOp( logicOp_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , blendConstants( blendConstants_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & + operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineColorBlendStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineColorBlendStateCreateInfo & + setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT + { + logicOpEnable = logicOpEnable_; + return *this; + } + + PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT + { + logicOp = logicOp_; + return *this; + } + + PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + PipelineColorBlendStateCreateInfo & setPAttachments( + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorBlendStateCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const 
VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments    = attachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float, 4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blendConstants = blendConstants_;
+      return *this;
+    }
+
+    operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>( this );
+    }
+
+    operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( logicOpEnable == rhs.logicOpEnable ) && ( logicOp == rhs.logicOp ) &&
+             ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
+             ( blendConstants == rhs.blendConstants );
+    }
+
+    bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
+    VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
+  };
+  static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendStateCreateInfo>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PipelineColorBlendStateCreateInfo; + }; + + struct PipelineDynamicStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, + uint32_t dynamicStateCount_ = {}, + const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , dynamicStateCount( dynamicStateCount_ ) + , pDynamicStates( pDynamicStates_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineDynamicStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDynamicStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_ ) + : flags( flags_ ) + , dynamicStateCount( static_cast( dynamicStates_.size() ) ) + , pDynamicStates( dynamicStates_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & + operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineDynamicStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT + { + dynamicStateCount = dynamicStateCount_; + return *this; + } + + PipelineDynamicStateCreateInfo & + setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicStates = pDynamicStates_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDynamicStateCreateInfo & setDynamicStates( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_ ) + VULKAN_HPP_NOEXCEPT + { + dynamicStateCount = static_cast( dynamicStates_.size() ); + pDynamicStates = dynamicStates_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( dynamicStateCount == rhs.dynamicStateCount ) && ( pDynamicStates == rhs.pDynamicStates ); + } + + bool operator!=( PipelineDynamicStateCreateInfo const 
& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {}; + uint32_t dynamicStateCount = {}; + const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {}; + }; + static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineDynamicStateCreateInfo; + }; + + struct GraphicsPipelineCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , pVertexInputState( pVertexInputState_ ) + , pInputAssemblyState( pInputAssemblyState_ ) + , pTessellationState( pTessellationState_ ) + , pViewportState( pViewportState_ ) + , pRasterizationState( pRasterizationState_ ) + , pMultisampleState( pMultisampleState_ ) + , pDepthStencilState( pDepthStencilState_ ) + , pColorBlendState( pColorBlendState_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , renderPass( renderPass_ ) + , subpass( subpass_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsPipelineCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * 
pInputAssemblyState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) + : flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pVertexInputState( pVertexInputState_ ) + , pInputAssemblyState( pInputAssemblyState_ ) + , pTessellationState( pTessellationState_ ) + , pViewportState( pViewportState_ ) + , pRasterizationState( pRasterizationState_ ) + , pMultisampleState( pMultisampleState_ ) + , pDepthStencilState( pDepthStencilState_ ) + , pColorBlendState( pColorBlendState_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , renderPass( renderPass_ ) + , subpass( subpass_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + GraphicsPipelineCreateInfo & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineCreateInfo & setStages( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GraphicsPipelineCreateInfo & setPVertexInputState( + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + { + pVertexInputState = pVertexInputState_; + return *this; + } + + GraphicsPipelineCreateInfo & setPInputAssemblyState( + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT + { + pInputAssemblyState = pInputAssemblyState_; + return *this; + } + + GraphicsPipelineCreateInfo & setPTessellationState( + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT + { + pTessellationState = 
pTessellationState_; + return *this; + } + + GraphicsPipelineCreateInfo & setPViewportState( + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT + { + pViewportState = pViewportState_; + return *this; + } + + GraphicsPipelineCreateInfo & setPRasterizationState( + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT + { + pRasterizationState = pRasterizationState_; + return *this; + } + + GraphicsPipelineCreateInfo & setPMultisampleState( + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT + { + pMultisampleState = pMultisampleState_; + return *this; + } + + GraphicsPipelineCreateInfo & setPDepthStencilState( + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilState = pDepthStencilState_; + return *this; + } + + GraphicsPipelineCreateInfo & setPColorBlendState( + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT + { + pColorBlendState = pColorBlendState_; + return *this; + } + + GraphicsPipelineCreateInfo & setPDynamicState( + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicState = pDynamicState_; + return *this; + } + + GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + { + subpass = subpass_; + return *this; + } + + GraphicsPipelineCreateInfo & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default; +#else + bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) && + ( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) && + ( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) && + ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) && + ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) && + ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+    const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t subpass = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+  };
+  static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GraphicsPipelineCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
+  {
+    using Type = GraphicsPipelineCreateInfo;
+  };
+
+  struct ImageCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageCreateInfo(
+      VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
+      VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+      VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
+      uint32_t mipLevels_ = {},
+      uint32_t arrayLayers_ = {},
+      VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+      VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+      VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+      VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+      uint32_t queueFamilyIndexCount_ = {},
+      const uint32_t * pQueueFamilyIndices_ = {},
+      VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
+      VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , imageType( imageType_ )
+      , format( format_ )
+      , extent( extent_ )
+      , mipLevels( mipLevels_ )
+      , arrayLayers( arrayLayers_ )
+      , samples( samples_ )
+      , tiling( tiling_ )
+      , usage( usage_ )
+      , sharingMode( sharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+      , initialLayout( initialLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageCreateInfo( *reinterpret_cast<ImageCreateInfo const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+
ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ImageType imageType_, + VULKAN_HPP_NAMESPACE::Format format_, + VULKAN_HPP_NAMESPACE::Extent3D extent_, + uint32_t mipLevels_, + uint32_t arrayLayers_, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + : flags( flags_ ) + , imageType( imageType_ ) + , format( format_ ) + , extent( extent_ ) + , mipLevels( mipLevels_ ) + , arrayLayers( arrayLayers_ ) + , samples( samples_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + , initialLayout( initialLayout_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT + { + imageType = imageType_; + return *this; + } + + ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT + { + mipLevels = mipLevels_; + return *this; + } + + ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT + { + arrayLayers = arrayLayers_; + return *this; + } + + ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCreateInfo & setQueueFamilyIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() 
); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } + + operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCreateInfo const & ) const = default; +#else + bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( imageType == rhs.imageType ) && ( format == rhs.format ) && ( extent == rhs.extent ) && + ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) && + ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( initialLayout == rhs.initialLayout ); + } + + bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + uint32_t mipLevels = {}; + uint32_t arrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageCreateInfo; + }; + + struct ImageViewCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , image( image_ ) + , viewType( viewType_ ) + , format( format_ ) + , components( components_ ) + , subresourceRange( subresourceRange_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & + operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT + { + viewType = viewType_; + return *this; + } + + ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + ImageViewCreateInfo & + setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + ImageViewCreateInfo & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } + + operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewCreateInfo const & ) const = default; +#else + bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && + ( viewType == rhs.viewType ) && ( format == rhs.format ) && ( components == rhs.components ) && + ( subresourceRange == rhs.subresourceRange ); + } + + bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo; + const void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageViewCreateInfo; + }; + + struct IndirectCommandsLayoutTokenNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, + uint32_t stream_ = {}, + uint32_t offset_ = {}, + uint32_t vertexBindingUnit_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, + uint32_t pushconstantOffset_ = {}, + uint32_t pushconstantSize_ = {}, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, + uint32_t indexTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, + const uint32_t * pIndexTypeValues_ = {} ) VULKAN_HPP_NOEXCEPT + : tokenType( tokenType_ ) + , stream( stream_ ) + , offset( offset_ ) + , vertexBindingUnit( vertexBindingUnit_ ) + , vertexDynamicStride( vertexDynamicStride_ ) + , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) + , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) + , pushconstantOffset( pushconstantOffset_ ) + , pushconstantSize( pushconstantSize_ ) + , indirectStateFlags( indirectStateFlags_ ) + , indexTypeCount( indexTypeCount_ ) + , pIndexTypes( pIndexTypes_ ) + , pIndexTypeValues( pIndexTypeValues_ ) + {} + + VULKAN_HPP_CONSTEXPR + IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutTokenNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, + uint32_t stream_, + uint32_t offset_, + uint32_t vertexBindingUnit_, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, + uint32_t pushconstantOffset_, + uint32_t pushconstantSize_, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ = {} ) + : tokenType( tokenType_ ) + , stream( stream_ ) + , offset( offset_ ) + , vertexBindingUnit( vertexBindingUnit_ ) + , vertexDynamicStride( vertexDynamicStride_ ) + , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) + , pushconstantShaderStageFlags( 
pushconstantShaderStageFlags_ ) + , pushconstantOffset( pushconstantOffset_ ) + , pushconstantSize( pushconstantSize_ ) + , indirectStateFlags( indirectStateFlags_ ) + , indexTypeCount( static_cast( indexTypes_.size() ) ) + , pIndexTypes( indexTypes_.data() ) + , pIndexTypeValues( indexTypeValues_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() ); +# else + if ( indexTypes_.size() != indexTypeValues_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + IndirectCommandsLayoutTokenNV & + setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT + { + tokenType = tokenType_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT + { + stream = stream_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingUnit = vertexBindingUnit_; + return *this; + } + + IndirectCommandsLayoutTokenNV & + setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexDynamicStride = vertexDynamicStride_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantPipelineLayout = pushconstantPipelineLayout_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantShaderStageFlags = pushconstantShaderStageFlags_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantOffset = pushconstantOffset_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantSize = pushconstantSize_; + return *this; + } + + IndirectCommandsLayoutTokenNV & + setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT + { + indirectStateFlags = indirectStateFlags_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = indexTypeCount_; + return *this; + } + + IndirectCommandsLayoutTokenNV & + setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypes = pIndexTypes_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & 
setIndexTypes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_ ) + VULKAN_HPP_NOEXCEPT + { + indexTypeCount = static_cast( indexTypes_.size() ); + pIndexTypes = indexTypes_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypeValues = pIndexTypeValues_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & setIndexTypeValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = static_cast( indexTypeValues_.size() ); + pIndexTypeValues = indexTypeValues_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) && + ( stream == rhs.stream ) && ( offset == rhs.offset ) && ( vertexBindingUnit == rhs.vertexBindingUnit ) && + ( vertexDynamicStride == rhs.vertexDynamicStride ) && + ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) && + ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) && + ( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) && + ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) && + ( pIndexTypes == rhs.pIndexTypes ) && ( pIndexTypeValues == rhs.pIndexTypeValues ); + } + + bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup; + uint32_t stream = {}; + uint32_t offset = {}; + uint32_t vertexBindingUnit = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {}; + uint32_t pushconstantOffset = {}; + uint32_t pushconstantSize = {}; + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {}; + uint32_t indexTypeCount = {}; + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {}; + const uint32_t * pIndexTypeValues = {}; + }; + static_assert( sizeof( IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutTokenNV; + }; + + struct IndirectCommandsLayoutCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eIndirectCommandsLayoutCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {}, + uint32_t streamCount_ = {}, + const uint32_t * pStreamStrides_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , tokenCount( tokenCount_ ) + , pTokens( pTokens_ ) + , streamCount( streamCount_ ) + , pStreamStrides( pStreamStrides_ ) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + tokens_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ = {} ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , tokenCount( static_cast( tokens_.size() ) ) + , pTokens( tokens_.data() ) + , streamCount( static_cast( streamStrides_.size() ) ) + , pStreamStrides( streamStrides_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & + operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutCreateInfoNV & + operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = tokenCount_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & + setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT + { + pTokens = pTokens_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV & setTokens( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + tokens_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = static_cast( tokens_.size() ); + pTokens = tokens_.data(); + return *this; + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT + { + pStreamStrides = pStreamStrides_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV & setStreamStrides( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = static_cast( streamStrides_.size() ); + pStreamStrides = streamStrides_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( tokenCount == rhs.tokenCount ) && + ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) && + ( pStreamStrides == rhs.pStreamStrides ); + } + + bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t tokenCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {}; + uint32_t streamCount = {}; + const uint32_t * pStreamStrides = {}; + }; + static_assert( sizeof( IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
+  {
+    using Type = IndirectCommandsLayoutCreateInfoNV;
+  };
+
+  struct PipelineCacheCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {},
+                                                  size_t       initialDataSize_ = {},
+                                                  const void * pInitialData_    = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , initialDataSize( initialDataSize_ )
+      , pInitialData( pInitialData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCacheCreateInfo( *reinterpret_cast<PipelineCacheCreateInfo const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_,
+                             VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & initialData_ )
+      : flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo &
+                            operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    PipelineCacheCreateInfo &
+      setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialData_.size() * sizeof( T );
+      pInitialData    = initialData_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCacheCreateInfo *>( this );
+    }
+
+    operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCacheCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineCacheCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData );
+    }
+
+    bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {};
+    size_t       initialDataSize = {};
+    const void * pInitialData    = {};
+  };
+  static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
+  {
+    using Type = PipelineCacheCreateInfo;
+  };
+
+  struct PushConstantRange
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
+                                            uint32_t offset_ = {},
+                                            uint32_t size_   = {} ) VULKAN_HPP_NOEXCEPT
+      : stageFlags( stageFlags_ )
+      , offset( offset_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PushConstantRange &
+                            operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
+      return *this;
+    }
+
+    PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPushConstantRange *>( this );
+    }
+
+    operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPushConstantRange *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PushConstantRange const & ) const = default;
+#else
+    bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size );
+    }
+
+    bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    uint32_t offset = {};
+    uint32_t size   = {};
+  };
+  static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PushConstantRange>::value, "struct wrapper is not a standard layout!"
); + + struct PipelineLayoutCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, + uint32_t setLayoutCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + uint32_t pushConstantRangeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , setLayoutCount( setLayoutCount_ ) + , pSetLayouts( pSetLayouts_ ) + , pushConstantRangeCount( pushConstantRangeCount_ ) + , pPushConstantRanges( pPushConstantRanges_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLayoutCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pushConstantRanges_ = {} ) + : flags( flags_ ) + , setLayoutCount( static_cast( setLayouts_.size() ) ) + , pSetLayouts( setLayouts_.data() ) + , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) + , pPushConstantRanges( pushConstantRanges_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & + operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = setLayoutCount_; + return *this; + } + + PipelineLayoutCreateInfo & + setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayouts = pSetLayouts_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo & setSetLayouts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; + } + + PipelineLayoutCreateInfo & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pPushConstantRanges = pPushConstantRanges_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo & setPushConstantRanges( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const 
& + pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); + pPushConstantRanges = pushConstantRanges_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLayoutCreateInfo const & ) const = default; +#else + bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ) && + ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && + ( pPushConstantRanges == rhs.pPushConstantRanges ); + } + + bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; + }; + static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineLayoutCreateInfo; + }; + + struct PrivateDataSlotCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ = {} ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + PrivateDataSlotCreateInfoEXT( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PrivateDataSlotCreateInfoEXT( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PrivateDataSlotCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfoEXT & + operator=( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PrivateDataSlotCreateInfoEXT & operator=( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PrivateDataSlotCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PrivateDataSlotCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkPrivateDataSlotCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPrivateDataSlotCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PrivateDataSlotCreateInfoEXT 
const & ) const = default; +#else + bool operator==( PrivateDataSlotCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + } + + bool operator!=( PrivateDataSlotCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags = {}; + }; + static_assert( sizeof( PrivateDataSlotCreateInfoEXT ) == sizeof( VkPrivateDataSlotCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PrivateDataSlotCreateInfoEXT; + }; + + class PrivateDataSlotEXT + { + public: + using CType = VkPrivateDataSlotEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlotEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT() = default; + VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlotEXT( VkPrivateDataSlotEXT privateDataSlotEXT ) VULKAN_HPP_NOEXCEPT + : m_privateDataSlotEXT( privateDataSlotEXT ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PrivateDataSlotEXT & operator=( VkPrivateDataSlotEXT privateDataSlotEXT ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlotEXT = privateDataSlotEXT; + return *this; + } +#endif + + PrivateDataSlotEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlotEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PrivateDataSlotEXT const & ) const = default; +#else + bool operator==( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT == rhs.m_privateDataSlotEXT; + } + + bool operator!=( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT != rhs.m_privateDataSlotEXT; + } + + bool operator<( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT < rhs.m_privateDataSlotEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlotEXT() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT == VK_NULL_HANDLE; + } + + private: + VkPrivateDataSlotEXT m_privateDataSlotEXT = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT ) == sizeof( VkPrivateDataSlotEXT ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct QueryPoolCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, + uint32_t queryCount_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , queryType( queryType_ ) + , queryCount( queryCount_ ) + , pipelineStatistics( pipelineStatistics_ ) + {} + + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & + operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT + { + queryType = queryType_; + return *this; + } + + QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT + { + queryCount = queryCount_; + return *this; + } + + QueryPoolCreateInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatistics = pipelineStatistics_; + return *this; + } + + operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolCreateInfo const & ) const = default; +#else + bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) && + ( pipelineStatistics == rhs.pipelineStatistics ); + } + + bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; + uint32_t queryCount = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + }; + static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo 
), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = QueryPoolCreateInfo; + }; + + struct RayTracingShaderGroupCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRayTracingShaderGroupCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingShaderGroupCreateInfoKHR( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = {}, + uint32_t closestHitShader_ = {}, + uint32_t anyHitShader_ = {}, + uint32_t intersectionShader_ = {}, + const void * pShaderGroupCaptureReplayHandle_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , generalShader( generalShader_ ) + , closestHitShader( closestHitShader_ ) + , anyHitShader( anyHitShader_ ) + , intersectionShader( intersectionShader_ ) + , pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & + operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoKHR & + operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & + setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & + setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT + { + pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; + return *this; + } + + operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && + ( generalShader == rhs.generalShader ) && ( closestHitShader == rhs.closestHitShader ) && + ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) && + ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle ); + } + + bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = {}; + uint32_t closestHitShader = {}; + uint32_t anyHitShader = {}; + uint32_t intersectionShader = {}; + const void * pShaderGroupCaptureReplayHandle = {}; + }; + static_assert( sizeof( RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoKHR; + }; + + struct PipelineLibraryCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {} ) VULKAN_HPP_NOEXCEPT + : libraryCount( libraryCount_ ) + , pLibraries( pLibraries_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLibraryCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) + : libraryCount( static_cast( libraries_.size() ) ), pLibraries( libraries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & + operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT + { + libraryCount = libraryCount_; + return *this; + } + + PipelineLibraryCreateInfoKHR & + setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT + { + pLibraries = pLibraries_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR & setLibraries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) + VULKAN_HPP_NOEXCEPT + { + libraryCount = static_cast( libraries_.size() ); + pLibraries = libraries_.data(); + return *this; + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && + ( pLibraries == rhs.pLibraries ); + } + + bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; + const void * pNext = {}; + uint32_t libraryCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {}; + }; + static_assert( sizeof( PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineLibraryCreateInfoKHR; + }; + + struct RayTracingPipelineInterfaceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPipelineRayPayloadSize_ = {}, + uint32_t maxPipelineRayHitAttributeSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ ) + , maxPipelineRayHitAttributeSize( maxPipelineRayHitAttributeSize_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( + RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : RayTracingPipelineInterfaceCreateInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & + operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineInterfaceCreateInfoKHR & + operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & + setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_; + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & + setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_; + return *this; + } + + operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this 
); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize ) && + ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize ); + } + + bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + const void * pNext = {}; + uint32_t maxPipelineRayPayloadSize = {}; + uint32_t maxPipelineRayHitAttributeSize = {}; + }; + static_assert( sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) == + sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingPipelineInterfaceCreateInfoKHR; + }; + + struct RayTracingPipelineCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) + , pLibraryInfo( pLibraryInfo_ ) + , pLibraryInterface( pLibraryInterface_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const 
VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) + : flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) + , pLibraryInfo( pLibraryInfo_ ) + , pLibraryInterface( pLibraryInterface_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & setStages( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & + setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoKHR & + setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & + setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInfo = pLibraryInfo_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPLibraryInterface( + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInterface = 
pLibraryInterface_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPDynamicState( + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicState = pDynamicState_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) && + ( pGroups == rhs.pGroups ) && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth ) && + ( pLibraryInfo == rhs.pLibraryInfo ) && ( pLibraryInterface == rhs.pLibraryInterface ) && + ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {}; + uint32_t maxPipelineRayRecursionDepth = {}; + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface = {}; + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + static_assert( sizeof( RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
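  // Usage sketch for the fluent setters above (illustrative only): `stages`, `groups`, and
  // `pipelineLayout` are assumed to exist in the calling code, and the default namespace
  // alias `vk` for VULKAN_HPP_NAMESPACE is assumed.
  //
  //   std::vector<vk::PipelineShaderStageCreateInfo>      stages = /* raygen, miss, hit ... */;
  //   std::vector<vk::RayTracingShaderGroupCreateInfoKHR> groups = /* one group per stage set */;
  //   auto createInfo = vk::RayTracingPipelineCreateInfoKHR{}
  //                         .setStages( stages )                    // fills stageCount + pStages
  //                         .setGroups( groups )                    // fills groupCount + pGroups
  //                         .setMaxPipelineRayRecursionDepth( 1 )
  //                         .setLayout( pipelineLayout );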
); + + template <> + struct CppType + { + using Type = RayTracingPipelineCreateInfoKHR; + }; + + struct RayTracingShaderGroupCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRayTracingShaderGroupCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = {}, + uint32_t closestHitShader_ = {}, + uint32_t anyHitShader_ = {}, + uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , generalShader( generalShader_ ) + , closestHitShader( closestHitShader_ ) + , anyHitShader( anyHitShader_ ) + , intersectionShader( intersectionShader_ ) + {} + + VULKAN_HPP_CONSTEXPR + RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & + operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & + setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } + + operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && + ( generalShader == rhs.generalShader ) && ( closestHitShader == rhs.closestHitShader ) && + ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ); + } + + bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eRayTracingShaderGroupCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = {}; + uint32_t closestHitShader = {}; + uint32_t anyHitShader = {}; + uint32_t intersectionShader = {}; + }; + static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoNV; + }; + + struct RayTracingPipelineCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , maxRecursionDepth( maxRecursionDepth_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) + : flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxRecursionDepth( maxRecursionDepth_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setFlags( 
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + RayTracingPipelineCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV & setStages( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + RayTracingPipelineCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV & + setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxRecursionDepth = maxRecursionDepth_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + RayTracingPipelineCreateInfoNV & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) && + ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = 
{}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {}; + uint32_t maxRecursionDepth = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingPipelineCreateInfoNV; + }; + + struct SubpassDescription + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( inputAttachmentCount_ ) + , pInputAttachments( pInputAttachments_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pResolveAttachments( pResolveAttachments_ ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( preserveAttachmentCount_ ) + , pPreserveAttachments( pPreserveAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDescription( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + inputAttachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + resolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {} ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) + , pInputAttachments( inputAttachments_.data() ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pResolveAttachments( resolveAttachments_.data() ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) + , pPreserveAttachments( preserveAttachments_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +# else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( + 
VULKAN_HPP_NAMESPACE_STRING + "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SubpassDescription & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + SubpassDescription & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setInputAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + inputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + SubpassDescription & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription & setPResolveAttachments( + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setResolveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription & setPDepthStencilAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ 
) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setPreserveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = static_cast( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescription const & ) const = default; +#else + bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) && + ( pResolveAttachments == rhs.pResolveAttachments ) && + ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && + ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && + ( pPreserveAttachments == rhs.pPreserveAttachments ); + } + + bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; + }; + static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
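  // Usage sketch (illustrative only): a single-color-attachment graphics subpass. `colorRef`
  // and `depthRef` are assumed caller-owned lvalues; note that the enhanced-mode constructor
  // above requires resolveAttachments_ to be empty or to match colorAttachments_ in size.
  //
  //   vk::AttachmentReference colorRef{ 0, vk::ImageLayout::eColorAttachmentOptimal };
  //   vk::AttachmentReference depthRef{ 1, vk::ImageLayout::eDepthStencilAttachmentOptimal };
  //   auto subpass = vk::SubpassDescription{}
  //                      .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
  //                      .setColorAttachments( colorRef )           // colorAttachmentCount = 1
  //                      .setPDepthStencilAttachment( &depthRef );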
); + + struct SubpassDependency + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SubpassDependency( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubpass( srcSubpass_ ) + , dstSubpass( dstSubpass_ ) + , srcStageMask( srcStageMask_ ) + , dstStageMask( dstStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , dependencyFlags( dependencyFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDependency( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & + operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } + + SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + { + dstSubpass = dstSubpass_; + return *this; + } + + SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDependency const & ) const = default; +#else + bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && + ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && + ( dependencyFlags == rhs.dependencyFlags ); + } + + bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + 
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + }; + static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct RenderPassCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , subpassCount( subpassCount_ ) + , pSubpasses( pSubpasses_ ) + , dependencyCount( dependencyCount_ ) + , pDependencies( pDependencies_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo( + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + subpasses_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + dependencies_ = {} ) + : flags( flags_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , subpassCount( static_cast( subpasses_.size() ) ) + , pSubpasses( subpasses_.data() ) + , dependencyCount( static_cast( dependencies_.size() ) ) + , pDependencies( dependencies_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & + operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + RenderPassCreateInfo & + setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + RenderPassCreateInfo & + setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT + { + pSubpasses = pSubpasses_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & setSubpasses( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) + VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + RenderPassCreateInfo & + setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT + { + pDependencies = pDependencies_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & setDependencies( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + dependencies_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreateInfo const & ) const = default; +#else + bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && + ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ); + } + + bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {}; + }; + static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
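  // Usage sketch (illustrative only): assembling a render pass from the pieces above.
  // `attachments`, `subpass`, `dependency`, and `device` are assumed caller-side objects;
  // the default exception-enabled configuration is assumed for the return type.
  //
  //   std::array<vk::AttachmentDescription, 2> attachments = /* color + depth */;
  //   auto renderPassInfo = vk::RenderPassCreateInfo{}
  //                             .setAttachments( attachments )
  //                             .setSubpasses( subpass )            // single subpass
  //                             .setDependencies( dependency );     // single dependency
  //   vk::RenderPass renderPass = device.createRenderPass( renderPassInfo );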
); + + template <> + struct CppType + { + using Type = RenderPassCreateInfo; + }; + + struct SubpassDescription2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription2( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t viewMask_ = {}, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , viewMask( viewMask_ ) + , inputAttachmentCount( inputAttachmentCount_ ) + , pInputAttachments( pInputAttachments_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pResolveAttachments( pResolveAttachments_ ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( preserveAttachmentCount_ ) + , pPreserveAttachments( pPreserveAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDescription2( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + inputAttachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + resolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {} ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , viewMask( viewMask_ ) + , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) + , pInputAttachments( inputAttachments_.data() ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pResolveAttachments( resolveAttachments_.data() ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) + , pPreserveAttachments( preserveAttachments_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +# else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SubpassDescription2 & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + SubpassDescription2 & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setInputAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + inputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + SubpassDescription2 & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription2 & setPResolveAttachments( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setResolveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription2 & setPDepthStencilAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + SubpassDescription2 & 
setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setPreserveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = static_cast( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescription2 const & ) const = default; +#else + bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( viewMask == rhs.viewMask ) && + ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) && + ( pResolveAttachments == rhs.pResolveAttachments ) && + ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && + ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && + ( pPreserveAttachments == rhs.pPreserveAttachments ); + } + + bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t viewMask = {}; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; + }; + static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
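  // Usage sketch (illustrative only): SubpassDescription2 mirrors SubpassDescription but adds
  // viewMask for multiview. `colorRef2` is an assumed caller-owned vk::AttachmentReference2.
  //
  //   auto subpass2 = vk::SubpassDescription2{}
  //                       .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
  //                       .setViewMask( 0b11 )                      // render to views 0 and 1
  //                       .setColorAttachments( colorRef2 );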
); + + template <> + struct CppType + { + using Type = SubpassDescription2; + }; + using SubpassDescription2KHR = SubpassDescription2; + + struct SubpassDependency2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + int32_t viewOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubpass( srcSubpass_ ) + , dstSubpass( dstSubpass_ ) + , srcStageMask( srcStageMask_ ) + , dstStageMask( dstStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , dependencyFlags( dependencyFlags_ ) + , viewOffset( viewOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDependency2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & + operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } + + SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + { + dstSubpass = dstSubpass_; + return *this; + } + + SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + SubpassDependency2 & + setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT + { + viewOffset = viewOffset_; + return *this; + } + + operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDependency2 const & ) const = default; +#else + bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
srcSubpass == rhs.srcSubpass ) && + ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) && + ( viewOffset == rhs.viewOffset ); + } + + bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2; + const void * pNext = {}; + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + int32_t viewOffset = {}; + }; + static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SubpassDependency2; + }; + using SubpassDependency2KHR = SubpassDependency2; + + struct RenderPassCreateInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {}, + uint32_t correlatedViewMaskCount_ = {}, + const uint32_t * pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , subpassCount( subpassCount_ ) + , pSubpasses( pSubpasses_ ) + , dependencyCount( dependencyCount_ ) + , pDependencies( pDependencies_ ) + , correlatedViewMaskCount( correlatedViewMaskCount_ ) + , pCorrelatedViewMasks( pCorrelatedViewMasks_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreateInfo2( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2( + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + subpasses_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + dependencies_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ = {} ) + : flags( flags_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , subpassCount( static_cast( subpasses_.size() ) ) + , pSubpasses( subpasses_.data() ) + , dependencyCount( static_cast( dependencies_.size() ) ) + , pDependencies( dependencies_.data() ) + , correlatedViewMaskCount( static_cast( correlatedViewMasks_.size() ) ) + , pCorrelatedViewMasks( 
correlatedViewMasks_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & + operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + RenderPassCreateInfo2 & + setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + RenderPassCreateInfo2 & + setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT + { + pSubpasses = pSubpasses_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & setSubpasses( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + subpasses_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + RenderPassCreateInfo2 & + setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT + { + pDependencies = pDependencies_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & setDependencies( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + dependencies_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT + { + correlatedViewMaskCount = correlatedViewMaskCount_; + return *this; + } + + RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCorrelatedViewMasks = pCorrelatedViewMasks_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & setCorrelatedViewMasks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + correlatedViewMaskCount = static_cast( correlatedViewMasks_.size() ); + pCorrelatedViewMasks = correlatedViewMasks_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator 
VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreateInfo2 const & ) const = default; +#else + bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && + ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) && + ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) && + ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); + } + + bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {}; + uint32_t correlatedViewMaskCount = {}; + const uint32_t * pCorrelatedViewMasks = {}; + }; + static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
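  // Usage sketch (illustrative only): RenderPassCreateInfo2 additionally carries correlated
  // view masks for multiview. `attachments2`, `subpass2`, `dependency2`, and `device` are
  // assumed caller-side objects built from the *2 structs above.
  //
  //   uint32_t correlationMask = 0b11;
  //   auto renderPassInfo2 = vk::RenderPassCreateInfo2{}
  //                              .setAttachments( attachments2 )
  //                              .setSubpasses( subpass2 )
  //                              .setDependencies( dependency2 )
  //                              .setCorrelatedViewMasks( correlationMask );
  //   vk::RenderPass renderPass = device.createRenderPass2( renderPassInfo2 );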
); + + template <> + struct CppType + { + using Type = RenderPassCreateInfo2; + }; + using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; + + struct SamplerCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerCreateInfo( + VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + float mipLodBias_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, + float maxAnisotropy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, + VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + float minLod_ = {}, + float maxLod_ = {}, + VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, + VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , magFilter( magFilter_ ) + , minFilter( minFilter_ ) + , mipmapMode( mipmapMode_ ) + , addressModeU( addressModeU_ ) + , addressModeV( addressModeV_ ) + , addressModeW( addressModeW_ ) + , mipLodBias( mipLodBias_ ) + , anisotropyEnable( anisotropyEnable_ ) + , maxAnisotropy( maxAnisotropy_ ) + , compareEnable( compareEnable_ ) + , compareOp( compareOp_ ) + , minLod( minLod_ ) + , maxLod( maxLod_ ) + , borderColor( borderColor_ ) + , unnormalizedCoordinates( unnormalizedCoordinates_ ) + {} + + VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT + { + magFilter = magFilter_; + return *this; + } + + SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT + { + minFilter = minFilter_; + return *this; + } + + SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT + { + mipmapMode = mipmapMode_; + return *this; + } + + SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT + { + addressModeU = addressModeU_; + return *this; + } + + 
SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT + { + addressModeV = addressModeV_; + return *this; + } + + SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT + { + addressModeW = addressModeW_; + return *this; + } + + SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT + { + mipLodBias = mipLodBias_; + return *this; + } + + SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT + { + anisotropyEnable = anisotropyEnable_; + return *this; + } + + SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT + { + maxAnisotropy = maxAnisotropy_; + return *this; + } + + SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT + { + compareEnable = compareEnable_; + return *this; + } + + SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT + { + compareOp = compareOp_; + return *this; + } + + SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } + + SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT + { + maxLod = maxLod_; + return *this; + } + + SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT + { + borderColor = borderColor_; + return *this; + } + + SamplerCreateInfo & + setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT + { + unnormalizedCoordinates = unnormalizedCoordinates_; + return *this; + } + + operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerCreateInfo const & ) const = default; +#else + bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && ( mipmapMode == rhs.mipmapMode ) && + ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) && + ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && + ( anisotropyEnable == rhs.anisotropyEnable ) && ( maxAnisotropy == rhs.maxAnisotropy ) && + ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) && + ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && + ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); + } + + bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + VULKAN_HPP_NAMESPACE::SamplerAddressMode 
addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + float mipLodBias = {}; + VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {}; + float maxAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {}; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + float minLod = {}; + float maxLod = {}; + VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack; + VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {}; + }; + static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SamplerCreateInfo; + }; + + struct SamplerYcbcrConversionCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + , ycbcrModel( ycbcrModel_ ) + , ycbcrRange( ycbcrRange_ ) + , components( components_ ) + , xChromaOffset( xChromaOffset_ ) + , yChromaOffset( yChromaOffset_ ) + , chromaFilter( chromaFilter_ ) + , forceExplicitReconstruction( forceExplicitReconstruction_ ) + {} + + VULKAN_HPP_CONSTEXPR + SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & + setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrModel = ycbcrModel_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & + setYcbcrRange( 
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrRange = ycbcrRange_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & + setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & + setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + xChromaOffset = xChromaOffset_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & + setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + yChromaOffset = yChromaOffset_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT + { + chromaFilter = chromaFilter_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & + setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT + { + forceExplicitReconstruction = forceExplicitReconstruction_; + return *this; + } + + operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default; +#else + bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && + ( ycbcrModel == rhs.ycbcrModel ) && ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) && + ( xChromaOffset == rhs.xChromaOffset ) && ( yChromaOffset == rhs.yChromaOffset ) && + ( chromaFilter == rhs.chromaFilter ) && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); + } + + bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {}; + }; + static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SamplerYcbcrConversionCreateInfo; + }; + using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; + + class SamplerYcbcrConversion + { + public: + using CType = VkSamplerYcbcrConversion; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + + public: + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() = default; + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + : m_samplerYcbcrConversion( samplerYcbcrConversion ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SamplerYcbcrConversion & operator=( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = samplerYcbcrConversion; + return *this; + } +#endif + + SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversion const & ) const = default; +#else + bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion; + } + + bool operator!=( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion; + } + + bool operator<( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion == VK_NULL_HANDLE; + } + + private: + VkSamplerYcbcrConversion m_samplerYcbcrConversion = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + using SamplerYcbcrConversionKHR = SamplerYcbcrConversion; + + struct SemaphoreCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & + operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreCreateInfo const & ) const = default; +#else + bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + } + + bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {}; + }; + static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SemaphoreCreateInfo; + }; + + struct ShaderModuleCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, + size_t codeSize_ = {}, + const uint32_t * pCode_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , codeSize( codeSize_ ) + , pCode( pCode_ ) + {} + + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderModuleCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) + : flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & + operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = codeSize_; + return *this; + } + + ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT + { + pCode = pCode_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderModuleCreateInfo & + setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = code_.size() * 4; + pCode = code_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleCreateInfo const & ) const = default; +#else + bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ); + } + + bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; + size_t codeSize = {}; + const uint32_t * pCode = {}; + }; + static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ShaderModuleCreateInfo; + }; + + class SurfaceKHR + { + public: + using CType = VkSurfaceKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + + public: + VULKAN_HPP_CONSTEXPR SurfaceKHR() = default; + VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( surfaceKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SurfaceKHR & operator=( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = surfaceKHR; + return *this; + } +#endif + + SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceKHR const & ) const = default; +#else + bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR == rhs.m_surfaceKHR; + } + + bool operator!=( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR != rhs.m_surfaceKHR; + } + + bool operator<( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR < rhs.m_surfaceKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR == VK_NULL_HANDLE; + } + + private: + VkSurfaceKHR m_surfaceKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::SurfaceKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct SwapchainCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, + uint32_t minImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, + uint32_t imageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , surface( surface_ ) + , minImageCount( minImageCount_ ) + , imageFormat( imageFormat_ ) + , imageColorSpace( imageColorSpace_ ) + , imageExtent( imageExtent_ ) + , imageArrayLayers( imageArrayLayers_ ) + , imageUsage( imageUsage_ ) + , imageSharingMode( imageSharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + , preTransform( preTransform_ ) + , compositeAlpha( compositeAlpha_ ) + , presentMode( presentMode_ ) + , clipped( clipped_ ) + , oldSwapchain( oldSwapchain_ ) + {} + + VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainCreateInfoKHR( + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, + uint32_t minImageCount_, + VULKAN_HPP_NAMESPACE::Format imageFormat_, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, + uint32_t imageArrayLayers_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = 
VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) + : flags( flags_ ) + , surface( surface_ ) + , minImageCount( minImageCount_ ) + , imageFormat( imageFormat_ ) + , imageColorSpace( imageColorSpace_ ) + , imageExtent( imageExtent_ ) + , imageArrayLayers( imageArrayLayers_ ) + , imageUsage( imageUsage_ ) + , imageSharingMode( imageSharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + , preTransform( preTransform_ ) + , compositeAlpha( compositeAlpha_ ) + , presentMode( presentMode_ ) + , clipped( clipped_ ) + , oldSwapchain( oldSwapchain_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } + + SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT + { + minImageCount = minImageCount_; + return *this; + } + + SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT + { + imageFormat = imageFormat_; + return *this; + } + + SwapchainCreateInfoKHR & + setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT + { + imageColorSpace = imageColorSpace_; + return *this; + } + + SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT + { + imageArrayLayers = imageArrayLayers_; + return *this; + } + + SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT + { + imageUsage = imageUsage_; + return *this; + } + + SwapchainCreateInfoKHR & + setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT + { + imageSharingMode = imageSharingMode_; + return *this; + } + + SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainCreateInfoKHR & setQueueFamilyIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SwapchainCreateInfoKHR & + 
setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT + { + preTransform = preTransform_; + return *this; + } + + SwapchainCreateInfoKHR & + setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT + { + compositeAlpha = compositeAlpha_; + return *this; + } + + SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT + { + presentMode = presentMode_; + return *this; + } + + SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT + { + clipped = clipped_; + return *this; + } + + SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT + { + oldSwapchain = oldSwapchain_; + return *this; + } + + operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) && + ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) && + ( imageColorSpace == rhs.imageColorSpace ) && ( imageExtent == rhs.imageExtent ) && + ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) && + ( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) && + ( compositeAlpha == rhs.compositeAlpha ) && ( presentMode == rhs.presentMode ) && + ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain ); + } + + bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + uint32_t minImageCount = {}; + VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + uint32_t imageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; + VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; + }; + static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SwapchainCreateInfoKHR; + }; + + struct ValidationCacheCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, + size_t initialDataSize_ = {}, + const void * pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , initialDataSize( initialDataSize_ ) + , pInitialData( pInitialData_ ) + {} + + VULKAN_HPP_CONSTEXPR + ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ValidationCacheCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) + : flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & + operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ValidationCacheCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialDataSize_; + return *this; + } + + ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT + { + pInitialData = pInitialData_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ValidationCacheCreateInfoEXT & + setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialData_.size() * sizeof( T ); + pInitialData = initialData_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default; +#else + bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData ); + } + + bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + 
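Editor's note: SwapchainCreateInfoKHR above also provides an ArrayProxy-based setQueueFamilyIndices overload that fills pQueueFamilyIndices and queueFamilyIndexCount in one step (the same pattern as ShaderModuleCreateInfo::setCode and ValidationCacheCreateInfoEXT::setInitialData). Below is a hedged sketch of how that is commonly used when two queue families share the swapchain images; the helper name and parameters are assumptions, and the returned create-info keeps a pointer into the caller's vector.

#include <vulkan/vulkan.hpp>
#include <vector>

vk::SwapchainCreateInfoKHR makeSharedSwapchainInfo( vk::SurfaceKHR surface,
                                                    vk::SurfaceFormatKHR surfaceFormat,
                                                    vk::Extent2D extent,
                                                    uint32_t minImageCount,
                                                    std::vector<uint32_t> const & queueFamilies )
{
  return vk::SwapchainCreateInfoKHR()
    .setSurface( surface )
    .setMinImageCount( minImageCount )
    .setImageFormat( surfaceFormat.format )
    .setImageColorSpace( surfaceFormat.colorSpace )
    .setImageExtent( extent )
    .setImageArrayLayers( 1 )
    .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
    .setImageSharingMode( vk::SharingMode::eConcurrent )  // images accessed by more than one family
    .setQueueFamilyIndices( queueFamilies )               // sets the count and the pointer together
    .setPresentMode( vk::PresentModeKHR::eFifo )
    .setClipped( VK_TRUE );
}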
public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {}; + size_t initialDataSize = {}; + const void * pInitialData = {}; + }; + static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ValidationCacheCreateInfoEXT; + }; + + class ValidationCacheEXT + { + public: + using CType = VkValidationCacheEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + public: + VULKAN_HPP_CONSTEXPR ValidationCacheEXT() = default; + VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT + : m_validationCacheEXT( validationCacheEXT ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ValidationCacheEXT & operator=( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = validationCacheEXT; + return *this; + } +#endif + + ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationCacheEXT const & ) const = default; +#else + bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT == rhs.m_validationCacheEXT; + } + + bool operator!=( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT != rhs.m_validationCacheEXT; + } + + bool operator<( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT < rhs.m_validationCacheEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT == VK_NULL_HANDLE; + } + + private: + VkValidationCacheEXT m_validationCacheEXT = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoProfileKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoProfileKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid, + VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {}, + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {}, + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {} ) VULKAN_HPP_NOEXCEPT + : videoCodecOperation( videoCodecOperation_ ) + , chromaSubsampling( chromaSubsampling_ ) + , lumaBitDepth( lumaBitDepth_ ) + , chromaBitDepth( chromaBitDepth_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoProfileKHR( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfileKHR( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoProfileKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & operator=( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfileKHR & operator=( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoProfileKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoProfileKHR & setVideoCodecOperation( + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT + { + videoCodecOperation = videoCodecOperation_; + return *this; + } + + VideoProfileKHR & setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) + VULKAN_HPP_NOEXCEPT + { + chromaSubsampling = chromaSubsampling_; + return *this; + } + + VideoProfileKHR & + setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT + { + lumaBitDepth = lumaBitDepth_; + return *this; + } + + VideoProfileKHR & + setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT + { + chromaBitDepth = chromaBitDepth_; + return *this; + } + + operator VkVideoProfileKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoProfileKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoProfileKHR const & ) const = default; +# else + bool operator==( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperation == rhs.videoCodecOperation ) && + ( chromaSubsampling == rhs.chromaSubsampling ) && ( lumaBitDepth == rhs.lumaBitDepth ) && + ( chromaBitDepth == rhs.chromaBitDepth ); + } + + bool operator!=( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + 
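Editor's note: handle classes such as SurfaceKHR and ValidationCacheEXT above are thin, non-owning value wrappers around the C handle: they default to VK_NULL_HANDLE, test as booleans, and convert back to the C type. A small illustration of those semantics follows; the function is the editor's example, not library code.

#include <vulkan/vulkan.hpp>

void forwardToCApi( vk::SurfaceKHR surface )
{
  if ( !surface )  // operator!() compares against VK_NULL_HANDLE
  {
    return;
  }
  // The conversion back to the C handle is explicit on 64-bit platforms.
  VkSurfaceKHR raw = static_cast<VkSurfaceKHR>( surface );
  (void)raw;          // e.g. pass to a C-only extension entry point
  surface = nullptr;  // resets this copy only; the underlying Vulkan object is not destroyed
}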
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid; + VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {}; + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {}; + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {}; + }; + static_assert( sizeof( VideoProfileKHR ) == sizeof( VkVideoProfileKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoProfileKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoSessionCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoSessionCreateInfoKHR( + uint32_t queueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {}, + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile_ = {}, + VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t maxReferencePicturesSlotsCount_ = {}, + uint32_t maxReferencePicturesActiveCount_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFamilyIndex( queueFamilyIndex_ ) + , flags( flags_ ) + , pVideoProfile( pVideoProfile_ ) + , pictureFormat( pictureFormat_ ) + , maxCodedExtent( maxCodedExtent_ ) + , referencePicturesFormat( referencePicturesFormat_ ) + , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ ) + , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionCreateInfoKHR( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoSessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & + operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionCreateInfoKHR & operator=( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoSessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoSessionCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + VideoSessionCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoSessionCreateInfoKHR & + setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT + { + pVideoProfile = pVideoProfile_; + return *this; + } + + VideoSessionCreateInfoKHR & setPictureFormat( VULKAN_HPP_NAMESPACE::Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT + { + pictureFormat = pictureFormat_; + return *this; + } + + VideoSessionCreateInfoKHR & + 
setMaxCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT + { + maxCodedExtent = maxCodedExtent_; + return *this; + } + + VideoSessionCreateInfoKHR & + setReferencePicturesFormat( VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ ) VULKAN_HPP_NOEXCEPT + { + referencePicturesFormat = referencePicturesFormat_; + return *this; + } + + VideoSessionCreateInfoKHR & + setMaxReferencePicturesSlotsCount( uint32_t maxReferencePicturesSlotsCount_ ) VULKAN_HPP_NOEXCEPT + { + maxReferencePicturesSlotsCount = maxReferencePicturesSlotsCount_; + return *this; + } + + VideoSessionCreateInfoKHR & + setMaxReferencePicturesActiveCount( uint32_t maxReferencePicturesActiveCount_ ) VULKAN_HPP_NOEXCEPT + { + maxReferencePicturesActiveCount = maxReferencePicturesActiveCount_; + return *this; + } + + operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionCreateInfoKHR const & ) const = default; +# else + bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( flags == rhs.flags ) && ( pVideoProfile == rhs.pVideoProfile ) && + ( pictureFormat == rhs.pictureFormat ) && ( maxCodedExtent == rhs.maxCodedExtent ) && + ( referencePicturesFormat == rhs.referencePicturesFormat ) && + ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) && + ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount ); + } + + bool operator!=( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR; + const void * pNext = {}; + uint32_t queueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {}; + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile = {}; + VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {}; + VULKAN_HPP_NAMESPACE::Format referencePicturesFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t maxReferencePicturesSlotsCount = {}; + uint32_t maxReferencePicturesActiveCount = {}; + }; + static_assert( sizeof( VideoSessionCreateInfoKHR ) == sizeof( VkVideoSessionCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoSessionCreateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoSessionParametersCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoSessionParametersCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {} ) VULKAN_HPP_NOEXCEPT + : videoSessionParametersTemplate( videoSessionParametersTemplate_ ) + , videoSession( videoSession_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoSessionParametersCreateInfoKHR( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoSessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & + operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionParametersCreateInfoKHR & + operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoSessionParametersCreateInfoKHR & setVideoSessionParametersTemplate( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParametersTemplate = videoSessionParametersTemplate_; + return *this; + } + + VideoSessionParametersCreateInfoKHR & + setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT + { + videoSession = videoSession_; + return *this; + } + + operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionParametersCreateInfoKHR const & ) const = default; +# else + bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate ) && + ( videoSession == rhs.videoSession ); + } + + bool operator!=( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; + }; + static_assert( sizeof( VideoSessionParametersCreateInfoKHR ) == sizeof( VkVideoSessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoSessionParametersCreateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct DisplayPowerInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = + VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT + : powerState( powerState_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPowerInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & + operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT + { + powerState = powerState_; + return *this; + } + + operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPowerInfoEXT const & ) const = default; +#else + bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState ); + } + + bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; + }; + static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayPowerInfoEXT; + }; + + struct MappedMemoryRange + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , offset( offset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + : MappedMemoryRange( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & + operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MappedMemoryRange const & ) const = default; +#else + bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && + ( size == rhs.size ); + } + + bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eMappedMemoryRange>
+  {
+    using Type = MappedMemoryRange;
+  };
+
+  struct MemoryRequirements
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_           = {},
+                                             VULKAN_HPP_NAMESPACE::DeviceSize alignment_      = {},
+                                             uint32_t                         memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+      : size( size_ )
+      , alignment( alignment_ )
+      , memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryRequirements( *reinterpret_cast<MemoryRequirements const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryRequirements &
+      operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements *>( this );
+    }
+
+    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryRequirements const & ) const = default;
+#else
+    bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize size           = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize alignment      = {};
+    uint32_t                         memoryTypeBits = {};
+  };
+  static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryRequirements>::value, "struct wrapper is not a standard layout!"
);
+
+  struct MemoryRequirements2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryRequirements2 &
+      operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements2 *>( this );
+    }
+
+    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryRequirements2 const & ) const = default;
+#else
+    bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
+    }
+
+    bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType      sType              = StructureType::eMemoryRequirements2;
+    void *                                   pNext              = {};
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+  };
+  static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = MemoryRequirements2; + }; + using MemoryRequirements2KHR = MemoryRequirements2; + + struct DeviceGroupPresentCapabilitiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceGroupPresentCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( + std::array const & presentMask_ = {}, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT + : presentMask( presentMask_ ) + , modes( modes_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR & + operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default; +#else + bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) && + ( modes == rhs.modes ); + } + + bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D presentMask = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + }; + static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceGroupPresentCapabilitiesKHR; + }; + + struct PhysicalDeviceSurfaceInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT + : surface( surface_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & + operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } + + operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default; +#else + bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface ); + } + + bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + }; + static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSurfaceInfo2KHR; + }; + + struct DeviceMemoryOpaqueCaptureAddressInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & + operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOpaqueCaptureAddressInfo & + operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default; +#else + bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); + } + + bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceMemoryOpaqueCaptureAddressInfo; + }; + using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; + + struct PresentInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {}, + const uint32_t * pImageIndices_ = {}, + VULKAN_HPP_NAMESPACE::Result * pResults_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , swapchainCount( swapchainCount_ ) + , pSwapchains( pSwapchains_ ) + , pImageIndices( pImageIndices_ ) + , pResults( pResults_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , swapchainCount( static_cast( swapchains_.size() ) ) + , pSwapchains( swapchains_.data() ) + , pImageIndices( imageIndices_.data() ) + , pResults( results_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() ); + VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) ); + VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) ); +# else + if ( swapchains_.size() != imageIndices_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" ); + } + if ( !results_.empty() && ( swapchains_.size() != results_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" ); + } + if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore 
* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setWaitSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT + { + pSwapchains = pSwapchains_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setSwapchains( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ ) + VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( swapchains_.size() ); + pSwapchains = swapchains_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT + { + pImageIndices = pImageIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setImageIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( imageIndices_.size() ); + pImageIndices = imageIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT + { + pResults = pResults_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setResults( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( results_.size() ); + pResults = results_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentInfoKHR const & ) const = default; +#else + bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) && + ( pSwapchains == rhs.pSwapchains ) && ( pImageIndices == rhs.pImageIndices ) && + ( pResults == rhs.pResults ); + } + + bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains = {}; + const uint32_t * pImageIndices = {}; + VULKAN_HPP_NAMESPACE::Result * pResults = {}; + }; + static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PresentInfoKHR; + }; + + struct SubmitInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {}, + uint32_t commandBufferCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , pWaitDstStageMask( pWaitDstStageMask_ ) + , commandBufferCount( commandBufferCount_ ) + , pCommandBuffers( pCommandBuffers_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + {} + + VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SubmitInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitDstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + commandBuffers_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphores_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , pWaitDstStageMask( waitDstStageMask_.data() ) + , commandBufferCount( static_cast( commandBuffers_.size() ) ) + , pCommandBuffers( commandBuffers_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) + , pSignalSemaphores( signalSemaphores_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() ); +# else + if ( waitSemaphores_.size() != waitDstStageMask_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setWaitSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo 
& + setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + pWaitDstStageMask = pWaitDstStageMask_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setWaitDstStageMask( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitDstStageMask_.size() ); + pWaitDstStageMask = waitDstStageMask_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBuffers = pCommandBuffers_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setCommandBuffers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ ) + VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBuffers_.size() ); + pCommandBuffers = commandBuffers_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setSignalSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubmitInfo const & ) const = default; +#else + bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) && + ( commandBufferCount == rhs.commandBufferCount ) && ( pCommandBuffers == rhs.pCommandBuffers ) && + ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && ( pSignalSemaphores == rhs.pSignalSemaphores ); + } + + bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask = {}; + uint32_t commandBufferCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; + }; + static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SubmitInfo; + }; + + struct SemaphoreSubmitInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + uint64_t value_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask_ = {}, + uint32_t deviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , value( value_ ) + , stageMask( stageMask_ ) + , deviceIndex( deviceIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfoKHR( SemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSubmitInfoKHR( VkSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreSubmitInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfoKHR & + operator=( SemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSubmitInfoKHR & operator=( VkSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreSubmitInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + SemaphoreSubmitInfoKHR & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + { + value = value_; + return *this; + } + + SemaphoreSubmitInfoKHR & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask_ ) VULKAN_HPP_NOEXCEPT + { + stageMask = stageMask_; + return *this; + } + + SemaphoreSubmitInfoKHR & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndex = deviceIndex_; + return *this; + } + + operator VkSemaphoreSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreSubmitInfoKHR const & ) const = default; +#else + bool operator==( SemaphoreSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( value == rhs.value ) && ( stageMask == rhs.stageMask ) && ( deviceIndex == rhs.deviceIndex ); + } + + bool operator!=( SemaphoreSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + uint64_t value = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask = {}; + uint32_t deviceIndex = {}; + }; + static_assert( sizeof( SemaphoreSubmitInfoKHR ) == sizeof( VkSemaphoreSubmitInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SemaphoreSubmitInfoKHR; + }; + + struct SubmitInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubmitInfo2KHR( + VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_ = {}, + uint32_t waitSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pWaitSemaphoreInfos_ = {}, + uint32_t commandBufferInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR * pCommandBufferInfos_ = {}, + uint32_t signalSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pSignalSemaphoreInfos_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , waitSemaphoreInfoCount( waitSemaphoreInfoCount_ ) + , pWaitSemaphoreInfos( pWaitSemaphoreInfos_ ) + , commandBufferInfoCount( commandBufferInfoCount_ ) + , pCommandBufferInfos( pCommandBufferInfos_ ) + , signalSemaphoreInfoCount( signalSemaphoreInfoCount_ ) + , pSignalSemaphoreInfos( pSignalSemaphoreInfos_ ) + {} + + VULKAN_HPP_CONSTEXPR SubmitInfo2KHR( SubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo2KHR( VkSubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SubmitInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2KHR( + VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitSemaphoreInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + commandBufferInfos_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphoreInfos_ = {} ) + : flags( flags_ ) + , waitSemaphoreInfoCount( static_cast( waitSemaphoreInfos_.size() ) ) + , pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ) + , commandBufferInfoCount( static_cast( commandBufferInfos_.size() ) ) + , pCommandBufferInfos( commandBufferInfos_.data() ) + , signalSemaphoreInfoCount( static_cast( signalSemaphoreInfos_.size() ) ) + , pSignalSemaphoreInfos( signalSemaphoreInfos_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2KHR & operator=( SubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo2KHR & operator=( VkSubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubmitInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubmitInfo2KHR & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SubmitInfo2KHR & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreInfoCount = waitSemaphoreInfoCount_; + return *this; + } + + SubmitInfo2KHR & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pWaitSemaphoreInfos_ ) + VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreInfos = pWaitSemaphoreInfos_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2KHR & setWaitSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreInfoCount = static_cast( waitSemaphoreInfos_.size() ); + pWaitSemaphoreInfos = waitSemaphoreInfos_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo2KHR & 
setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInfoCount = commandBufferInfoCount_; + return *this; + } + + SubmitInfo2KHR & setPCommandBufferInfos( + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBufferInfos = pCommandBufferInfos_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2KHR & setCommandBufferInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInfoCount = static_cast( commandBufferInfos_.size() ); + pCommandBufferInfos = commandBufferInfos_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo2KHR & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreInfoCount = signalSemaphoreInfoCount_; + return *this; + } + + SubmitInfo2KHR & setPSignalSemaphoreInfos( + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreInfos = pSignalSemaphoreInfos_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2KHR & setSignalSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreInfoCount = static_cast( signalSemaphoreInfos_.size() ); + pSignalSemaphoreInfos = signalSemaphoreInfos_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSubmitInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubmitInfo2KHR const & ) const = default; +#else + bool operator==( SubmitInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) && + ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) && + ( commandBufferInfoCount == rhs.commandBufferInfoCount ) && + ( pCommandBufferInfos == rhs.pCommandBufferInfos ) && + ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) && + ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos ); + } + + bool operator!=( SubmitInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags = {}; + uint32_t waitSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pWaitSemaphoreInfos = {}; + uint32_t commandBufferInfoCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR * pCommandBufferInfos = {}; + uint32_t signalSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pSignalSemaphoreInfos = {}; + }; + static_assert( sizeof( SubmitInfo2KHR ) == sizeof( VkSubmitInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SubmitInfo2KHR; + }; + + class Queue + { + public: + using CType = VkQueue; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eQueue; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + + public: + VULKAN_HPP_CONSTEXPR Queue() = default; + VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT : m_queue( queue ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Queue & operator=( VkQueue queue ) VULKAN_HPP_NOEXCEPT + { + m_queue = queue; + return *this; + } +#endif + + Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queue = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Queue const & ) const = default; +#else + bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue == rhs.m_queue; + } + + bool operator!=( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue != rhs.m_queue; + } + + bool operator<( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue < rhs.m_queue; + } +#endif + + template + void getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = CheckpointData2NVAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getCheckpointDataNV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = CheckpointDataNVAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + 
VULKAN_HPP_NODISCARD Result + bindSparse( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindSparse( ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result presentKHR( const PresentInfoKHR & presentInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + submit( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + submit2KHR( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit2KHR( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD Result + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT + { + return m_queue; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_queue != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_queue == VK_NULL_HANDLE; + } + + private: + VkQueue m_queue = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct DeviceQueueInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueIndex( queueIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueIndex = queueIndex_; + return *this; + } + + operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueInfo2 const & ) const = default; +#else + bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueIndex == rhs.queueIndex ); + } + + bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueIndex = {}; + }; + static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceQueueInfo2; + }; + + struct FenceGetFdInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FenceGetFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & + operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + FenceGetFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetFdInfoKHR const & ) const = default; +#else + bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && + ( handleType == rhs.handleType ); + } + + bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = FenceGetFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct FenceGetWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FenceGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & + operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + FenceGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && + ( handleType == rhs.handleType ); + } + + bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = FenceGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct GeneratedCommandsMemoryRequirementsInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , maxSequencesCount( maxSequencesCount_ ) + {} + + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( + GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : GeneratedCommandsMemoryRequirementsInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsMemoryRequirementsInfoNV & + operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & + setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSequencesCount = maxSequencesCount_; + return *this; + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default; +#else + bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && + ( maxSequencesCount == rhs.maxSequencesCount ); + } + + bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t maxSequencesCount = {}; + }; + static_assert( sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) == + sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = GeneratedCommandsMemoryRequirementsInfoNV; + }; + + struct ImageDrmFormatModifierPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImageDrmFormatModifierPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierPropertiesEXT & + operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierPropertiesEXT & + operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default; +#else + bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ); + } + + bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; + void * pNext = {}; + uint64_t drmFormatModifier = {}; + }; + static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
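+  // Illustrative usage sketch (not from the generated header itself): ImageDrmFormatModifierPropertiesEXT
+  // is an output structure; with the enhanced-mode API it is returned by
+  // vk::Device::getImageDrmFormatModifierPropertiesEXT, e.g.
+  //
+  //   vk::ImageDrmFormatModifierPropertiesEXT drmProps =
+  //       device.getImageDrmFormatModifierPropertiesEXT( image );
+  //   uint64_t modifier = drmProps.drmFormatModifier;
+  //
+  // assuming `device` and `image` were created elsewhere with
+  // VK_EXT_image_drm_format_modifier enabled.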
); + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierPropertiesEXT; + }; + + struct ImageMemoryRequirementsInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & + operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default; +#else + bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); + } + + bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + }; + static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
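+  // Illustrative usage sketch (not from the generated header itself): ImageMemoryRequirementsInfo2
+  // wraps the image whose requirements are being queried; with the enhanced-mode API this
+  // might look like:
+  //
+  //   vk::MemoryRequirements2 memReq =
+  //       device.getImageMemoryRequirements2( vk::ImageMemoryRequirementsInfo2( image ) );
+  //   vk::DeviceSize size = memReq.memoryRequirements.size;
+  //
+  // assuming `device` and `image` were created elsewhere (core Vulkan 1.1, or
+  // VK_KHR_get_memory_requirements2 on Vulkan 1.0).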
); + + template <> + struct CppType + { + using Type = ImageMemoryRequirementsInfo2; + }; + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + + struct SparseImageFormatProperties + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , imageGranularity( imageGranularity_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageFormatProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseImageFormatProperties & + operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageFormatProperties const & ) const = default; +#else + bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags ); + } + + bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; + }; + static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + struct SparseImageMemoryRequirements + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, + uint32_t imageMipTailFirstLod_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT + : formatProperties( formatProperties_ ) + , imageMipTailFirstLod( imageMipTailFirstLod_ ) + , imageMipTailSize( imageMipTailSize_ ) + , imageMipTailOffset( imageMipTailOffset_ ) + , imageMipTailStride( imageMipTailStride_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryRequirements( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryRequirements & + operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryRequirements const & ) const = default; +#else + bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( formatProperties == rhs.formatProperties ) && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) && + ( imageMipTailSize == rhs.imageMipTailSize ) && ( imageMipTailOffset == rhs.imageMipTailOffset ) && + ( imageMipTailStride == rhs.imageMipTailStride ); + } + + bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {}; + uint32_t imageMipTailFirstLod = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {}; + }; + static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + struct ImageSparseMemoryRequirementsInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImageSparseMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & + operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSparseMemoryRequirementsInfo2 & + operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default; +#else + bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); + } + + bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + }; + static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageSparseMemoryRequirementsInfo2; + }; + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + + struct SparseImageMemoryRequirements2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryRequirements( memoryRequirements_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryRequirements2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryRequirements2 & + operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default; +#else + bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements ); + } + + bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {}; + }; + static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
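+  // Illustrative usage sketch (not from the generated header itself): the sparse-image query
+  // returns a vector of SparseImageMemoryRequirements2 in the enhanced-mode API, e.g.
+  //
+  //   std::vector<vk::SparseImageMemoryRequirements2> sparseReqs =
+  //       device.getImageSparseMemoryRequirements2( vk::ImageSparseMemoryRequirementsInfo2( image ) );
+  //
+  // assuming `device` and a sparsely bound `image` were created elsewhere.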
); + + template <> + struct CppType + { + using Type = SparseImageMemoryRequirements2; + }; + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + + struct SubresourceLayout + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT + : offset( offset_ ) + , size( size_ ) + , rowPitch( rowPitch_ ) + , arrayPitch( arrayPitch_ ) + , depthPitch( depthPitch_ ) + {} + + VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT + : SubresourceLayout( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & + operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubresourceLayout const & ) const = default; +#else + bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) && + ( arrayPitch == rhs.arrayPitch ) && ( depthPitch == rhs.depthPitch ); + } + + bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {}; + }; + static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ImageViewAddressPropertiesNVX + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceAddress( deviceAddress_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewAddressPropertiesNVX( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageViewAddressPropertiesNVX & + operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default; +#else + bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && + ( size == rhs.size ); + } + + bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + static_assert( sizeof( ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageViewAddressPropertiesNVX; + }; + + struct ImageViewHandleInfoNVX + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + VULKAN_HPP_NAMESPACE::Sampler sampler_ = {} ) VULKAN_HPP_NOEXCEPT + : imageView( imageView_ ) + , descriptorType( descriptorType_ ) + , sampler( sampler_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewHandleInfoNVX( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & + operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + ImageViewHandleInfoNVX & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT + { + sampler = sampler_; + return *this; + } + + operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewHandleInfoNVX const & ) const = default; +#else + bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && + ( descriptorType == rhs.descriptorType ) && ( sampler == rhs.sampler ); + } + + bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + }; + static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageViewHandleInfoNVX; + }; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct MemoryGetAndroidHardwareBufferInfoANDROID + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( + MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT + : MemoryGetAndroidHardwareBufferInfoANDROID( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & + operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetAndroidHardwareBufferInfoANDROID & + operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryGetAndroidHardwareBufferInfoANDROID & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default; +# else + bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); + } + + bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == + sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
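+  // Illustrative usage sketch (not from the generated header itself): on Android, the struct
+  // above feeds vk::Device::getMemoryAndroidHardwareBufferANDROID in the enhanced-mode API:
+  //
+  //   vk::MemoryGetAndroidHardwareBufferInfoANDROID getBufferInfo( memory );
+  //   AHardwareBuffer * buffer = device.getMemoryAndroidHardwareBufferANDROID( getBufferInfo );
+  //
+  // assuming `device` and exportable `memory` were created elsewhere with
+  // VK_ANDROID_external_memory_android_hardware_buffer enabled.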
); + + template <> + struct CppType + { + using Type = MemoryGetAndroidHardwareBufferInfoANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct MemoryGetFdInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & + operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + MemoryGetFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetFdInfoKHR const & ) const = default; +#else + bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && + ( handleType == rhs.handleType ); + } + + bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+  );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryGetFdInfoKHR>
+  {
+    using Type = MemoryGetFdInfoKHR;
+  };
+
+  struct MemoryFdPropertiesKHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryFdPropertiesKHR( *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryFdPropertiesKHR &
+                            operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryFdPropertiesKHR *>( this );
+    }
+
+    operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryFdPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryFdPropertiesKHR const & ) const = default;
+#else
+    bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eMemoryFdPropertiesKHR;
+    void *                              pNext          = {};
+    uint32_t                            memoryTypeBits = {};
+  };
+  static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryFdPropertiesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
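+  // Illustrative usage sketch (not from the generated header itself): MemoryGetFdInfoKHR is the
+  // input to the fd-export call, and MemoryFdPropertiesKHR is the output of the fd-import query;
+  // with the enhanced-mode API this might look like:
+  //
+  //   vk::MemoryGetFdInfoKHR getFdInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //   int fd = device.getMemoryFdKHR( getFdInfo );
+  //
+  //   vk::MemoryFdPropertiesKHR fdProps = device.getMemoryFdPropertiesKHR(
+  //       vk::ExternalMemoryHandleTypeFlagBits::eDmaBufEXT, importedFd );
+  //
+  // assuming `device`, `memory`, and `importedFd` come from elsewhere and
+  // VK_KHR_external_memory_fd is enabled.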
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
+  {
+    using Type = MemoryFdPropertiesKHR;
+  };
+
+  struct MemoryHostPointerPropertiesEXT
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryHostPointerPropertiesEXT( *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryHostPointerPropertiesEXT &
+                            operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT *>( this );
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default;
+#else
+    bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eMemoryHostPointerPropertiesEXT;
+    void *                              pNext          = {};
+    uint32_t                            memoryTypeBits = {};
+  };
+  static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryHostPointerPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = MemoryHostPointerPropertiesEXT; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct MemoryGetWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & + operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + MemoryGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && + ( handleType == rhs.handleType ); + } + + bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MemoryGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct MemoryWin32HandlePropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryWin32HandlePropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryWin32HandlePropertiesKHR & + operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default; +# else + bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MemoryWin32HandlePropertiesKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct MemoryGetZirconHandleInfoFUCHSIA + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & + operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryGetZirconHandleInfoFUCHSIA & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + MemoryGetZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default; +# else + bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && + ( handleType == rhs.handleType ); + } + + bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( MemoryGetZirconHandleInfoFUCHSIA ) == sizeof( VkMemoryGetZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MemoryGetZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct MemoryZirconHandlePropertiesFUCHSIA + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryZirconHandlePropertiesFUCHSIA & + operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryZirconHandlePropertiesFUCHSIA & + operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default; +# else + bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + static_assert( sizeof( MemoryZirconHandlePropertiesFUCHSIA ) == sizeof( VkMemoryZirconHandlePropertiesFUCHSIA ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MemoryZirconHandlePropertiesFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct PastPresentationTimingGOOGLE + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, + uint64_t desiredPresentTime_ = {}, + uint64_t actualPresentTime_ = {}, + uint64_t earliestPresentTime_ = {}, + uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT + : presentID( presentID_ ) + , desiredPresentTime( desiredPresentTime_ ) + , actualPresentTime( actualPresentTime_ ) + , earliestPresentTime( earliestPresentTime_ ) + , presentMargin( presentMargin_ ) + {} + + VULKAN_HPP_CONSTEXPR + PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : PastPresentationTimingGOOGLE( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingGOOGLE & + operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default; +#else + bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) && + ( actualPresentTime == rhs.actualPresentTime ) && ( earliestPresentTime == rhs.earliestPresentTime ) && + ( presentMargin == rhs.presentMargin ); + } + + bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; + uint64_t actualPresentTime = {}; + uint64_t earliestPresentTime = {}; + uint64_t presentMargin = {}; + }; + static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
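+  // Illustrative usage sketch (not from the generated header itself): PastPresentationTimingGOOGLE
+  // entries are returned per swapchain by the enhanced-mode API, e.g.
+  //
+  //   std::vector<vk::PastPresentationTimingGOOGLE> timings =
+  //       device.getPastPresentationTimingGOOGLE( swapchain );
+  //
+  // assuming `device` and `swapchain` were created elsewhere with
+  // VK_GOOGLE_display_timing enabled.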
); + + union PerformanceValueDataINTEL + { + PerformanceValueDataINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) ); + } + + PerformanceValueDataINTEL( uint32_t value32_ = {} ) : value32( value32_ ) {} + + PerformanceValueDataINTEL( uint64_t value64_ ) : value64( value64_ ) {} + + PerformanceValueDataINTEL( float valueFloat_ ) : valueFloat( valueFloat_ ) {} + + PerformanceValueDataINTEL( const char * valueString_ ) : valueString( valueString_ ) {} + + PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT + { + value32 = value32_; + return *this; + } + + PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT + { + value64 = value64_; + return *this; + } + + PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT + { + valueFloat = valueFloat_; + return *this; + } + + PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT + { + valueBool = valueBool_; + return *this; + } + + PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT + { + valueString = valueString_; + return *this; + } + + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & + operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) ); + return *this; + } + + operator VkPerformanceValueDataINTEL const &() const + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceValueDataINTEL &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + uint32_t value32; + uint64_t value64; + float valueFloat; + VULKAN_HPP_NAMESPACE::Bool32 valueBool; + const char * valueString; +#else + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char * valueString; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct PerformanceValueINTEL + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + PerformanceValueINTEL( + VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , data( data_ ) + {} + + PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceValueINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + 
VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32; + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {}; + }; + static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PipelineExecutableInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : pipeline( pipeline_ ) + , executableIndex( executableIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutableInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & + operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT + { + executableIndex = executableIndex_; + return *this; + } + + operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutableInfoKHR const & ) const = default; +#else + bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ) && + ( executableIndex == rhs.executableIndex ); + } + + bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + uint32_t executableIndex = {}; + }; + static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineExecutableInfoKHR; + }; + + struct PipelineExecutableInternalRepresentationKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineExecutableInternalRepresentationKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutableInternalRepresentationKHR( std::array const & name_ = {}, + std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, + size_t dataSize_ = {}, + void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : name( name_ ) + , description( description_ ) + , isText( isText_ ) + , dataSize( dataSize_ ) + , pData( pData_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( + PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineExecutableInternalRepresentationKHR( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineExecutableInternalRepresentationKHR( std::array const & name_, + std::array const & description_, + VULKAN_HPP_NAMESPACE::Bool32 isText_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) + : name( name_ ) + , description( description_ ) + , isText( isText_ ) + , dataSize( data_.size() * sizeof( T ) ) + , pData( data_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR & + operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInternalRepresentationKHR & + operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutableInternalRepresentationKHR const & ) const = default; +#else + bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && + ( description == rhs.description ) && ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) && + ( pData == rhs.pData ); + } + + bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::Bool32 isText = {}; + size_t dataSize = {}; + void * pData = {}; + }; + static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == + sizeof( VkPipelineExecutableInternalRepresentationKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineExecutableInternalRepresentationKHR; + }; + + struct PipelineInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT + : pipeline( pipeline_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineInfoKHR const & ) const = default; +#else + bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ); + } + + bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineInfoKHR; + }; + + struct PipelineExecutablePropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, + std::array const & name_ = {}, + std::array const & description_ = {}, + uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + : stages( stages_ ) + , name( name_ ) + , description( description_ ) + , subgroupSize( subgroupSize_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutablePropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR & + operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutablePropertiesKHR const & ) const = default; +#else + bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( name == rhs.name ) && + ( description == rhs.description ) && ( subgroupSize == rhs.subgroupSize ); + } + + bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + uint32_t subgroupSize = {}; + }; + static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineExecutablePropertiesKHR; + }; + + union PipelineExecutableStatisticValueKHR + { + PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) ); + } + + PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) : b32( b32_ ) {} + + PipelineExecutableStatisticValueKHR( int64_t i64_ ) : i64( i64_ ) {} + + PipelineExecutableStatisticValueKHR( uint64_t u64_ ) : u64( u64_ ) {} + + PipelineExecutableStatisticValueKHR( double f64_ ) : f64( f64_ ) {} + + PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT + { + b32 = b32_; + return *this; + } + + PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT + { + i64 = i64_; + return *this; + } + + PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT + { + u64 = u64_; + return *this; + } + + PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT + { + f64 = f64_; + return *this; + } + + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & + operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) ); + return *this; + } + + operator VkPipelineExecutableStatisticValueKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableStatisticValueKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::Bool32 b32; + int64_t i64; + uint64_t u64; + double f64; +#else + VkBool32 b32; + int64_t i64; + uint64_t u64; + double f64; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct PipelineExecutableStatisticKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + PipelineExecutableStatisticKHR( std::array const & name_ = {}, + std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} ) + VULKAN_HPP_NOEXCEPT + : name( name_ ) + , description( description_ ) + , format( format_ ) + , value( value_ ) + {} + + PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutableStatisticKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineExecutableStatisticKHR & + operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + 
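+    // Illustrative sketch (not part of the generated header): with the
+    // VK_KHR_pipeline_executable_properties extension enabled and the enhanced-mode
+    // wrappers available, these statistics are typically queried roughly as follows,
+    // where `device` and `pipeline` are assumed handles and `0` an assumed executable index:
+    //
+    //   vk::PipelineExecutableInfoKHR execInfo( pipeline, 0 );
+    //   std::vector<vk::PipelineExecutableStatisticKHR> stats =
+    //     device.getPipelineExecutableStatisticsKHR( execInfo );
+    //   for ( auto const & stat : stats )
+    //   {
+    //     // stat.format indicates which member of stat.value (b32 / i64 / u64 / f64) is valid.
+    //   }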
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format =
+      VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
+  };
+  static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableStatisticKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
+  {
+    using Type = PipelineExecutableStatisticKHR;
+  };
+
+  struct RefreshCycleDurationGOOGLE
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT
+      : refreshDuration( refreshDuration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RefreshCycleDurationGOOGLE( *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 RefreshCycleDurationGOOGLE &
+      operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+    operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE *>( this );
+    }
+
+    operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default;
+#else
+    bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( refreshDuration == rhs.refreshDuration );
+    }
+
+    bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint64_t refreshDuration = {};
+  };
+  static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RefreshCycleDurationGOOGLE>::value,
+                 "struct wrapper is not a standard layout!"
); + + struct SemaphoreGetFdInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreGetFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & + operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + SemaphoreGetFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default; +#else + bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( handleType == rhs.handleType ); + } + + bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SemaphoreGetFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SemaphoreGetWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & + operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + SemaphoreGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( handleType == rhs.handleType ); + } + + bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SemaphoreGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct SemaphoreGetZirconHandleInfoFUCHSIA + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & + operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetZirconHandleInfoFUCHSIA & + operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + SemaphoreGetZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetZirconHandleInfoFUCHSIA const & ) const = default; +# else + bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( handleType == rhs.handleType ); + } + + bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( SemaphoreGetZirconHandleInfoFUCHSIA ) == sizeof( VkSemaphoreGetZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SemaphoreGetZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoGetMemoryPropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoGetMemoryPropertiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoGetMemoryPropertiesKHR( + uint32_t memoryBindIndex_ = {}, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryBindIndex( memoryBindIndex_ ) + , pMemoryRequirements( pMemoryRequirements_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoGetMemoryPropertiesKHR( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoGetMemoryPropertiesKHR( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoGetMemoryPropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR & + operator=( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoGetMemoryPropertiesKHR & operator=( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoGetMemoryPropertiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoGetMemoryPropertiesKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryBindIndex = memoryBindIndex_; + return *this; + } + + VideoGetMemoryPropertiesKHR & + setPMemoryRequirements( VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ ) VULKAN_HPP_NOEXCEPT + { + pMemoryRequirements = pMemoryRequirements_; + return *this; + } + + operator VkVideoGetMemoryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoGetMemoryPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoGetMemoryPropertiesKHR const & ) const = default; +# else + bool operator==( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && + ( pMemoryRequirements == rhs.pMemoryRequirements ); + } + + bool operator!=( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoGetMemoryPropertiesKHR; + const void * pNext = {}; + uint32_t memoryBindIndex = {}; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements = {}; + }; + static_assert( sizeof( VideoGetMemoryPropertiesKHR ) == sizeof( VkVideoGetMemoryPropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoGetMemoryPropertiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct ImportFenceFdInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + int fd_ = {} ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportFenceFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & + operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImportFenceFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } + + operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportFenceFdInfoKHR const & ) const = default; +#else + bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && + ( handleType == rhs.handleType ) && ( fd == rhs.fd ); + } + + bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportFenceFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportFenceWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & + operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImportFenceWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && + ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name ); + } + + bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + 
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImportFenceWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ImportSemaphoreFdInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + int fd_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & + operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImportSemaphoreFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } + + operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default; +#else + bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); + } + + bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + 
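+    // Illustrative sketch (not part of the generated header): assuming
+    // VK_KHR_external_semaphore_fd is enabled and `device`, `semaphore`, and `fd` are
+    // placeholder handles/values, an opaque-fd import typically looks roughly like:
+    //
+    //   vk::ImportSemaphoreFdInfoKHR importInfo;
+    //   importInfo.setSemaphore( semaphore )
+    //     .setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
+    //     .setFd( fd );
+    //   device.importSemaphoreFdKHR( importInfo );  // on success, ownership of fd transfers to Vulkan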
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImportSemaphoreFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportSemaphoreWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImportSemaphoreWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & + operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( flags == rhs.flags ) && ( handleType == 
rhs.handleType ) && ( handle == rhs.handle ) && + ( name == rhs.name ); + } + + bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImportSemaphoreWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportSemaphoreZirconHandleInfoFUCHSIA + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportSemaphoreZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + zx_handle_t zirconHandle_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , zirconHandle( zirconHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreZirconHandleInfoFUCHSIA( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & + operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreZirconHandleInfoFUCHSIA & + operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportSemaphoreZirconHandleInfoFUCHSIA & + setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + ImportSemaphoreZirconHandleInfoFUCHSIA & + setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImportSemaphoreZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT + { + zirconHandle = zirconHandle_; + return *this; + } + + operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { 
+ return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + zx_handle_t zirconHandle = {}; + }; + static_assert( sizeof( ImportSemaphoreZirconHandleInfoFUCHSIA ) == sizeof( VkImportSemaphoreZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImportSemaphoreZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct InitializePerformanceApiInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eInitializePerformanceApiInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT + : pUserData( pUserData_ ) + {} + + VULKAN_HPP_CONSTEXPR + InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : InitializePerformanceApiInfoINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & + operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } + + operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default; +#else + bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext 
) && ( pUserData == rhs.pUserData ); + } + + bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL; + const void * pNext = {}; + void * pUserData = {}; + }; + static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = InitializePerformanceApiInfoINTEL; + }; + + struct DisplayEventInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = + VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT + : displayEvent( displayEvent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayEventInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & + operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT + { + displayEvent = displayEvent_; + return *this; + } + + operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayEventInfoEXT const & ) const = default; +#else + bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent ); + } + + bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut; + }; + static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayEventInfoEXT; + }; + + struct XYColorEXT + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + {} + + VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : XYColorEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( XYColorEXT const & ) const = default; +#else + bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( x == rhs.x ) && ( y == rhs.y ); + } + + bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float x = {}; + float y = {}; + }; + static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct HdrMetadataEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, + float maxLuminance_ = {}, + float minLuminance_ = {}, + float maxContentLightLevel_ = {}, + float maxFrameAverageLightLevel_ = {} ) VULKAN_HPP_NOEXCEPT + : displayPrimaryRed( displayPrimaryRed_ ) + , displayPrimaryGreen( displayPrimaryGreen_ ) + , displayPrimaryBlue( displayPrimaryBlue_ ) + , whitePoint( whitePoint_ ) + , maxLuminance( maxLuminance_ ) + , minLuminance( minLuminance_ ) + , maxContentLightLevel( maxContentLightLevel_ ) + , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ ) + {} + + VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : HdrMetadataEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + HdrMetadataEXT & + setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT + { + 
displayPrimaryRed = displayPrimaryRed_; + return *this; + } + + HdrMetadataEXT & + setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT + { + displayPrimaryGreen = displayPrimaryGreen_; + return *this; + } + + HdrMetadataEXT & + setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT + { + displayPrimaryBlue = displayPrimaryBlue_; + return *this; + } + + HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT + { + whitePoint = whitePoint_; + return *this; + } + + HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT + { + maxLuminance = maxLuminance_; + return *this; + } + + HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT + { + minLuminance = minLuminance_; + return *this; + } + + HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxContentLightLevel = maxContentLightLevel_; + return *this; + } + + HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameAverageLightLevel = maxFrameAverageLightLevel_; + return *this; + } + + operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HdrMetadataEXT const & ) const = default; +#else + bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) && + ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) && + ( whitePoint == rhs.whitePoint ) && ( maxLuminance == rhs.maxLuminance ) && + ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) && + ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); + } + + bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; + float maxLuminance = {}; + float minLuminance = {}; + float maxContentLightLevel = {}; + float maxFrameAverageLightLevel = {}; + }; + static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = HdrMetadataEXT; + }; + + struct SemaphoreSignalInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + uint64_t value_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , value( value_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreSignalInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & + operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + { + value = value_; + return *this; + } + + operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreSignalInfo const & ) const = default; +#else + bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( value == rhs.value ); + } + + bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + uint64_t value = {}; + }; + static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
+  {
+    using Type = SemaphoreSignalInfo;
+  };
+  using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoSessionParametersUpdateInfoKHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eVideoSessionParametersUpdateInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( uint32_t updateSequenceCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : updateSequenceCount( updateSequenceCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    VideoSessionParametersUpdateInfoKHR( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoSessionParametersUpdateInfoKHR( *reinterpret_cast<VideoSessionParametersUpdateInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR &
+                            operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoSessionParametersUpdateInfoKHR &
+      operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    VideoSessionParametersUpdateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoSessionParametersUpdateInfoKHR & setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      updateSequenceCount = updateSequenceCount_;
+      return *this;
+    }
+
+    operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( this );
+    }
+
+    operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoSessionParametersUpdateInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoSessionParametersUpdateInfoKHR const & ) const = default;
+# else
+    bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( updateSequenceCount == rhs.updateSequenceCount );
+    }
+
+    bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::eVideoSessionParametersUpdateInfoKHR;
+    const void *                        pNext               = {};
+    uint32_t                            updateSequenceCount = {};
+  };
+  static_assert( sizeof( VideoSessionParametersUpdateInfoKHR ) == sizeof( VkVideoSessionParametersUpdateInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoSessionParametersUpdateInfoKHR>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = VideoSessionParametersUpdateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct SemaphoreWaitInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, + uint32_t semaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {}, + const uint64_t * pValues_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , semaphoreCount( semaphoreCount_ ) + , pSemaphores( pSemaphores_ ) + , pValues( pValues_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreWaitInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo( + VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ = {} ) + : flags( flags_ ) + , semaphoreCount( static_cast( semaphores_.size() ) ) + , pSemaphores( semaphores_.data() ) + , pValues( values_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() ); +# else + if ( semaphores_.size() != values_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & + operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreCount = semaphoreCount_; + return *this; + } + + SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSemaphores = pSemaphores_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo & setSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_ ) + VULKAN_HPP_NOEXCEPT + { + semaphoreCount = static_cast( semaphores_.size() ); + pSemaphores = semaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT + { + pValues = pValues_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo & + setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
this ); + } + + operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreWaitInfo const & ) const = default; +#else + bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( semaphoreCount == rhs.semaphoreCount ) && ( pSemaphores == rhs.pSemaphores ) && + ( pValues == rhs.pValues ); + } + + bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {}; + uint32_t semaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {}; + const uint64_t * pValues = {}; + }; + static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SemaphoreWaitInfo; + }; + using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + class Device; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueAccelerationStructureKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueAccelerationStructureNV = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueBuffer = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueBufferView = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = PoolFree; + }; + using UniqueCommandBuffer = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueCommandPool = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueCuFunctionNVX = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueCuModuleNVX = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDeferredOperationKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDescriptorPool = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = PoolFree; + }; + using UniqueDescriptorSet = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDescriptorSetLayout = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDescriptorUpdateTemplate = UniqueHandle; + using UniqueDescriptorUpdateTemplateKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectFree; + }; + using UniqueDeviceMemory = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueEvent = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueFence = 
UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueFramebuffer = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueImage = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueImageView = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueIndirectCommandsLayoutNV = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniquePipeline = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniquePipelineCache = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniquePipelineLayout = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniquePrivateDataSlotEXT = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueQueryPool = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueRenderPass = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSampler = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSamplerYcbcrConversion = UniqueHandle; + using UniqueSamplerYcbcrConversionKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSemaphore = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueShaderModule = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSwapchainKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueValidationCacheEXT = UniqueHandle; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueVideoSessionKHR = UniqueHandle; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueVideoSessionParametersKHR = UniqueHandle; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Device + { + public: + using CType = VkDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + + public: + VULKAN_HPP_CONSTEXPR Device() = default; + VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT : m_device( device ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Device & operator=( VkDevice device ) VULKAN_HPP_NOEXCEPT + { + m_device = device; + return *this; + } +#endif + + Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_device = {}; + return *this; + } + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Device const & ) const = default; +#else + bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device == rhs.m_device; + } + + bool operator!=( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device != rhs.m_device; + } + + bool operator<( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device < rhs.m_device; + } +#endif + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD Result + acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD ResultValue + acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + uint32_t * pImageIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD ResultValue + acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * 
pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = CommandBufferAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, CommandBufferAllocator>>::type + allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename CommandBufferAllocator = std::allocator>, + typename B = CommandBufferAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, CommandBufferAllocator>>::type + allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DescriptorSetAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, 
DescriptorSetAllocator>>::type + allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename DescriptorSetAllocator = std::allocator>, + typename B = DescriptorSetAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, DescriptorSetAllocator>>::type + allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + allocateMemory( const MemoryAllocateInfo & allocateInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( + uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindAccelerationStructureMemoryNV( + ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + bindBufferMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory2( ArrayProxy const & 
bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + bindBufferMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory2KHR( ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + bindImageMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory2( ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + bindImageMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory2KHR( ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t videoSessionBindMemoryCount, + const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindVideoSessionMemoryKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + ArrayProxy const & videoSessionBindMemories, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const CopyAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createAccelerationStructureKHR( 
const AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createAccelerationStructureNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createAccelerationStructureNV( + const AccelerationStructureCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE + typename ResultValueType>::type + createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Buffer * pBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createBuffer( const BufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createBufferUnique( const BufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createBufferView( const BufferViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createBufferViewUnique( const BufferViewCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createCommandPool( const CommandPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> createComputePipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue createComputePipeline( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = PipelineAllocator, + typename std::enable_if>::value, + int>::type = 0> + 
VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> createComputePipelineUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createCuFunctionNVX( const CuFunctionCreateInfoNVX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createCuModuleNVX( const CuModuleCreateInfoNVX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createCuModuleNVXUnique( const CuModuleCreateInfoNVX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDeferredOperationKHR( + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createDeferredOperationKHR( + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + 
VULKAN_HPP_INLINE typename ResultValueType>::type + createDeferredOperationKHRUnique( Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDescriptorUpdateTemplateUnique( const 
DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Event * pEvent, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createEvent( const EventCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createEventUnique( const EventCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createFence( const FenceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createFenceUnique( const FenceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createFramebuffer( const FramebufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createFramebufferUnique( const FramebufferCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue createGraphicsPipeline( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = PipelineAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + 
template + VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelineUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createImage( const ImageCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createImageUnique( const ImageCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ImageView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createImageView( const ImageViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createImageViewUnique( const ImageViewCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional 
allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createPipelineCache( const PipelineCacheCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT * pPrivateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createPrivateDataSlotEXT( + const PrivateDataSlotCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + 
VULKAN_HPP_NODISCARD Result + createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createQueryPool( const QueryPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue createRayTracingPipelineKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = PipelineAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + 
createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelineKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue createRayTracingPipelineNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = PipelineAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelineNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createRenderPass( const RenderPassCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createRenderPassUnique( const RenderPassCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createRenderPass2( const RenderPassCreateInfo2 & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * 
pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Sampler * pSampler, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSampler( const SamplerCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSamplerUnique( const SamplerCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( + const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( + const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSemaphore( const SemaphoreCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createShaderModule( const ShaderModuleCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( + uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + createSharedSwapchainsKHR( ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SwapchainKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + createSharedSwapchainsKHR( ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createSharedSwapchainKHR( + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef 
VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, SwapchainKHRAllocator>>::type + createSharedSwapchainsKHRUnique( + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = SwapchainKHRAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, SwapchainKHRAllocator>>::type + createSharedSwapchainsKHRUnique( + ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createValidationCacheEXT( + const ValidationCacheCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD Result + createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createVideoSessionKHR( const VideoSessionCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createVideoSessionKHRUnique( const VideoSessionCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR( + const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createVideoSessionParametersKHR( const VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createVideoSessionParametersKHRUnique( const VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + deferredOperationJoinKHR( 
VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+      VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureKHR(
+      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+      VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV(
+      VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+                        Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDeferredOperationKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorPool( + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorSetLayout( + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplate( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyIndirectCommandsLayoutNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineLayout( + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPrivateDataSlotEXT( + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversion( + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversionKHR( + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyValidationCacheEXT( + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + void + destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyVideoSessionKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyVideoSessionParametersKHR( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR 
videoSessionParameters, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayPowerInfoEXT & displayPowerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + flushMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + flushMappedMemoryRanges( ArrayProxy const & memoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result free( 
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const AccelerationStructureBuildGeometryInfoKHR & buildInfo, + ArrayProxy const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + DeviceAddress getAccelerationStructureAddressKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress getAccelerationStructureAddressKHR( + const AccelerationStructureDeviceAddressInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename 
ResultValueType::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( + const AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getAccelerationStructureMemoryRequirementsNV( + const AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template + VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID( + const struct AHardwareBuffer * buffer, + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + DeviceAddress + getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress + getBufferAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + DeviceAddress + getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress + getBufferAddressEXT( const BufferDeviceAddressInfo & info, + Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + DeviceAddress + getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress + getBufferAddressKHR( const BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( + VULKAN_HPP_NAMESPACE::Buffer buffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( + const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getBufferMemoryRequirements2( + const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( + const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getBufferMemoryRequirements2KHR( + const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + 
template + uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( + uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + ArrayProxy const & timestamps, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Uint64_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result + getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( + const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getDescriptorSetLayoutSupport( + const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( + const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getDescriptorSetLayoutSupportKHR( + const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getGroupSurfacePresentModes2EXT( 
const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize + getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + PFN_vkVoidFunction + getProcAddr( const char * pName, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction + getProcAddr( const std::string & name, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue + getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 
+ template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue + getQueue2( const DeviceQueueInfo2 & queueInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD Result + getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + void getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( + const GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getGeneratedCommandsMemoryRequirementsNV( + const GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
typename ResultValueType::type + getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( + const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getImageMemoryRequirements2( + const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( + const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getImageMemoryRequirements2KHR( + const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageMemoryRequirementsAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void 
getImageSparseMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageMemoryRequirements2Allocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2( + const ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageMemoryRequirements2Allocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2KHR( + const ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( + VULKAN_HPP_NAMESPACE::Image image, + const ImageSubresource & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + 
uint32_t + getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint32_t + getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template + VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( + const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + VULKAN_HPP_NODISCARD Result + getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD Result + getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryWin32HandleKHR( const 
MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD Result + getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + template + VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + template + VULKAN_HPP_NODISCARD Result getMemoryZirconHandlePropertiesFUCHSIA( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template + VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename PastPresentationTimingGOOGLEAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PastPresentationTimingGOOGLEAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( + VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Uint8_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d + VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename PipelineExecutableInternalRepresentationKHRAllocator = + std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineExecutableInternalRepresentationKHRAllocator, + typename std::enable_if::value, + int>::type = 0> + 
VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutableInternalRepresentationsKHR( + const PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineExecutablePropertiesKHRAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutablePropertiesKHR( + const PipelineInfoKHR & pipelineInfo, + PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineExecutableStatisticKHRAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutableStatisticsKHR( + const PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + uint64_t * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD uint64_t + getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getQueryPoolResults( 
VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + ArrayProxy const & data, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> + getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue + getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d + VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d + VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d + VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t group, + VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore 
semaphore, + uint64_t * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + template + VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template + VULKAN_HPP_NODISCARD Result + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + 
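// Illustrative usage sketch (not part of the generated header): exporting a semaphore as a POSIX
// file descriptor via the enhanced-mode overload. Assumes a valid vk::Device `device` and a
// vk::Semaphore `semaphore` created with opaque-fd export support.
//
//   vk::SemaphoreGetFdInfoKHR getFdInfo( semaphore,
//                                        vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
//   int fd = device.getSemaphoreFdKHR( getFdInfo );  // the caller owns the returned fd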
VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Uint8_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = ImageAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + ImageAllocator & imageAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Uint8_tAllocator, + typename std::enable_if::value, 
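// Illustrative usage sketch (not part of the generated header): the enhanced-mode
// getSwapchainImagesKHR overload returns the image list directly instead of the usual
// count/pointer two-call idiom. Assumes a valid vk::Device `device` and vk::SwapchainKHR
// `swapchain`.
//
//   std::vector<vk::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain );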
int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t * pVideoSessionMemoryRequirementsCount, + VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename VideoGetMemoryPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = VideoGetMemoryPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD Result + importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD Result + importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD Result 
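// Illustrative usage sketch (not part of the generated header): importing a sync fd into an
// existing fence. Assumes a valid vk::Device `device`, a vk::Fence `fence`, and a file descriptor
// `syncFd` obtained from another API; ownership of the fd transfers to the driver on success.
//
//   vk::ImportFenceFdInfoKHR importInfo( fence,
//                                        vk::FenceImportFlagBits::eTemporary,
//                                        vk::ExternalFenceHandleTypeFlagBits::eSyncFd,
//                                        syncFd );
//   device.importFenceFdKHR( importInfo );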
importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + template + VULKAN_HPP_NODISCARD Result importSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importSemaphoreZirconHandleFUCHSIA( + const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template + VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( + const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( + uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + invalidateMappedMemoryRanges( ArrayProxy const & memoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
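// Illustrative usage sketch (not part of the generated header): mapping host-visible memory,
// writing into it, and unmapping. Assumes a valid vk::Device `device`, a host-visible
// vk::DeviceMemory `memory`, and `srcData` / `uploadSize` describing the bytes to copy.
//
//   void * mapped = device.mapMemory( memory, 0 /* offset */, uploadSize );
//   std::memcpy( mapped, srcData, static_cast<size_t>( uploadSize ) );
//   device.unmapMemory( memory );
//
// For non-coherent memory, flushMappedMemoryRanges / invalidateMappedMemoryRanges calls are also
// required around the CPU access.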
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + ArrayProxy const & srcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + ArrayProxy const & srcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type registerDisplayEventEXT( + VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayEventInfoEXT & displayEventInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayEventInfoEXT & displayEventInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else + template + 
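// Illustrative usage sketch (not part of the generated header): merging per-thread pipeline
// caches into one via the ArrayProxy overload. Assumes a valid vk::Device `device` and
// vk::PipelineCache handles `mainCache`, `threadCacheA`, `threadCacheB`.
//
//   device.mergePipelineCaches( mainCache, { threadCacheA, threadCacheB } );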
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type releasePerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + resetFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
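// Illustrative usage sketch (not part of the generated header): typical per-frame reset calls.
// Assumes a valid vk::Device `device`, a vk::Fence `frameFence`, and a vk::CommandPool
// `commandPool`.
//
//   device.resetFences( { frameFence } );  // ArrayProxy overload, throws on failure
//   device.resetCommandPool( commandPool, vk::CommandPoolResetFlagBits::eReleaseResources );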
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + resetFences( ArrayProxy const & fences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setHdrMetadataEXT( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setHdrMetadataEXT( ArrayProxy const & swapchains, + ArrayProxy const & metadata, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + 
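// Illustrative usage sketch (not part of the generated header): attaching a debug name to a
// handle so it shows up in validation messages and captures. Assumes a valid vk::Device `device`
// and a vk::Queue `graphicsQueue`; dispatchable handles can be cast to uint64_t directly.
//
//   vk::DebugUtilsObjectNameInfoEXT nameInfo(
//       vk::ObjectType::eQueue,
//       reinterpret_cast<uint64_t>( static_cast<VkQueue>( graphicsQueue ) ),
//       "graphics queue" );
//   device.setDebugUtilsObjectNameEXT( nameInfo );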
VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + signalSemaphore( const SemaphoreSignalInfo & signalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void updateDescriptorSets( uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateDescriptorSets( ArrayProxy const & descriptorWrites, + ArrayProxy const & descriptorCopies, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT 
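// Illustrative usage sketch (not part of the generated header): writing a single uniform-buffer
// binding with the ArrayProxy overload of updateDescriptorSets. Assumes a valid vk::Device
// `device`, a vk::DescriptorSet `descriptorSet`, a vk::Buffer `uniformBuffer`, and `uniformSize`
// bytes of data in it.
//
//   vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0 /* offset */, uniformSize );
//   vk::WriteDescriptorSet write( descriptorSet, 0 /* binding */, 0 /* arrayElement */,
//                                 1 /* descriptorCount */, vk::DescriptorType::eUniformBuffer,
//                                 nullptr, &bufferInfo );
//   device.updateDescriptorSets( write, nullptr );  // no descriptor copies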
) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VideoSessionParametersUpdateInfoKHR & updateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD Result + waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitForFences( ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + ArrayProxy const & data, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type writeAccelerationStructuresPropertyKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif 
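// Illustrative usage sketch (not part of the generated header): blocking on a frame fence and on
// a timeline semaphore value. Assumes a valid vk::Device `device`, a vk::Fence `frameFence`, and
// a timeline vk::Semaphore `timeline`. Both calls return vk::Result (eSuccess or eTimeout);
// error codes throw vk::SystemError when exceptions are enabled.
//
//   vk::Result r = device.waitForFences( frameFence, VK_TRUE, UINT64_MAX );
//
//   uint64_t waitValue = 42;  // counter value the timeline must reach
//   vk::SemaphoreWaitInfo waitInfo( {}, 1 /* semaphoreCount */, &timeline, &waitValue );
//   r = device.waitSemaphores( waitInfo, UINT64_MAX );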
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT + { + return m_device; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_device != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_device == VK_NULL_HANDLE; + } + + private: + VkDevice m_device = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Device; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Device; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Device; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct DisplayModeParametersKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, + uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT + : visibleRegion( visibleRegion_ ) + , refreshRate( refreshRate_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeParametersKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & + operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayModeParametersKHR & + setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT + { + visibleRegion = visibleRegion_; + return *this; + } + + DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT + { + refreshRate = refreshRate_; + return *this; + } + + operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeParametersKHR const & ) const = default; +#else + bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate ); + } + + bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {}; + uint32_t refreshRate = {}; + }; + static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
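// Illustrative usage sketch (not part of the generated header): DisplayModeParametersKHR is a
// plain parameter struct; refreshRate is expressed in millihertz (60000 == 60 Hz). The setters
// return *this, so they can be chained.
//
//   vk::DisplayModeParametersKHR parameters;
//   parameters.setVisibleRegion( vk::Extent2D( 1920, 1080 ) ).setRefreshRate( 60000 );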
); + + struct DisplayModeCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , parameters( parameters_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & + operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DisplayModeCreateInfoKHR & + setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT + { + parameters = parameters_; + return *this; + } + + operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default; +#else + bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( parameters == rhs.parameters ); + } + + bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + }; + static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
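// Illustrative usage sketch (not part of the generated header): creating a display mode from the
// parameters above. Assumes a valid vk::PhysicalDevice `physicalDevice`, a vk::DisplayKHR
// `display`, and the `parameters` variable from the previous sketch; sType and pNext are filled
// in by the wrapper.
//
//   vk::DisplayModeCreateInfoKHR createInfo( {}, parameters );
//   vk::DisplayModeKHR mode = physicalDevice.createDisplayModeKHR( display, createInfo );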
); + + template <> + struct CppType + { + using Type = DisplayModeCreateInfoKHR; + }; + + class DisplayModeKHR + { + public: + using CType = VkDisplayModeKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR; + + public: + VULKAN_HPP_CONSTEXPR DisplayModeKHR() = default; + VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT + : m_displayModeKHR( displayModeKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DisplayModeKHR & operator=( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT + { + m_displayModeKHR = displayModeKHR; + return *this; + } +#endif + + DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_displayModeKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeKHR const & ) const = default; +#else + bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR == rhs.m_displayModeKHR; + } + + bool operator!=( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR != rhs.m_displayModeKHR; + } + + bool operator<( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR < rhs.m_displayModeKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayModeKHR m_displayModeKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct ExtensionProperties + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ExtensionProperties( std::array const & extensionName_ = {}, + uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : extensionName( extensionName_ ) + , specVersion( specVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExtensionProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExtensionProperties & + operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExtensionProperties const & ) const = default; +#else + bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( extensionName == rhs.extensionName ) && ( specVersion == rhs.specVersion ); + } + + bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D extensionName = {}; + uint32_t specVersion = {}; + }; + static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
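// Illustrative usage sketch (not part of the generated header): ExtensionProperties is what the
// enumerate*ExtensionProperties calls return; extensionName is an ArrayWrapper1D around the
// fixed-size char array. Assumes a valid vk::PhysicalDevice `physicalDevice`.
//
//   std::vector<vk::ExtensionProperties> extensions =
//       physicalDevice.enumerateDeviceExtensionProperties();
//   for ( vk::ExtensionProperties const & ext : extensions )
//     if ( strcmp( ext.extensionName.data(), VK_KHR_SWAPCHAIN_EXTENSION_NAME ) == 0 )
//     { /* swapchain extension available */ }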
); + + struct LayerProperties + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + LayerProperties( std::array const & layerName_ = {}, + uint32_t specVersion_ = {}, + uint32_t implementationVersion_ = {}, + std::array const & description_ = {} ) VULKAN_HPP_NOEXCEPT + : layerName( layerName_ ) + , specVersion( specVersion_ ) + , implementationVersion( implementationVersion_ ) + , description( description_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : LayerProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LayerProperties const & ) const = default; +#else + bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) && + ( implementationVersion == rhs.implementationVersion ) && ( description == rhs.description ); + } + + bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layerName = {}; + uint32_t specVersion = {}; + uint32_t implementationVersion = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + }; + static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
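// Illustrative usage sketch (not part of the generated header): LayerProperties is returned by
// the layer enumeration helpers; the global query needs no instance.
//
//   std::vector<vk::LayerProperties> layers = vk::enumerateInstanceLayerProperties();
//   for ( vk::LayerProperties const & layer : layers )
//   { /* layer.layerName, layer.specVersion, layer.description */ }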
); + + struct PerformanceCounterKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( + VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, + std::array const & uuid_ = {} ) VULKAN_HPP_NOEXCEPT + : unit( unit_ ) + , scope( scope_ ) + , storage( storage_ ) + , uuid( uuid_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceCounterKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR & + operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceCounterKHR const & ) const = default; +#else + bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && + ( storage == rhs.storage ) && ( uuid == rhs.uuid ); + } + + bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric; + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer; + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uuid = {}; + }; + static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
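// Illustrative usage sketch (not part of the generated header): PerformanceCounterKHR entries
// come from PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersAndDescriptionsKHR; the
// fields describe how each counter is scoped and reported. Assumes `counters` is the vector of
// vk::PerformanceCounterKHR obtained from that call.
//
//   for ( vk::PerformanceCounterKHR const & counter : counters )
//     if ( ( counter.scope == vk::PerformanceCounterScopeKHR::eCommandBuffer ) &&
//          ( counter.storage == vk::PerformanceCounterStorageKHR::eUint64 ) )
//     { /* counter can be sampled per command buffer as a uint64 value */ }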
); + + template <> + struct CppType + { + using Type = PerformanceCounterKHR; + }; + + struct PerformanceCounterDescriptionKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, + std::array const & name_ = {}, + std::array const & category_ = {}, + std::array const & description_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , name( name_ ) + , category( category_ ) + , description( description_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceCounterDescriptionKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR & + operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default; +#else + bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) && + ( category == rhs.category ) && ( description == rhs.description ); + } + + bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D category = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + }; + static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PerformanceCounterDescriptionKHR; + }; + + struct DisplayModePropertiesKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT + : displayMode( displayMode_ ) + , parameters( parameters_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModePropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayModePropertiesKHR & + operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModePropertiesKHR const & ) const = default; +#else + bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters ); + } + + bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + }; + static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
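// Illustrative usage sketch (not part of the generated header): listing the built-in modes of a
// display. Assumes a valid vk::PhysicalDevice `physicalDevice` and a vk::DisplayKHR `display`.
//
//   std::vector<vk::DisplayModePropertiesKHR> modes =
//       physicalDevice.getDisplayModePropertiesKHR( display );
//   for ( vk::DisplayModePropertiesKHR const & m : modes )
//   { /* m.displayMode, m.parameters.visibleRegion, m.parameters.refreshRate */ }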
);
+
+  struct DisplayModeProperties2KHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(
+      VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : displayModeProperties( displayModeProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayModeProperties2KHR &
+      operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeProperties2KHR *>( this );
+    }
+
+    operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeProperties2KHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModeProperties2KHR const & ) const = default;
+#else
+    bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties );
+    }
+
+    bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType            sType                 = StructureType::eDisplayModeProperties2KHR;
+    void *                                         pNext                 = {};
+    VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
+  };
+  static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = DisplayModeProperties2KHR; + }; + + struct DisplayPlaneInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, + uint32_t planeIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : mode( mode_ ) + , planeIndex( planeIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneInfo2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & + operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = planeIndex_; + return *this; + } + + operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && + ( planeIndex == rhs.planeIndex ); + } + + bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; + uint32_t planeIndex = {}; + }; + static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
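// Illustrative usage sketch (not part of the generated header): querying the capabilities of a
// display plane for a given mode via the "2" entry point. Assumes a valid vk::PhysicalDevice
// `physicalDevice`, a vk::DisplayModeKHR `mode`, and a plane index `planeIndex` below the
// physical device's display plane count.
//
//   vk::DisplayPlaneInfo2KHR planeInfo( mode, planeIndex );
//   vk::DisplayPlaneCapabilities2KHR caps2 =
//       physicalDevice.getDisplayPlaneCapabilities2KHR( planeInfo );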
); + + template <> + struct CppType + { + using Type = DisplayPlaneInfo2KHR; + }; + + struct DisplayPlaneCapabilitiesKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : supportedAlpha( supportedAlpha_ ) + , minSrcPosition( minSrcPosition_ ) + , maxSrcPosition( maxSrcPosition_ ) + , minSrcExtent( minSrcExtent_ ) + , maxSrcExtent( maxSrcExtent_ ) + , minDstPosition( minDstPosition_ ) + , maxDstPosition( maxDstPosition_ ) + , minDstExtent( minDstExtent_ ) + , maxDstExtent( maxDstExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneCapabilitiesKHR & + operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default; +#else + bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && + ( maxSrcPosition == rhs.maxSrcPosition ) && ( minSrcExtent == rhs.minSrcExtent ) && + ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) && + ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && + ( maxDstExtent == rhs.maxDstExtent ); + } + + bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {}; + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {}; + }; + static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + struct DisplayPlaneCapabilities2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT + : capabilities( capabilities_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneCapabilities2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneCapabilities2KHR & + operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities ); + } + + bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; + }; + static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayPlaneCapabilities2KHR; + }; + + struct DisplayPlanePropertiesKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, + uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : currentDisplay( currentDisplay_ ) + , currentStackIndex( currentStackIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlanePropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPlanePropertiesKHR & + operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default; +#else + bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex ); + } + + bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; + uint32_t currentStackIndex = {}; + }; + static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + struct DisplayPlaneProperties2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : displayPlaneProperties( displayPlaneProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneProperties2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneProperties2KHR & + operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( displayPlaneProperties == rhs.displayPlaneProperties ); + } + + bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {}; + }; + static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayPlaneProperties2KHR; + }; + + struct DisplayPropertiesKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, + const char * displayName_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT + : display( display_ ) + , displayName( displayName_ ) + , physicalDimensions( physicalDimensions_ ) + , physicalResolution( physicalResolution_ ) + , supportedTransforms( supportedTransforms_ ) + , planeReorderPossible( planeReorderPossible_ ) + , persistentContent( persistentContent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPropertiesKHR & + operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPropertiesKHR const & ) const = default; +#else + bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( display == rhs.display ) && ( displayName == rhs.displayName ) && + ( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) && + ( supportedTransforms == rhs.supportedTransforms ) && + ( planeReorderPossible == rhs.planeReorderPossible ) && ( persistentContent == rhs.persistentContent ); + } + + bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayKHR display = {}; + const char * displayName = {}; + VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {}; + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {}; + }; + static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct DisplayProperties2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : displayProperties( displayProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayProperties2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayProperties2KHR & + operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayProperties2KHR const & ) const = default; +#else + bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties ); + } + + bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; + }; + static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayProperties2KHR; + }; + + struct PhysicalDeviceExternalBufferInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , usage( usage_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalBufferInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & + operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + PhysicalDeviceExternalBufferInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && + ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalBufferInfo; + }; + using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; + + struct ExternalMemoryProperties + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : externalMemoryFeatures( externalMemoryFeatures_ ) + , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryProperties & + operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryProperties const & ) const = default; +#else + bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + } + + bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; + }; + static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; + + struct ExternalBufferProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : externalMemoryProperties( externalMemoryProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalBufferProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalBufferProperties & + operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalBufferProperties const & ) const = default; +#else + bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( externalMemoryProperties == rhs.externalMemoryProperties ); + } + + bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; + }; + static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalBufferProperties; + }; + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + + struct PhysicalDeviceExternalFenceInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFenceInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & + operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalFenceInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalFenceInfo; + }; + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; + + struct ExternalFenceProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + , externalFenceFeatures( externalFenceFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalFenceProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalFenceProperties & + operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalFenceProperties const & ) const = default; +#else + bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && + ( externalFenceFeatures == rhs.externalFenceFeatures ); + } + + bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {}; + }; + static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalFenceProperties; + }; + using ExternalFencePropertiesKHR = ExternalFenceProperties; + + struct ImageFormatProperties + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, + uint32_t maxMipLevels_ = {}, + uint32_t maxArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxExtent( maxExtent_ ) + , maxMipLevels( maxMipLevels_ ) + , maxArrayLayers( maxArrayLayers_ ) + , sampleCounts( sampleCounts_ ) + , maxResourceSize( maxResourceSize_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageFormatProperties & + operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatProperties const & ) const = default; +#else + bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && + ( maxArrayLayers == rhs.maxArrayLayers ) && ( sampleCounts == rhs.sampleCounts ) && + ( maxResourceSize == rhs.maxResourceSize ); + } + + bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; + uint32_t maxMipLevels = {}; + uint32_t maxArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; + }; + static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ExternalImageFormatPropertiesNV + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : imageFormatProperties( imageFormatProperties_ ) + , externalMemoryFeatures( externalMemoryFeatures_ ) + , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalImageFormatPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalImageFormatPropertiesNV & + operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default; +#else + bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( imageFormatProperties == rhs.imageFormatProperties ) && + ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + } + + bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {}; + }; + static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceExternalSemaphoreInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExternalSemaphoreInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & + operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalSemaphoreInfo & + operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalSemaphoreInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalSemaphoreInfo; + }; + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + + struct ExternalSemaphoreProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + , externalSemaphoreFeatures( externalSemaphoreFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalSemaphoreProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalSemaphoreProperties & + operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalSemaphoreProperties const & ) const = default; +#else + bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && + ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); + } + + bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; + }; + static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalSemaphoreProperties; + }; + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + + struct PhysicalDeviceFeatures2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT + : features( features_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & + operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFeatures2 & + setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT + { + features = features_; + return *this; + } + + operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default; +#else + bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features ); + } + + bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {}; + }; + static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
+  {
+    using Type = PhysicalDeviceFeatures2;
+  };
+  using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
+
+  struct FormatProperties
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {},
+                        VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {},
+                        VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+      : linearTilingFeatures( linearTilingFeatures_ )
+      , optimalTilingFeatures( optimalTilingFeatures_ )
+      , bufferFeatures( bufferFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FormatProperties( *reinterpret_cast<FormatProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties *>( this );
+    }
+
+    operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( FormatProperties const & ) const = default;
+#else
+    bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( linearTilingFeatures == rhs.linearTilingFeatures ) &&
+             ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures );
+    }
+
+    bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures  = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures        = {};
+  };
+  static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FormatProperties>::value, "struct wrapper is not a standard layout!" );
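+  // Illustrative usage sketch, not part of the generated header: FormatProperties is
+  // layout-compatible with VkFormatProperties, so it can be filled by the C API or returned by
+  // the C++ wrapper calls. Assuming the default `vk` namespace and a valid vk::PhysicalDevice
+  // named `physicalDevice` (both are assumptions for this example):
+  //
+  //   vk::FormatProperties props = physicalDevice.getFormatProperties( vk::Format::eR8G8B8A8Unorm );
+  //   bool canSample = static_cast<bool>( props.optimalTilingFeatures &
+  //                                       vk::FormatFeatureFlagBits::eSampledImage );
+  //
+  // The static_asserts above are what make the reinterpret_cast-based conversions between the
+  // wrapper and the C struct safe.
+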
+
+  struct FormatProperties2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : formatProperties( formatProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FormatProperties2( *reinterpret_cast<FormatProperties2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 FormatProperties2 &
+      operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties2 *>( this );
+    }
+
+    operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( FormatProperties2 const & ) const = default;
+#else
+    bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties );
+    }
+
+    bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType    sType            = StructureType::eFormatProperties2;
+    void *                                 pNext            = {};
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
+  };
+  static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FormatProperties2>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = FormatProperties2; + }; + using FormatProperties2KHR = FormatProperties2; + + struct PhysicalDeviceFragmentShadingRateKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRateKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleCounts( sampleCounts_ ) + , fragmentSize( fragmentSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateKHR & + operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateKHR & + operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) && + ( fragmentSize == rhs.fragmentSize ); + } + + bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentShadingRateKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateKHR; + }; + + struct PhysicalDeviceImageFormatInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + , type( type_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageFormatInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & + operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default; +#else + bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && + ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( flags == rhs.flags ); + } + + bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = 
VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + }; + static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageFormatInfo2; + }; + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + + struct ImageFormatProperties2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : imageFormatProperties( imageFormatProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatProperties2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageFormatProperties2 & + operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatProperties2 const & ) const = default; +#else + bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties ); + } + + bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + }; + static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageFormatProperties2; + }; + using ImageFormatProperties2KHR = ImageFormatProperties2; + + struct MemoryType + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, + uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : propertyFlags( propertyFlags_ ) + , heapIndex( heapIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryType( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryType &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryType const & ) const = default; +#else + bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex ); + } + + bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {}; + uint32_t heapIndex = {}; + }; + static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct MemoryHeap + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : size( size_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryHeap( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryHeap const & ) const = default; +#else + bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( size == rhs.size ) && ( flags == rhs.flags ); + } + + bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {}; + }; + static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceMemoryProperties + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( + uint32_t memoryTypeCount_ = {}, + std::array const & memoryTypes_ = {}, + uint32_t memoryHeapCount_ = {}, + std::array const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeCount( memoryTypeCount_ ) + , memoryTypes( memoryTypes_ ) + , memoryHeapCount( memoryHeapCount_ ) + , memoryHeaps( memoryHeaps_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties & + operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes ) && + ( memoryHeapCount == rhs.memoryHeapCount ) && ( memoryHeaps == rhs.memoryHeaps ); + } + + bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t memoryTypeCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryTypes = {}; + uint32_t memoryHeapCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryHeaps = {}; + }; + static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  struct PhysicalDeviceMemoryProperties2
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryProperties( memoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMemoryProperties2( *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2 &
+      operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2 *>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                  sType            = StructureType::ePhysicalDeviceMemoryProperties2;
+    void *                                               pNext            = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>
+  {
+    using Type = PhysicalDeviceMemoryProperties2;
+  };
+  using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
+
+  struct MultisamplePropertiesEXT
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MultisamplePropertiesEXT( *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MultisamplePropertiesEXT &
+      operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultisamplePropertiesEXT *>( this );
+    }
+
+    operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultisamplePropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MultisamplePropertiesEXT const & ) const = default;
+#else
+    bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
+    }
+
+    bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                     = StructureType::eMultisamplePropertiesEXT;
+    void *                              pNext                     = {};
+    VULKAN_HPP_NAMESPACE::Extent2D      maxSampleLocationGridSize = {};
+  };
+  static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = MultisamplePropertiesEXT; + }; + + struct PhysicalDeviceLimits + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, + uint32_t maxImageDimension2D_ = {}, + uint32_t maxImageDimension3D_ = {}, + uint32_t maxImageDimensionCube_ = {}, + uint32_t maxImageArrayLayers_ = {}, + uint32_t maxTexelBufferElements_ = {}, + uint32_t maxUniformBufferRange_ = {}, + uint32_t maxStorageBufferRange_ = {}, + uint32_t maxPushConstantsSize_ = {}, + uint32_t maxMemoryAllocationCount_ = {}, + uint32_t maxSamplerAllocationCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, + uint32_t maxBoundDescriptorSets_ = {}, + uint32_t maxPerStageDescriptorSamplers_ = {}, + uint32_t maxPerStageDescriptorUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorSampledImages_ = {}, + uint32_t maxPerStageDescriptorStorageImages_ = {}, + uint32_t maxPerStageDescriptorInputAttachments_ = {}, + uint32_t maxPerStageResources_ = {}, + uint32_t maxDescriptorSetSamplers_ = {}, + uint32_t maxDescriptorSetUniformBuffers_ = {}, + uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetStorageBuffers_ = {}, + uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetSampledImages_ = {}, + uint32_t maxDescriptorSetStorageImages_ = {}, + uint32_t maxDescriptorSetInputAttachments_ = {}, + uint32_t maxVertexInputAttributes_ = {}, + uint32_t maxVertexInputBindings_ = {}, + uint32_t maxVertexInputAttributeOffset_ = {}, + uint32_t maxVertexInputBindingStride_ = {}, + uint32_t maxVertexOutputComponents_ = {}, + uint32_t maxTessellationGenerationLevel_ = {}, + uint32_t maxTessellationPatchSize_ = {}, + uint32_t maxTessellationControlPerVertexInputComponents_ = {}, + uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, + uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, + uint32_t maxTessellationControlTotalOutputComponents_ = {}, + uint32_t maxTessellationEvaluationInputComponents_ = {}, + uint32_t maxTessellationEvaluationOutputComponents_ = {}, + uint32_t maxGeometryShaderInvocations_ = {}, + uint32_t maxGeometryInputComponents_ = {}, + uint32_t maxGeometryOutputComponents_ = {}, + uint32_t maxGeometryOutputVertices_ = {}, + uint32_t maxGeometryTotalOutputComponents_ = {}, + uint32_t maxFragmentInputComponents_ = {}, + uint32_t maxFragmentOutputAttachments_ = {}, + uint32_t maxFragmentDualSrcAttachments_ = {}, + uint32_t maxFragmentCombinedOutputResources_ = {}, + uint32_t maxComputeSharedMemorySize_ = {}, + std::array const & maxComputeWorkGroupCount_ = {}, + uint32_t maxComputeWorkGroupInvocations_ = {}, + std::array const & maxComputeWorkGroupSize_ = {}, + uint32_t subPixelPrecisionBits_ = {}, + uint32_t subTexelPrecisionBits_ = {}, + uint32_t mipmapPrecisionBits_ = {}, + uint32_t maxDrawIndexedIndexValue_ = {}, + uint32_t maxDrawIndirectCount_ = {}, + float maxSamplerLodBias_ = {}, + float maxSamplerAnisotropy_ = {}, + uint32_t maxViewports_ = {}, + std::array const & maxViewportDimensions_ = {}, + std::array const & viewportBoundsRange_ = {}, + uint32_t viewportSubPixelBits_ = {}, + size_t minMemoryMapAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, + 
VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, + int32_t minTexelOffset_ = {}, + uint32_t maxTexelOffset_ = {}, + int32_t minTexelGatherOffset_ = {}, + uint32_t maxTexelGatherOffset_ = {}, + float minInterpolationOffset_ = {}, + float maxInterpolationOffset_ = {}, + uint32_t subPixelInterpolationOffsetBits_ = {}, + uint32_t maxFramebufferWidth_ = {}, + uint32_t maxFramebufferHeight_ = {}, + uint32_t maxFramebufferLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, + uint32_t maxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, + uint32_t maxSampleMaskWords_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, + float timestampPeriod_ = {}, + uint32_t maxClipDistances_ = {}, + uint32_t maxCullDistances_ = {}, + uint32_t maxCombinedClipAndCullDistances_ = {}, + uint32_t discreteQueuePriorities_ = {}, + std::array const & pointSizeRange_ = {}, + std::array const & lineWidthRange_ = {}, + float pointSizeGranularity_ = {}, + float lineWidthGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxImageDimension1D( maxImageDimension1D_ ) + , maxImageDimension2D( maxImageDimension2D_ ) + , maxImageDimension3D( maxImageDimension3D_ ) + , maxImageDimensionCube( maxImageDimensionCube_ ) + , maxImageArrayLayers( maxImageArrayLayers_ ) + , maxTexelBufferElements( maxTexelBufferElements_ ) + , maxUniformBufferRange( maxUniformBufferRange_ ) + , maxStorageBufferRange( maxStorageBufferRange_ ) + , maxPushConstantsSize( maxPushConstantsSize_ ) + , maxMemoryAllocationCount( maxMemoryAllocationCount_ ) + , maxSamplerAllocationCount( maxSamplerAllocationCount_ ) + , bufferImageGranularity( bufferImageGranularity_ ) + , sparseAddressSpaceSize( sparseAddressSpaceSize_ ) + , maxBoundDescriptorSets( maxBoundDescriptorSets_ ) + , maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ ) + , maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ) + , maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ) + , maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ) + , maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ) + , maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ) + , maxPerStageResources( maxPerStageResources_ ) + , maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ) + , maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ) + , maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ) + , maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ) + , 
maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ) + , maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ) + , maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ) + , maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ) + , maxVertexInputAttributes( maxVertexInputAttributes_ ) + , maxVertexInputBindings( maxVertexInputBindings_ ) + , maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ) + , maxVertexInputBindingStride( maxVertexInputBindingStride_ ) + , maxVertexOutputComponents( maxVertexOutputComponents_ ) + , maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ) + , maxTessellationPatchSize( maxTessellationPatchSize_ ) + , maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ) + , maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ) + , maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ) + , maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ) + , maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ) + , maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ) + , maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ) + , maxGeometryInputComponents( maxGeometryInputComponents_ ) + , maxGeometryOutputComponents( maxGeometryOutputComponents_ ) + , maxGeometryOutputVertices( maxGeometryOutputVertices_ ) + , maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ) + , maxFragmentInputComponents( maxFragmentInputComponents_ ) + , maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ) + , maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ) + , maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ) + , maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ) + , maxComputeWorkGroupCount( maxComputeWorkGroupCount_ ) + , maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ) + , maxComputeWorkGroupSize( maxComputeWorkGroupSize_ ) + , subPixelPrecisionBits( subPixelPrecisionBits_ ) + , subTexelPrecisionBits( subTexelPrecisionBits_ ) + , mipmapPrecisionBits( mipmapPrecisionBits_ ) + , maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ) + , maxDrawIndirectCount( maxDrawIndirectCount_ ) + , maxSamplerLodBias( maxSamplerLodBias_ ) + , maxSamplerAnisotropy( maxSamplerAnisotropy_ ) + , maxViewports( maxViewports_ ) + , maxViewportDimensions( maxViewportDimensions_ ) + , viewportBoundsRange( viewportBoundsRange_ ) + , viewportSubPixelBits( viewportSubPixelBits_ ) + , minMemoryMapAlignment( minMemoryMapAlignment_ ) + , minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ ) + , minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ) + , minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ) + , minTexelOffset( minTexelOffset_ ) + , maxTexelOffset( maxTexelOffset_ ) + , minTexelGatherOffset( minTexelGatherOffset_ ) + , maxTexelGatherOffset( maxTexelGatherOffset_ ) + , minInterpolationOffset( minInterpolationOffset_ ) + , maxInterpolationOffset( maxInterpolationOffset_ ) + , subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ) + , maxFramebufferWidth( maxFramebufferWidth_ ) + , maxFramebufferHeight( maxFramebufferHeight_ ) + , maxFramebufferLayers( maxFramebufferLayers_ ) + , framebufferColorSampleCounts( framebufferColorSampleCounts_ ) + , 
framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ) + , framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ) + , framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ) + , maxColorAttachments( maxColorAttachments_ ) + , sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ) + , sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ) + , sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ) + , sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ) + , storageImageSampleCounts( storageImageSampleCounts_ ) + , maxSampleMaskWords( maxSampleMaskWords_ ) + , timestampComputeAndGraphics( timestampComputeAndGraphics_ ) + , timestampPeriod( timestampPeriod_ ) + , maxClipDistances( maxClipDistances_ ) + , maxCullDistances( maxCullDistances_ ) + , maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ) + , discreteQueuePriorities( discreteQueuePriorities_ ) + , pointSizeRange( pointSizeRange_ ) + , lineWidthRange( lineWidthRange_ ) + , pointSizeGranularity( pointSizeGranularity_ ) + , lineWidthGranularity( lineWidthGranularity_ ) + , strictLines( strictLines_ ) + , standardSampleLocations( standardSampleLocations_ ) + , optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ) + , optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ) + , nonCoherentAtomSize( nonCoherentAtomSize_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLimits( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits & + operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLimits const & ) const = default; +#else + bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) && + ( maxImageDimension3D == rhs.maxImageDimension3D ) && + ( maxImageDimensionCube == rhs.maxImageDimensionCube ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && + ( maxTexelBufferElements == rhs.maxTexelBufferElements ) && + ( maxUniformBufferRange == rhs.maxUniformBufferRange ) && + ( maxStorageBufferRange == rhs.maxStorageBufferRange ) && + ( maxPushConstantsSize == rhs.maxPushConstantsSize ) && + ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) && + ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) && + ( bufferImageGranularity == rhs.bufferImageGranularity ) && + ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) && + ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) && + ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) && + ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) && + ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) && + 
( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) && + ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) && + ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) && + ( maxPerStageResources == rhs.maxPerStageResources ) && + ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) && + ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) && + ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) && + ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) && + ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) && + ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) && + ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) && + ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) && + ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) && + ( maxVertexInputBindings == rhs.maxVertexInputBindings ) && + ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) && + ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) && + ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) && + ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) && + ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) && + ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) && + ( maxTessellationControlPerVertexOutputComponents == + rhs.maxTessellationControlPerVertexOutputComponents ) && + ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) && + ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) && + ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) && + ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) && + ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) && + ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) && + ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) && + ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) && + ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) && + ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) && + ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) && + ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) && + ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) && + ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) && + ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) && + ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) && + ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) && + ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) && + ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) && + ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) && + ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) && + ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) && + ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) && + ( maxViewportDimensions == rhs.maxViewportDimensions ) && + ( 
viewportBoundsRange == rhs.viewportBoundsRange ) && + ( viewportSubPixelBits == rhs.viewportSubPixelBits ) && + ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) && + ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) && + ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) && + ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) && + ( minTexelOffset == rhs.minTexelOffset ) && ( maxTexelOffset == rhs.maxTexelOffset ) && + ( minTexelGatherOffset == rhs.minTexelGatherOffset ) && + ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) && + ( minInterpolationOffset == rhs.minInterpolationOffset ) && + ( maxInterpolationOffset == rhs.maxInterpolationOffset ) && + ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) && + ( maxFramebufferWidth == rhs.maxFramebufferWidth ) && + ( maxFramebufferHeight == rhs.maxFramebufferHeight ) && + ( maxFramebufferLayers == rhs.maxFramebufferLayers ) && + ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) && + ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) && + ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) && + ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) && + ( maxColorAttachments == rhs.maxColorAttachments ) && + ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) && + ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) && + ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) && + ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) && + ( storageImageSampleCounts == rhs.storageImageSampleCounts ) && + ( maxSampleMaskWords == rhs.maxSampleMaskWords ) && + ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) && + ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) && + ( maxCullDistances == rhs.maxCullDistances ) && + ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) && + ( discreteQueuePriorities == rhs.discreteQueuePriorities ) && ( pointSizeRange == rhs.pointSizeRange ) && + ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) && + ( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) && + ( standardSampleLocations == rhs.standardSampleLocations ) && + ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) && + ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) && + ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); + } + + bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t maxImageDimension1D = {}; + uint32_t maxImageDimension2D = {}; + uint32_t maxImageDimension3D = {}; + uint32_t maxImageDimensionCube = {}; + uint32_t maxImageArrayLayers = {}; + uint32_t maxTexelBufferElements = {}; + uint32_t maxUniformBufferRange = {}; + uint32_t maxStorageBufferRange = {}; + uint32_t maxPushConstantsSize = {}; + uint32_t maxMemoryAllocationCount = {}; + uint32_t maxSamplerAllocationCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {}; + uint32_t maxBoundDescriptorSets = {}; + uint32_t maxPerStageDescriptorSamplers = {}; + uint32_t maxPerStageDescriptorUniformBuffers = {}; + uint32_t 
maxPerStageDescriptorStorageBuffers = {}; + uint32_t maxPerStageDescriptorSampledImages = {}; + uint32_t maxPerStageDescriptorStorageImages = {}; + uint32_t maxPerStageDescriptorInputAttachments = {}; + uint32_t maxPerStageResources = {}; + uint32_t maxDescriptorSetSamplers = {}; + uint32_t maxDescriptorSetUniformBuffers = {}; + uint32_t maxDescriptorSetUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetStorageBuffers = {}; + uint32_t maxDescriptorSetStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetSampledImages = {}; + uint32_t maxDescriptorSetStorageImages = {}; + uint32_t maxDescriptorSetInputAttachments = {}; + uint32_t maxVertexInputAttributes = {}; + uint32_t maxVertexInputBindings = {}; + uint32_t maxVertexInputAttributeOffset = {}; + uint32_t maxVertexInputBindingStride = {}; + uint32_t maxVertexOutputComponents = {}; + uint32_t maxTessellationGenerationLevel = {}; + uint32_t maxTessellationPatchSize = {}; + uint32_t maxTessellationControlPerVertexInputComponents = {}; + uint32_t maxTessellationControlPerVertexOutputComponents = {}; + uint32_t maxTessellationControlPerPatchOutputComponents = {}; + uint32_t maxTessellationControlTotalOutputComponents = {}; + uint32_t maxTessellationEvaluationInputComponents = {}; + uint32_t maxTessellationEvaluationOutputComponents = {}; + uint32_t maxGeometryShaderInvocations = {}; + uint32_t maxGeometryInputComponents = {}; + uint32_t maxGeometryOutputComponents = {}; + uint32_t maxGeometryOutputVertices = {}; + uint32_t maxGeometryTotalOutputComponents = {}; + uint32_t maxFragmentInputComponents = {}; + uint32_t maxFragmentOutputAttachments = {}; + uint32_t maxFragmentDualSrcAttachments = {}; + uint32_t maxFragmentCombinedOutputResources = {}; + uint32_t maxComputeSharedMemorySize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupCount = {}; + uint32_t maxComputeWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupSize = {}; + uint32_t subPixelPrecisionBits = {}; + uint32_t subTexelPrecisionBits = {}; + uint32_t mipmapPrecisionBits = {}; + uint32_t maxDrawIndexedIndexValue = {}; + uint32_t maxDrawIndirectCount = {}; + float maxSamplerLodBias = {}; + float maxSamplerAnisotropy = {}; + uint32_t maxViewports = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxViewportDimensions = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D viewportBoundsRange = {}; + uint32_t viewportSubPixelBits = {}; + size_t minMemoryMapAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {}; + int32_t minTexelOffset = {}; + uint32_t maxTexelOffset = {}; + int32_t minTexelGatherOffset = {}; + uint32_t maxTexelGatherOffset = {}; + float minInterpolationOffset = {}; + float maxInterpolationOffset = {}; + uint32_t subPixelInterpolationOffsetBits = {}; + uint32_t maxFramebufferWidth = {}; + uint32_t maxFramebufferHeight = {}; + uint32_t maxFramebufferLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {}; + uint32_t maxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {}; 
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {}; + uint32_t maxSampleMaskWords = {}; + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {}; + float timestampPeriod = {}; + uint32_t maxClipDistances = {}; + uint32_t maxCullDistances = {}; + uint32_t maxCombinedClipAndCullDistances = {}; + uint32_t discreteQueuePriorities = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pointSizeRange = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lineWidthRange = {}; + float pointSizeGranularity = {}; + float lineWidthGranularity = {}; + VULKAN_HPP_NAMESPACE::Bool32 strictLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {}; + }; + static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceSparseProperties + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT + : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ) + , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ) + , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ) + , residencyAlignedMipSize( residencyAlignedMipSize_ ) + , residencyNonResidentStrict( residencyNonResidentStrict_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSparseProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseProperties & + operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) && + ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) && + ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) && + ( 
residencyAlignedMipSize == rhs.residencyAlignedMipSize ) && + ( residencyNonResidentStrict == rhs.residencyNonResidentStrict ); + } + + bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {}; + }; + static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceProperties + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( + uint32_t apiVersion_ = {}, + uint32_t driverVersion_ = {}, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, + std::array const & deviceName_ = {}, + std::array const & pipelineCacheUUID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : apiVersion( apiVersion_ ) + , driverVersion( driverVersion_ ) + , vendorID( vendorID_ ) + , deviceID( deviceID_ ) + , deviceType( deviceType_ ) + , deviceName( deviceName_ ) + , pipelineCacheUUID( pipelineCacheUUID_ ) + , limits( limits_ ) + , sparseProperties( sparseProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties & + operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && + ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && ( deviceType == rhs.deviceType ) && + ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && + ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties ); + } + + bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t apiVersion = {}; + uint32_t driverVersion = {}; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = 
VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; + }; + static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceProperties2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT + : properties( properties_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProperties2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2 & + operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProperties2 const & ) const = default; +#else + bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); + } + + bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {}; + }; + static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceProperties2; + }; + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + + struct QueryPoolPerformanceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueryPoolPerformanceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, + uint32_t counterIndexCount_ = {}, + const uint32_t * pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFamilyIndex( queueFamilyIndex_ ) + , counterIndexCount( counterIndexCount_ ) + , pCounterIndices( pCounterIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR + QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + QueryPoolPerformanceCreateInfoKHR( + uint32_t queueFamilyIndex_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_ ) + : queueFamilyIndex( queueFamilyIndex_ ) + , counterIndexCount( static_cast( counterIndices_.size() ) ) + , pCounterIndices( counterIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & + operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = counterIndexCount_; + return *this; + } + + QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT + { + pCounterIndices = pCounterIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + QueryPoolPerformanceCreateInfoKHR & setCounterIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = static_cast( counterIndices_.size() ); + pCounterIndices = counterIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default; +#else + bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( counterIndexCount == rhs.counterIndexCount ) && ( pCounterIndices == 
rhs.pCounterIndices ); + } + + bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + const void * pNext = {}; + uint32_t queueFamilyIndex = {}; + uint32_t counterIndexCount = {}; + const uint32_t * pCounterIndices = {}; + }; + static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = QueryPoolPerformanceCreateInfoKHR; + }; + + struct QueueFamilyProperties + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, + uint32_t queueCount_ = {}, + uint32_t timestampValidBits_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFlags( queueFlags_ ) + , queueCount( queueCount_ ) + , timestampValidBits( timestampValidBits_ ) + , minImageTransferGranularity( minImageTransferGranularity_ ) + {} + + VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 QueueFamilyProperties & + operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyProperties const & ) const = default; +#else + bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) && + ( timestampValidBits == rhs.timestampValidBits ) && + ( minImageTransferGranularity == rhs.minImageTransferGranularity ); + } + + bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {}; + uint32_t queueCount = {}; + uint32_t timestampValidBits = {}; + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {}; + }; + static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct QueueFamilyProperties2
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(
+      VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : queueFamilyProperties( queueFamilyProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyProperties2( *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyProperties2 &
+      operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties2 *>( this );
+    }
+
+    operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( QueueFamilyProperties2 const & ) const = default;
+#else
+    bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties );
+    }
+
+    bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType         sType                 = StructureType::eQueueFamilyProperties2;
+    void *                                      pNext                 = {};
+    VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
+  };
+  static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = QueueFamilyProperties2; + }; + using QueueFamilyProperties2KHR = QueueFamilyProperties2; + + struct PhysicalDeviceSparseImageFormatInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSparseImageFormatInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + , type( type_ ) + , samples( samples_ ) + , usage( usage_ ) + , tiling( tiling_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & + operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseImageFormatInfo2 & + operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & + setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default; +#else + bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && + ( samples == rhs.samples ) && ( usage == rhs.usage ) && ( tiling == rhs.tiling ); + } + + bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + }; + static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSparseImageFormatInfo2; + }; + using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; + + struct SparseImageFormatProperties2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT + : properties( properties_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageFormatProperties2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseImageFormatProperties2 & + operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageFormatProperties2 const & ) const = default; +#else + bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); + } + + bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {}; + }; + static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SparseImageFormatProperties2; + }; + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + + struct FramebufferMixedSamplesCombinationNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eFramebufferMixedSamplesCombinationNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {} ) VULKAN_HPP_NOEXCEPT + : coverageReductionMode( coverageReductionMode_ ) + , rasterizationSamples( rasterizationSamples_ ) + , depthStencilSamples( depthStencilSamples_ ) + , colorSamples( colorSamples_ ) + {} + + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferMixedSamplesCombinationNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FramebufferMixedSamplesCombinationNV & + operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferMixedSamplesCombinationNV & + operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default; +#else + bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( coverageReductionMode == rhs.coverageReductionMode ) && + ( rasterizationSamples == rhs.rasterizationSamples ) && + ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples ); + } + + bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {}; + }; + static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = FramebufferMixedSamplesCombinationNV; + }; + + struct SurfaceCapabilities2EXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( + uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT + : minImageCount( minImageCount_ ) + , maxImageCount( maxImageCount_ ) + , currentExtent( currentExtent_ ) + , minImageExtent( minImageExtent_ ) + , maxImageExtent( maxImageExtent_ ) + , maxImageArrayLayers( maxImageArrayLayers_ ) + , supportedTransforms( supportedTransforms_ ) + , currentTransform( currentTransform_ ) + , supportedCompositeAlpha( supportedCompositeAlpha_ ) + , supportedUsageFlags( supportedUsageFlags_ ) + , supportedSurfaceCounters( supportedSurfaceCounters_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilities2EXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilities2EXT & + operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2EXT const & ) const = default; +#else + bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) && + ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) && + ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) && + ( currentTransform == rhs.currentTransform ) && + ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && + ( supportedUsageFlags == rhs.supportedUsageFlags ) && + ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); + } + + bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; + void * pNext = {}; + uint32_t 
minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {}; + }; + static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SurfaceCapabilities2EXT; + }; + + struct SurfaceCapabilitiesKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : minImageCount( minImageCount_ ) + , maxImageCount( maxImageCount_ ) + , currentExtent( currentExtent_ ) + , minImageExtent( minImageExtent_ ) + , maxImageExtent( maxImageExtent_ ) + , maxImageArrayLayers( maxImageArrayLayers_ ) + , supportedTransforms( supportedTransforms_ ) + , currentTransform( currentTransform_ ) + , supportedCompositeAlpha( supportedCompositeAlpha_ ) + , supportedUsageFlags( supportedUsageFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesKHR & + operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default; +#else + bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && + ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) && + ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && + ( 
supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) && + ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && + ( supportedUsageFlags == rhs.supportedUsageFlags ); + } + + bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + }; + static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct SurfaceCapabilities2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT + : surfaceCapabilities( surfaceCapabilities_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilities2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilities2KHR & + operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2KHR const & ) const = default; +#else + bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities ); + } + + bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {}; + }; + static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);

  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
  {
    using Type = SurfaceCapabilities2KHR;
  };

  struct SurfaceFormatKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format        format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                        VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ =
                          VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
      : format( format_ )
      , colorSpace( colorSpace_ )
    {}

    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
      return *this;
    }

    operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFormatKHR *>( this );
    }

    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFormatKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFormatKHR const & ) const = default;
#else
    bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace );
    }

    bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Format        format     = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
  };
  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );

  struct SurfaceFormat2KHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT
      : surfaceFormat( surfaceFormat_ )
    {}

    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SurfaceFormat2KHR &
                            operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
      return *this;
    }

    operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFormat2KHR *>( this );
    }

    operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFormat2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFormat2KHR const & ) const = default;
#else
    bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat );
    }

    bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType         = StructureType::eSurfaceFormat2KHR;
    void *                                 pNext         = {};
    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
  };
  static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = SurfaceFormat2KHR; + }; + + struct PhysicalDeviceToolPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT( + std::array const & name_ = {}, + std::array const & version_ = {}, + VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {}, + std::array const & description_ = {}, + std::array const & layer_ = {} ) VULKAN_HPP_NOEXCEPT + : name( name_ ) + , version( version_ ) + , purposes( purposes_ ) + , description( description_ ) + , layer( layer_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceToolPropertiesEXT( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceToolPropertiesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT & + operator=( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceToolPropertiesEXT & operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceToolPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceToolPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceToolPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) && + ( purposes == rhs.purposes ) && ( description == rhs.description ) && ( layer == rhs.layer ); + } + + bool operator!=( PhysicalDeviceToolPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D version = {}; + VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layer = {}; + }; + static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceToolPropertiesEXT; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoCapabilitiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoCapabilitiesFlagsKHR capabilityFlags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxExtent_ = {}, + uint32_t maxReferencePicturesSlotsCount_ = {}, + uint32_t maxReferencePicturesActiveCount_ = {} ) VULKAN_HPP_NOEXCEPT + : capabilityFlags( capabilityFlags_ ) + , minBitstreamBufferOffsetAlignment( minBitstreamBufferOffsetAlignment_ ) + , minBitstreamBufferSizeAlignment( minBitstreamBufferSizeAlignment_ ) + , videoPictureExtentGranularity( videoPictureExtentGranularity_ ) + , minExtent( minExtent_ ) + , maxExtent( maxExtent_ ) + , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ ) + , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR & + operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoCapabilitiesKHR const & ) const = default; +# else + bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilityFlags == rhs.capabilityFlags ) && + ( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment ) && + ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment ) && + ( videoPictureExtentGranularity == rhs.videoPictureExtentGranularity ) && ( minExtent == rhs.minExtent ) && + ( maxExtent == rhs.maxExtent ) && + ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) && + ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount ); + } + + bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCapabilitiesFlagsKHR capabilityFlags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity = {}; + 
VULKAN_HPP_NAMESPACE::Extent2D minExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxExtent = {}; + uint32_t maxReferencePicturesSlotsCount = {}; + uint32_t maxReferencePicturesActiveCount = {}; + }; + static_assert( sizeof( VideoCapabilitiesKHR ) == sizeof( VkVideoCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoCapabilitiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoProfilesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfilesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoProfilesKHR( uint32_t profileCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_ = {} ) VULKAN_HPP_NOEXCEPT + : profileCount( profileCount_ ) + , pProfiles( pProfiles_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoProfilesKHR( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfilesKHR( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoProfilesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR & operator=( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfilesKHR & operator=( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoProfilesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoProfilesKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT + { + profileCount = profileCount_; + return *this; + } + + VideoProfilesKHR & setPProfiles( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT + { + pProfiles = pProfiles_; + return *this; + } + + operator VkVideoProfilesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoProfilesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoProfilesKHR const & ) const = default; +# else + bool operator==( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( profileCount == rhs.profileCount ) && + ( pProfiles == rhs.pProfiles ); + } + + bool operator!=( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfilesKHR; + void * pNext = {}; + uint32_t profileCount = {}; + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles = {}; + }; + static_assert( sizeof( VideoProfilesKHR ) == sizeof( VkVideoProfilesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoProfilesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceVideoFormatInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, + const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles_ = {} ) VULKAN_HPP_NOEXCEPT + : imageUsage( imageUsage_ ) + , pVideoProfiles( pVideoProfiles_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & + operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default; +# else + bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageUsage == rhs.imageUsage ) && + ( pVideoProfiles == rhs.pVideoProfiles ); + } + + bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; + const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles = {}; + }; + static_assert( sizeof( PhysicalDeviceVideoFormatInfoKHR ) == sizeof( VkPhysicalDeviceVideoFormatInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoFormatInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoFormatPropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoFormatPropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoFormatPropertiesKHR & + operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoFormatPropertiesKHR const & ) const = default; +# else + bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ); + } + + bool operator!=( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + static_assert( sizeof( VideoFormatPropertiesKHR ) == sizeof( VkVideoFormatPropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoFormatPropertiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDevice = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class PhysicalDevice + { + public: + using CType = VkPhysicalDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; + + public: + VULKAN_HPP_CONSTEXPR PhysicalDevice() = default; + VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( physicalDevice ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PhysicalDevice & operator=( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = physicalDevice; + return *this; + } +#endif + + PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice const & ) const = default; +#else + bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice == rhs.m_physicalDevice; + } + + bool operator!=( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice != rhs.m_physicalDevice; + } + + bool operator<( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice < rhs.m_physicalDevice; + } +#endif + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + template + VULKAN_HPP_NODISCARD Result + acquireXlibDisplayEXT( Display * dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireXlibDisplayEXT( Display & dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + template + VULKAN_HPP_NODISCARD Result + createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Device * pDevice, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDevice( 
const DeviceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDeviceUnique( const DeviceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayModeCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayModeCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( + const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceExtensionProperties( Optional layerName + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = ExtensionPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = 
LayerPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + ArrayProxy const & counters, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Allocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + ArrayProxy const & counters, + Allocator const & vectorAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename PerformanceCounterDescriptionKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::pair, + std::vector>>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename PerformanceCounterDescriptionKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PerformanceCounterKHRAllocator, + typename B2 = PerformanceCounterDescriptionKHRAllocator, + typename std::enable_if::value && + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::pair, + std::vector>>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, + PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( + VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayModeProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayModeProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + 
getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( + VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayModePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayModePropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( + uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + 
DisplayKHRAllocator & displayKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( + uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = TimeDomainEXTAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV( + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = CooperativeMatrixPropertiesNVAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + template + Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB * dfb, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB & dfb, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayPlaneProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayPlaneProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlaneProperties2KHR( 
DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayPlanePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayPlanePropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalBufferProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( + const PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( + const PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( + const PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalFencePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( + const PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalSemaphoreProperties( + const 
VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( + const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( + const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures + getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties + getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void 
getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR( + uint32_t * pFragmentShadingRateCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceFragmentShadingRateKHRAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getFragmentShadingRatesKHR( + PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags 
VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + 
VULKAN_HPP_NODISCARD StructureChain + getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Rect2DAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Rect2DAllocator & rect2DAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void 
getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR( + const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename QueueFamilyPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = QueueFamilyPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename QueueFamilyProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = QueueFamilyProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = StructureChainAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename QueueFamilyProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = QueueFamilyProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = StructureChainAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template + Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window * window, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window & window, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + template + void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename SparseImageFormatPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageFormatPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * 
pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename SparseImageFormatProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageFormatProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename SparseImageFormatProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageFormatProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV( + uint32_t * pCombinationCount, + VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getSupportedFramebufferMixedSamplesCombinationsNV( + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = FramebufferMixedSamplesCombinationNVAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getSupportedFramebufferMixedSamplesCombinationsNV( + FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT( + 
VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SurfaceFormat2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename 
ResultValueType>::type + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SurfaceFormatKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PresentModeKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PresentModeKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getToolPropertiesEXT( 
uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT * pToolProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceToolPropertiesEXTAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getToolPropertiesEXT( PhysicalDeviceToolPropertiesEXTAllocator & physicalDeviceToolPropertiesEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD Result + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile, + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename VideoFormatPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = VideoFormatPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display * display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + 
struct wl_display & display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display & dpy, + VisualID visualID, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + template + VULKAN_HPP_NODISCARD Result + getRandROutputDisplayEXT( Display * dpy, + RROutput rrOutput, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type getRandROutputDisplayEXT( + Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + getRandROutputDisplayEXTUnique( Display & dpy, + RROutput rrOutput, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD Result + getWinrtDisplayNV( uint32_t deviceRelativeId, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + getWinrtDisplayNVUnique( uint32_t deviceRelativeId, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result releaseDisplayEXT( 
VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice == VK_NULL_HANDLE; + } + + private: + VkPhysicalDevice m_physicalDevice = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct DeviceGroupDeviceCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( + uint32_t physicalDeviceCount_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT + : physicalDeviceCount( physicalDeviceCount_ ) + , pPhysicalDevices( pPhysicalDevices_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupDeviceCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupDeviceCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + physicalDevices_ ) + : physicalDeviceCount( static_cast( physicalDevices_.size() ) ) + , pPhysicalDevices( physicalDevices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & + operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT + { + physicalDeviceCount = physicalDeviceCount_; + return *this; + } + + DeviceGroupDeviceCreateInfo & + setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT + { + pPhysicalDevices = pPhysicalDevices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupDeviceCreateInfo & setPhysicalDevices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const 
& + physicalDevices_ ) VULKAN_HPP_NOEXCEPT + { + physicalDeviceCount = static_cast( physicalDevices_.size() ); + pPhysicalDevices = physicalDevices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default; +#else + bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( pPhysicalDevices == rhs.pPhysicalDevices ); + } + + bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; + const void * pNext = {}; + uint32_t physicalDeviceCount = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {}; + }; + static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceGroupDeviceCreateInfo; + }; + using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; + + struct DeviceGroupPresentInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {}, + const uint32_t * pDeviceMasks_ = {}, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT + : swapchainCount( swapchainCount_ ) + , pDeviceMasks( pDeviceMasks_ ) + , mode( mode_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupPresentInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) + : swapchainCount( static_cast( deviceMasks_.size() ) ) + , pDeviceMasks( deviceMasks_.data() ) + , mode( mode_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & + operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + 
{ + swapchainCount = swapchainCount_; + return *this; + } + + DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceMasks = pDeviceMasks_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupPresentInfoKHR & setDeviceMasks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( deviceMasks_.size() ); + pDeviceMasks = deviceMasks_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceGroupPresentInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default; +#else + bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && + ( pDeviceMasks == rhs.pDeviceMasks ) && ( mode == rhs.mode ); + } + + bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const uint32_t * pDeviceMasks = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; + }; + static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceGroupPresentInfoKHR; + }; + + struct DeviceGroupRenderPassBeginInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, + uint32_t deviceRenderAreaCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {} ) + VULKAN_HPP_NOEXCEPT + : deviceMask( deviceMask_ ) + , deviceRenderAreaCount( deviceRenderAreaCount_ ) + , pDeviceRenderAreas( pDeviceRenderAreas_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupRenderPassBeginInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupRenderPassBeginInfo( + uint32_t deviceMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_ ) + : deviceMask( deviceMask_ ) + , deviceRenderAreaCount( static_cast( deviceRenderAreas_.size() ) ) + , pDeviceRenderAreas( deviceRenderAreas_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & + operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceRenderAreaCount = deviceRenderAreaCount_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & + setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceRenderAreas = pDeviceRenderAreas_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_ ) + VULKAN_HPP_NOEXCEPT + { + deviceRenderAreaCount = static_cast( deviceRenderAreas_.size() ); + pDeviceRenderAreas = deviceRenderAreas_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default; +#else + bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && + ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); + } + + bool operator!=( 
DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; + uint32_t deviceRenderAreaCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {}; + }; + static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceGroupRenderPassBeginInfo; + }; + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + + struct DeviceGroupSubmitInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const uint32_t * pWaitSemaphoreDeviceIndices_ = {}, + uint32_t commandBufferCount_ = {}, + const uint32_t * pCommandBufferDeviceMasks_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const uint32_t * pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ) + , commandBufferCount( commandBufferCount_ ) + , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupSubmitInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphoreDeviceIndices_.size() ) ) + , pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ) + , commandBufferCount( static_cast( commandBufferDeviceMasks_.size() ) ) + , pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphoreDeviceIndices_.size() ) ) + , pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & + operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + DeviceGroupSubmitInfo & + setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) 
VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphoreDeviceIndices_.size() ); + pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + DeviceGroupSubmitInfo & + setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ ) + VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBufferDeviceMasks_.size() ); + pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + DeviceGroupSubmitInfo & + setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ ) + VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphoreDeviceIndices_.size() ); + pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupSubmitInfo const & ) const = default; +#else + bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && + ( commandBufferCount == rhs.commandBufferCount ) && + ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && + ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); + } + + bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const uint32_t * pWaitSemaphoreDeviceIndices = {}; + uint32_t commandBufferCount = {}; + const uint32_t * pCommandBufferDeviceMasks = {}; + uint32_t signalSemaphoreCount = {}; + const uint32_t * 
pSignalSemaphoreDeviceIndices = {}; + }; + static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceGroupSubmitInfo; + }; + using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; + + struct DeviceGroupSwapchainCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceGroupSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT : modes( modes_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & + operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupSwapchainCreateInfoKHR & + setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT + { + modes = modes_; + return *this; + } + + operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes ); + } + + bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + }; + static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceGroupSwapchainCreateInfoKHR; + }; + + struct DeviceMemoryOverallocationCreateInfoAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT + : overallocationBehavior( overallocationBehavior_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryOverallocationCreateInfoAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & + operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOverallocationCreateInfoAMD & + operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT + { + overallocationBehavior = overallocationBehavior_; + return *this; + } + + operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default; +#else + bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( overallocationBehavior == rhs.overallocationBehavior ); + } + + bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; + }; + static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == + sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceMemoryOverallocationCreateInfoAMD; + }; + + struct DeviceMemoryReportCallbackDataEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceMemoryReportCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, + uint64_t memoryObjectId_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , type( type_ ) + , memoryObjectId( memoryObjectId_ ) + , size( size_ ) + , objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , heapIndex( heapIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryReportCallbackDataEXT & + operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default; +#else + bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && + ( memoryObjectId == rhs.memoryObjectId ) && ( size == rhs.size ) && ( objectType == rhs.objectType ) && + ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex ); + } + + bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate; + uint64_t memoryObjectId = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint32_t heapIndex = {}; + }; + static_assert( sizeof( DeviceMemoryReportCallbackDataEXT ) == sizeof( VkDeviceMemoryReportCallbackDataEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceMemoryReportCallbackDataEXT; + }; + + struct DevicePrivateDataCreateInfoEXT + { + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DevicePrivateDataCreateInfoEXT( uint32_t privateDataSlotRequestCount_ = {} ) VULKAN_HPP_NOEXCEPT + : privateDataSlotRequestCount( privateDataSlotRequestCount_ ) + {} + + VULKAN_HPP_CONSTEXPR + DevicePrivateDataCreateInfoEXT( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DevicePrivateDataCreateInfoEXT( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DevicePrivateDataCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfoEXT & + operator=( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DevicePrivateDataCreateInfoEXT & operator=( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DevicePrivateDataCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DevicePrivateDataCreateInfoEXT & + setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT + { + privateDataSlotRequestCount = privateDataSlotRequestCount_; + return *this; + } + + operator VkDevicePrivateDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDevicePrivateDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DevicePrivateDataCreateInfoEXT const & ) const = default; +#else + bool operator==( DevicePrivateDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount ); + } + + bool operator!=( DevicePrivateDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfoEXT; + const void * pNext = {}; + uint32_t privateDataSlotRequestCount = {}; + }; + static_assert( sizeof( DevicePrivateDataCreateInfoEXT ) == sizeof( VkDevicePrivateDataCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DevicePrivateDataCreateInfoEXT; + }; + + struct DeviceQueueGlobalPriorityCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT + : globalPriority( globalPriority_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueGlobalPriorityCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoEXT & + operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueGlobalPriorityCreateInfoEXT & + operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceQueueGlobalPriorityCreateInfoEXT & + setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT + { + globalPriority = globalPriority_; + return *this; + } + + operator VkDeviceQueueGlobalPriorityCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const & ) const = default; +#else + bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority ); + } + + bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow; + }; + static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceQueueGlobalPriorityCreateInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + struct DirectFBSurfaceCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, + IDirectFB * dfb_ = {}, + IDirectFBSurface * surface_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , dfb( dfb_ ) + , surface( surface_ ) + {} + + VULKAN_HPP_CONSTEXPR + DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & + operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DirectFBSurfaceCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT + { + dfb = dfb_; + return *this; + } + + DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } + + operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) && + ( surface == rhs.surface ); + } + + bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {}; + IDirectFB * dfb = {}; + IDirectFBSurface * surface = {}; + }; + static_assert( sizeof( DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT> + { + using Type = DirectFBSurfaceCreateInfoEXT; + }; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + struct DispatchIndirectCommand + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + , z( z_ ) + {} + + VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DispatchIndirectCommand( *reinterpret_cast<DispatchIndirectCommand const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & + operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs ); + return *this; + } + + DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT + { + z = z_; + return *this; + } + + operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDispatchIndirectCommand *>( this ); + } + + operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDispatchIndirectCommand *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DispatchIndirectCommand const & ) const = default; +#else + bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ); + } + + bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t x = {}; + uint32_t y = {}; + uint32_t z = {}; + }; + static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" 
); + + struct DisplayNativeHdrSurfaceCapabilitiesAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( + VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT + : localDimmingSupport( localDimmingSupport_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayNativeHdrSurfaceCapabilitiesAMD( + *reinterpret_cast<DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayNativeHdrSurfaceCapabilitiesAMD & + operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayNativeHdrSurfaceCapabilitiesAMD & + operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs ); + return *this; + } + + operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this ); + } + + operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default; +#else + bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport ); + } + + bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {}; + }; + static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DisplayNativeHdrSurfaceCapabilitiesAMD>::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayNativeHdrSurfaceCapabilitiesAMD; + }; + + struct DisplayPresentInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcRect( srcRect_ ) + , dstRect( dstRect_ ) + , persistent( persistent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPresentInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & + operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT + { + srcRect = srcRect_; + return *this; + } + + DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT + { + dstRect = dstRect_; + return *this; + } + + DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT + { + persistent = persistent_; + return *this; + } + + operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPresentInfoKHR const & ) const = default; +#else + bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && + ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent ); + } + + bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; + VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; + }; + static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayPresentInfoKHR; + }; + + struct DisplaySurfaceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + uint32_t planeIndex_ = {}, + uint32_t planeStackIndex_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + float globalAlpha_ = {}, + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , displayMode( displayMode_ ) + , planeIndex( planeIndex_ ) + , planeStackIndex( planeStackIndex_ ) + , transform( transform_ ) + , globalAlpha( globalAlpha_ ) + , alphaMode( alphaMode_ ) + , imageExtent( imageExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplaySurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & + operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & + setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT + { + displayMode = displayMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = planeIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeStackIndex = planeStackIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT + { + globalAlpha = globalAlpha_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & + setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT + { + alphaMode = alphaMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & + setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default; +#else + bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( displayMode == rhs.displayMode ) && ( planeIndex == rhs.planeIndex ) && + ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) && + ( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + uint32_t planeIndex = {}; + uint32_t planeStackIndex = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + float globalAlpha = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + }; + static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DisplaySurfaceCreateInfoKHR; + }; + + struct DrawIndexedIndirectCommand + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstIndex_ = {}, + int32_t vertexOffset_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : indexCount( indexCount_ ) + , instanceCount( instanceCount_ ) + , firstIndex( firstIndex_ ) + , vertexOffset( vertexOffset_ ) + , firstInstance( firstInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR + DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndexedIndirectCommand( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & + operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT + { + firstIndex = firstIndex_; + return *this; + } + + DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } + + DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return 
*this; + } + + operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndexedIndirectCommand const & ) const = default; +#else + bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && + ( firstIndex == rhs.firstIndex ) && ( vertexOffset == rhs.vertexOffset ) && + ( firstInstance == rhs.firstInstance ); + } + + bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t indexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstIndex = {}; + int32_t vertexOffset = {}; + uint32_t firstInstance = {}; + }; + static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + struct DrawIndirectCommand + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstVertex_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexCount( vertexCount_ ) + , instanceCount( instanceCount_ ) + , firstVertex( firstVertex_ ) + , firstInstance( firstInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndirectCommand( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & + operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } + + operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndirectCommand const & ) const = default; +#else + bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) && + ( firstVertex == rhs.firstVertex ) && ( firstInstance == rhs.firstInstance ); + } + + bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t vertexCount = {}; + 
uint32_t instanceCount = {}; + uint32_t firstVertex = {}; + uint32_t firstInstance = {}; + }; + static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" ); + + struct DrawMeshTasksIndirectCommandNV + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, + uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT + : taskCount( taskCount_ ) + , firstTask( firstTask_ ) + {} + + VULKAN_HPP_CONSTEXPR + DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawMeshTasksIndirectCommandNV( *reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & + operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>( &rhs ); + return *this; + } + + DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT + { + taskCount = taskCount_; + return *this; + } + + DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT + { + firstTask = firstTask_; + return *this; + } + + operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV *>( this ); + } + + operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default; +#else + bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( taskCount == rhs.taskCount ) && ( firstTask == rhs.firstTask ); + } + + bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t taskCount = {}; + uint32_t firstTask = {}; + }; + static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DrawMeshTasksIndirectCommandNV>::value, + "struct wrapper is not a standard layout!" 
); + + struct DrmFormatModifierPropertiesEXT + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( + uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) + , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR + DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesEXT( *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DrmFormatModifierPropertiesEXT & + operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs ); + return *this; + } + + operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT *>( this ); + } + + operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( drmFormatModifier == rhs.drmFormatModifier ) && + ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); + } + + bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {}; + }; + static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value, + "struct wrapper is not a standard layout!" 
); + + struct DrmFormatModifierPropertiesListEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDrmFormatModifierPropertiesListEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( + uint32_t drmFormatModifierCount_ = {}, + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesListEXT( *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DrmFormatModifierPropertiesListEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const & + drmFormatModifierProperties_ ) + : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) ) + , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DrmFormatModifierPropertiesListEXT & + operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesListEXT & + operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs ); + return *this; + } + + operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT *>( this ); + } + + operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); + } + + bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; + void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {}; + }; + static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT> + { + using Type = DrmFormatModifierPropertiesListEXT; + }; + + struct ExportFenceCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportFenceCreateInfo( *reinterpret_cast<ExportFenceCreateInfo const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & + operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>( &rhs ); + return *this; + } + + ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportFenceCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkExportFenceCreateInfo *>( this ); + } + + operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkExportFenceCreateInfo *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportFenceCreateInfo const & ) const = default; +#else + bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; + }; + static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExportFenceCreateInfo; + }; + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportFenceWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & + operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); + } + + bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExportFenceWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ExportMemoryAllocateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryAllocateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & + operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportMemoryAllocateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryAllocateInfo const & ) const = default; +#else + bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + }; + static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
+  {
+    using Type = ExportMemoryAllocateInfo;
+  };
+  using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
+
+  struct ExportMemoryAllocateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMemoryAllocateInfoNV( *reinterpret_cast<ExportMemoryAllocateInfoNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV &
+      operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV *>( this );
+    }
+
+    operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryAllocateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default;
+#else
+    bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                   sType       = StructureType::eExportMemoryAllocateInfoNV;
+    const void *                                          pNext       = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+  };
+  static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryAllocateInfoNV>::value,
+                 "struct wrapper is not a standard layout!"
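+
+  // Illustrative usage sketch: ExportMemoryAllocateInfo is an input struct that is chained into
+  // MemoryAllocateInfo::pNext when allocating exportable device memory. A minimal sketch, assuming the
+  // default "vk" namespace, a vk::Device named "device" in scope, and a previously queried
+  // "memoryTypeIndex"; the size and handle type below are placeholders.
+  //
+  //   vk::ExportMemoryAllocateInfo exportInfo;
+  //   exportInfo.setHandleTypes( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //
+  //   vk::MemoryAllocateInfo allocInfo;
+  //   allocInfo.setAllocationSize( 1024 * 1024 )      // placeholder size
+  //            .setMemoryTypeIndex( memoryTypeIndex ) // assumed to be queried elsewhere
+  //            .setPNext( &exportInfo );
+  //
+  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );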
); + + template <> + struct CppType + { + using Type = ExportMemoryAllocateInfoNV; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportMemoryWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & + operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); + } + + bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExportMemoryWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportMemoryWin32HandleInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & + operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ); + } + + bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + }; + static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExportMemoryWin32HandleInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ExportSemaphoreCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportSemaphoreCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & + operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportSemaphoreCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default; +#else + bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {}; + }; + static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
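+
+  // Illustrative usage sketch: ExportSemaphoreCreateInfo is chained into SemaphoreCreateInfo::pNext so the
+  // resulting semaphore is exportable. A minimal sketch, assuming the default "vk" namespace and a
+  // vk::Device named "device" in scope; the opaque-fd handle type is just an example.
+  //
+  //   vk::ExportSemaphoreCreateInfo exportInfo( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
+  //
+  //   vk::SemaphoreCreateInfo createInfo;
+  //   createInfo.setPNext( &exportInfo );
+  //
+  //   vk::Semaphore semaphore = device.createSemaphore( createInfo );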
); + + template <> + struct CppType + { + using Type = ExportSemaphoreCreateInfo; + }; + using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportSemaphoreWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eExportSemaphoreWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & + operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); + } + + bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExportSemaphoreWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct ExternalFormatANDROID + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {} ) VULKAN_HPP_NOEXCEPT + : externalFormat( externalFormat_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalFormatANDROID( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & + operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT + { + externalFormat = externalFormat_; + return *this; + } + + operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalFormatANDROID const & ) const = default; +# else + bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat ); + } + + bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID; + void * pNext = {}; + uint64_t externalFormat = {}; + }; + static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalFormatANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct ExternalImageFormatProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : externalMemoryProperties( externalMemoryProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalImageFormatProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalImageFormatProperties & + operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalImageFormatProperties const & ) const = default; +#else + bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( externalMemoryProperties == rhs.externalMemoryProperties ); + } + + bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; + }; + static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
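+
+  // Illustrative usage sketch: ExternalImageFormatProperties is a query result, so it has no setters; it is
+  // returned through a structure chain from PhysicalDevice::getImageFormatProperties2. A minimal sketch,
+  // assuming the default "vk" namespace and a vk::PhysicalDevice named "physicalDevice" in scope; format,
+  // usage and handle type are placeholders.
+  //
+  //   vk::PhysicalDeviceExternalImageFormatInfo externalInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //
+  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo;
+  //   formatInfo.setFormat( vk::Format::eR8G8B8A8Unorm )
+  //             .setType( vk::ImageType::e2D )
+  //             .setTiling( vk::ImageTiling::eOptimal )
+  //             .setUsage( vk::ImageUsageFlagBits::eSampled )
+  //             .setPNext( &externalInfo );
+  //
+  //   auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
+  //                                                         vk::ExternalImageFormatProperties>( formatInfo );
+  //   vk::ExternalMemoryProperties memoryProperties =
+  //     chain.get<vk::ExternalImageFormatProperties>().externalMemoryProperties;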
); + + template <> + struct CppType + { + using Type = ExternalImageFormatProperties; + }; + using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + + struct ExternalMemoryBufferCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryBufferCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & + operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExternalMemoryBufferCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default; +#else + bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + }; + static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalMemoryBufferCreateInfo; + }; + using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; + + struct ExternalMemoryImageCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryImageCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & + operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExternalMemoryImageCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default; +#else + bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + }; + static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalMemoryImageCreateInfo; + }; + using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; + + struct ExternalMemoryImageCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryImageCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & + operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExternalMemoryImageCreateInfoNV & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default; +#else + bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; + }; + static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
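+
+  // Illustrative usage sketch: ExternalMemoryImageCreateInfo (or the NV variant) is chained into
+  // ImageCreateInfo::pNext so the image can be bound to exportable/importable memory. A minimal sketch,
+  // assuming the default "vk" namespace and a vk::Device named "device" in scope; format, extent and usage
+  // are placeholders.
+  //
+  //   vk::ExternalMemoryImageCreateInfo externalInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //
+  //   vk::ImageCreateInfo imageInfo;
+  //   imageInfo.setImageType( vk::ImageType::e2D )
+  //            .setFormat( vk::Format::eR8G8B8A8Unorm )
+  //            .setExtent( { 256, 256, 1 } )
+  //            .setMipLevels( 1 )
+  //            .setArrayLayers( 1 )
+  //            .setSamples( vk::SampleCountFlagBits::e1 )
+  //            .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst )
+  //            .setPNext( &externalInfo );
+  //
+  //   vk::Image image = device.createImage( imageInfo );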
); + + template <> + struct CppType + { + using Type = ExternalMemoryImageCreateInfoNV; + }; + + struct FilterCubicImageViewImageFormatPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( + VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT + : filterCubic( filterCubic_ ) + , filterCubicMinmax( filterCubicMinmax_ ) + {} + + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( + FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : FilterCubicImageViewImageFormatPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FilterCubicImageViewImageFormatPropertiesEXT & + operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FilterCubicImageViewImageFormatPropertiesEXT & + operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default; +#else + bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) && + ( filterCubicMinmax == rhs.filterCubicMinmax ); + } + + bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; + }; + static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == + sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = FilterCubicImageViewImageFormatPropertiesEXT; + }; + + struct FragmentShadingRateAttachmentInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eFragmentShadingRateAttachmentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {} ) VULKAN_HPP_NOEXCEPT + : pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ ) + , shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ ) + {} + + VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & + operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FragmentShadingRateAttachmentInfoKHR & + operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FragmentShadingRateAttachmentInfoKHR & setPFragmentShadingRateAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_; + return *this; + } + + FragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( + VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; + return *this; + } + + operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default; +#else + bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) && + ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize ); + } + + bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {}; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {}; + }; + static_assert( sizeof( FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = FragmentShadingRateAttachmentInfoKHR; + }; + + struct FramebufferAttachmentImageInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layerCount_ = {}, + uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , usage( usage_ ) + , width( width_ ) + , height( height_ ) + , layerCount( layerCount_ ) + , viewFormatCount( viewFormatCount_ ) + , pViewFormats( pViewFormats_ ) + {} + + VULKAN_HPP_CONSTEXPR + FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferAttachmentImageInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentImageInfo( + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, + uint32_t width_, + uint32_t height_, + uint32_t layerCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) + : flags( flags_ ) + , usage( usage_ ) + , width( width_ ) + , height( height_ ) + , layerCount( layerCount_ ) + , viewFormatCount( static_cast( viewFormats_.size() ) ) + , pViewFormats( viewFormats_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & + operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = viewFormatCount_; + return *this; + } + + FramebufferAttachmentImageInfo & + setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT + { + pViewFormats = pViewFormats_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentImageInfo & 
setViewFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) + VULKAN_HPP_NOEXCEPT + { + viewFormatCount = static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default; +#else + bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && + ( width == rhs.width ) && ( height == rhs.height ) && ( layerCount == rhs.layerCount ) && + ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); + } + + bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layerCount = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; + }; + static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
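+
+  // Illustrative usage sketch: FramebufferAttachmentImageInfo describes one attachment of an "imageless"
+  // framebuffer; a list of them is wrapped in FramebufferAttachmentsCreateInfo (declared just below) and
+  // chained into FramebufferCreateInfo::pNext together with FramebufferCreateFlagBits::eImageless. A minimal
+  // sketch, assuming the default "vk" namespace, a vk::RenderPass named "renderPass" in scope, and a
+  // placeholder 1280x720 color attachment format.
+  //
+  //   vk::Format colorFormat = vk::Format::eB8G8R8A8Unorm;
+  //
+  //   vk::FramebufferAttachmentImageInfo attachmentInfo;
+  //   attachmentInfo.setUsage( vk::ImageUsageFlagBits::eColorAttachment )
+  //                 .setWidth( 1280 )
+  //                 .setHeight( 720 )
+  //                 .setLayerCount( 1 )
+  //                 .setViewFormats( colorFormat );  // single-element ArrayProxy
+  //
+  //   vk::FramebufferAttachmentsCreateInfo attachmentsInfo;
+  //   attachmentsInfo.setAttachmentImageInfos( attachmentInfo );
+  //
+  //   vk::FramebufferCreateInfo framebufferInfo;
+  //   framebufferInfo.setFlags( vk::FramebufferCreateFlagBits::eImageless )
+  //                  .setRenderPass( renderPass )
+  //                  .setAttachmentCount( 1 )
+  //                  .setWidth( 1280 )
+  //                  .setHeight( 720 )
+  //                  .setLayers( 1 )
+  //                  .setPNext( &attachmentsInfo );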
); + + template <> + struct CppType + { + using Type = FramebufferAttachmentImageInfo; + }; + using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; + + struct FramebufferAttachmentsCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( + uint32_t attachmentImageInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentImageInfoCount( attachmentImageInfoCount_ ) + , pAttachmentImageInfos( pAttachmentImageInfos_ ) + {} + + VULKAN_HPP_CONSTEXPR + FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferAttachmentsCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentsCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachmentImageInfos_ ) + : attachmentImageInfoCount( static_cast( attachmentImageInfos_.size() ) ) + , pAttachmentImageInfos( attachmentImageInfos_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & + operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FramebufferAttachmentsCreateInfo & + setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentImageInfoCount = attachmentImageInfoCount_; + return *this; + } + + FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT + { + pAttachmentImageInfos = pAttachmentImageInfos_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT + { + attachmentImageInfoCount = static_cast( attachmentImageInfos_.size() ); + pAttachmentImageInfos = attachmentImageInfos_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default; +#else + bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) && + ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); + } + + bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; + const void * pNext = {}; + uint32_t attachmentImageInfoCount = {}; + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {}; + }; + static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = FramebufferAttachmentsCreateInfo; + }; + using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; + + struct GraphicsShaderGroupCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} ) VULKAN_HPP_NOEXCEPT + : stageCount( stageCount_ ) + , pStages( pStages_ ) + , pVertexInputState( pVertexInputState_ ) + , pTessellationState( pTessellationState_ ) + {} + + VULKAN_HPP_CONSTEXPR + GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsShaderGroupCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} ) + : stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pVertexInputState( pVertexInputState_ ) + , pTessellationState( pTessellationState_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & + operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + GraphicsShaderGroupCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsShaderGroupCreateInfoNV & setStages( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } 
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GraphicsShaderGroupCreateInfoNV & setPVertexInputState( + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + { + pVertexInputState = pVertexInputState_; + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setPTessellationState( + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT + { + pTessellationState = pTessellationState_; + return *this; + } + + operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default; +#else + bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) && + ( pStages == rhs.pStages ) && ( pVertexInputState == rhs.pVertexInputState ) && + ( pTessellationState == rhs.pTessellationState ); + } + + bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; + const void * pNext = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; + }; + static_assert( sizeof( GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = GraphicsShaderGroupCreateInfoNV; + }; + + struct GraphicsPipelineShaderGroupsCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t pipelineCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {} ) VULKAN_HPP_NOEXCEPT + : groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , pipelineCount( pipelineCount_ ) + , pPipelines( pPipelines_ ) + {} + + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( + GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : GraphicsPipelineShaderGroupsCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + groups_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ = {} ) + : groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , pipelineCount( static_cast( pipelines_.size() ) ) + , pPipelines( pipelines_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & + operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineShaderGroupsCreateInfoNV & + operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCount = pipelineCount_; + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & + setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT + { + pPipelines = pPipelines_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ ) + VULKAN_HPP_NOEXCEPT + { + pipelineCount = static_cast( pipelines_.size() ); + pPipelines = pipelines_.data(); + return *this; + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default; +#else + bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) && + ( pGroups == rhs.pGroups ) && ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines ); + } + + bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + const void * pNext = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {}; + uint32_t pipelineCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {}; + }; + static_assert( sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) == + sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = GraphicsPipelineShaderGroupsCreateInfoNV; + }; + + struct HeadlessSurfaceCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & + operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + HeadlessSurfaceCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default; +#else + bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + } + + 
bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {}; + }; + static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = HeadlessSurfaceCreateInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + struct IOSSurfaceCreateInfoMVK + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, + const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pView( pView_ ) + {} + + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + : IOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & + operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT + { + pView = pView_; + return *this; + } + + operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default; +# else + bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); + } + + bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {}; + const void * pView = {}; + }; + static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = IOSSurfaceCreateInfoMVK; + }; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + + struct ImageDrmFormatModifierExplicitCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( + uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) + , pPlaneLayouts( pPlaneLayouts_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( + ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierExplicitCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierExplicitCreateInfoEXT( + uint64_t drmFormatModifier_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + planeLayouts_ ) + : drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( static_cast( planeLayouts_.size() ) ) + , pPlaneLayouts( planeLayouts_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & + operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierExplicitCreateInfoEXT & + operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT & + setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT & + setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT & + setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pPlaneLayouts = pPlaneLayouts_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + planeLayouts_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierPlaneCount = static_cast( planeLayouts_.size() ); + pPlaneLayouts = planeLayouts_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && + ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( pPlaneLayouts == rhs.pPlaneLayouts ); + } + + bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {}; + }; + static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == + sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierExplicitCreateInfoEXT; + }; + + struct ImageDrmFormatModifierListCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImageDrmFormatModifierListCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {}, + const uint64_t * pDrmFormatModifiers_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifiers( pDrmFormatModifiers_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierListCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierListCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_ ) + : drmFormatModifierCount( static_cast( drmFormatModifiers_.size() ) ) + , pDrmFormatModifiers( drmFormatModifiers_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & + operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierListCreateInfoEXT & + operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT & + setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierCount = drmFormatModifierCount_; + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT & + setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + { + pDrmFormatModifiers = pDrmFormatModifiers_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierCount = static_cast( drmFormatModifiers_.size() ); + pDrmFormatModifiers = drmFormatModifiers_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); + } + + bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; + const void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + const uint64_t * pDrmFormatModifiers = {}; + }; + static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == + sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierListCreateInfoEXT; + }; + + struct ImageFormatListCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT + : viewFormatCount( viewFormatCount_ ) + , pViewFormats( pViewFormats_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatListCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatListCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) + : viewFormatCount( static_cast( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & + operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = viewFormatCount_; + return *this; + } + + ImageFormatListCreateInfo & + setPViewFormats( 
const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT + { + pViewFormats = pViewFormats_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatListCreateInfo & setViewFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) + VULKAN_HPP_NOEXCEPT + { + viewFormatCount = static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatListCreateInfo const & ) const = default; +#else + bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) && + ( pViewFormats == rhs.pViewFormats ); + } + + bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo; + const void * pNext = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; + }; + static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageFormatListCreateInfo; + }; + using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImagePipeSurfaceCreateInfoFUCHSIA + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, + zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , imagePipeHandle( imagePipeHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & + operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImagePipeSurfaceCreateInfoFUCHSIA & + setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT + { + imagePipeHandle = 
imagePipeHandle_; + return *this; + } + + operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; + zx_handle_t imagePipeHandle = {}; + }; + static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImagePipeSurfaceCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct ImagePlaneMemoryRequirementsInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT + : planeAspect( planeAspect_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImagePlaneMemoryRequirementsInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & + operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImagePlaneMemoryRequirementsInfo & + setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } + + operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default; +#else + bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); 
+ } + + bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; + static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImagePlaneMemoryRequirementsInfo; + }; + using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; + + struct ImageStencilUsageCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT + : stencilUsage( stencilUsage_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageStencilUsageCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & + operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageStencilUsageCreateInfo & + setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT + { + stencilUsage = stencilUsage_; + return *this; + } + + operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default; +#else + bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage ); + } + + bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {}; + }; + static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageStencilUsageCreateInfo; + }; + using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; + + struct ImageSwapchainCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT + : swapchain( swapchain_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & + operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ); + } + + bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + }; + static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
+  {
+    using Type = ImageSwapchainCreateInfoKHR;
+  };
+
+  struct ImageViewASTCDecodeModeEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(
+      VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : decodeMode( decodeMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewASTCDecodeModeEXT( *reinterpret_cast<ImageViewASTCDecodeModeEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT &
+                            operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>( &rhs );
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decodeMode = decodeMode_;
+      return *this;
+    }
+
+    operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT *>( this );
+    }
+
+    operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewASTCDecodeModeEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default;
+#else
+    bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode );
+    }
+
+    bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::eImageViewAstcDecodeModeEXT;
+    const void *                        pNext      = {};
+    VULKAN_HPP_NAMESPACE::Format        decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+  };
+  static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewASTCDecodeModeEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = ImageViewASTCDecodeModeEXT; + }; + + struct ImageViewUsageCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT + : usage( usage_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewUsageCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & + operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewUsageCreateInfo const & ) const = default; +#else + bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); + } + + bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + }; + static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageViewUsageCreateInfo; + }; + using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct ImportAndroidHardwareBufferInfoANDROID + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImportAndroidHardwareBufferInfoANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportAndroidHardwareBufferInfoANDROID( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & + operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportAndroidHardwareBufferInfoANDROID & + operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default; +# else + bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); + } + + bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + const void * pNext = {}; + struct AHardwareBuffer * buffer = {}; + }; + static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportAndroidHardwareBufferInfoANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct ImportMemoryFdInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + int fd_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , fd( fd_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & + operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } + + operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default; +#else + bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); + } + + bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportMemoryFdInfoKHR; + }; + + struct ImportMemoryHostPointerInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + void * pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , pHostPointer( pHostPointer_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryHostPointerInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & + operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryHostPointerInfoEXT & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } + + operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default; +#else + bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( pHostPointer == rhs.pHostPointer ); + } + + bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + void * pHostPointer = {}; + }; + static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportMemoryHostPointerInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportMemoryWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & + operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ) && ( name == rhs.name ); + } + + bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportMemoryWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportMemoryWin32HandleInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, + HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , handle( handle_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & + operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryWin32HandleInfoNV & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ); + } + + bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; + HANDLE handle = {}; + }; + static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportMemoryWin32HandleInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportMemoryZirconHandleInfoFUCHSIA + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImportMemoryZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportMemoryZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + zx_handle_t handle_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , handle( handle_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & + operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryZirconHandleInfoFUCHSIA & + operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + zx_handle_t handle = {}; + }; + static_assert( sizeof( ImportMemoryZirconHandleInfoFUCHSIA ) == sizeof( VkImportMemoryZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportMemoryZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct InputAttachmentAspectReference + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + InputAttachmentAspectReference( uint32_t subpass_ = {}, + uint32_t inputAttachmentIndex_ = {}, + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT + : subpass( subpass_ ) + , inputAttachmentIndex( inputAttachmentIndex_ ) + , aspectMask( aspectMask_ ) + {} + + VULKAN_HPP_CONSTEXPR + InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT + : InputAttachmentAspectReference( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & + operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + { + subpass = subpass_; + return *this; + } + + InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentIndex = inputAttachmentIndex_; + return *this; + } + + InputAttachmentAspectReference & + setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( InputAttachmentAspectReference const & ) const = default; +#else + bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) && + ( aspectMask == rhs.aspectMask ); + } + + bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t subpass = {}; + uint32_t inputAttachmentIndex = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + }; + static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; + + struct InstanceCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pApplicationInfo( pApplicationInfo_ ) + , enabledLayerCount( enabledLayerCount_ ) + , ppEnabledLayerNames( ppEnabledLayerNames_ ) + , enabledExtensionCount( enabledExtensionCount_ ) + , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + {} + + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : InstanceCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo( + VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {} ) + : flags( flags_ ) + , pApplicationInfo( pApplicationInfo_ ) + , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) + , ppEnabledLayerNames( pEnabledLayerNames_.data() ) + , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) + , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & + operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + InstanceCreateInfo & + setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pApplicationInfo = pApplicationInfo_; + return *this; + } + + InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = enabledLayerCount_; + return *this; + } + + InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo & setPEnabledLayerNames( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) + VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + { 
+ enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo & setPEnabledExtensionNames( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) + VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( InstanceCreateInfo const & ) const = default; +#else + bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pApplicationInfo == rhs.pApplicationInfo ) && ( enabledLayerCount == rhs.enabledLayerCount ) && + ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) && + ( enabledExtensionCount == rhs.enabledExtensionCount ) && + ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ); + } + + bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {}; + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {}; + uint32_t enabledLayerCount = {}; + const char * const * ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char * const * ppEnabledExtensionNames = {}; + }; + static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = InstanceCreateInfo; + }; + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + struct MacOSSurfaceCreateInfoMVK + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, + const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pView( pView_ ) + {} + + VULKAN_HPP_CONSTEXPR + MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + : MacOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & + operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT + { + pView = pView_; + return *this; + } + + operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default; +# else + bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); + } + + bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {}; + const void * pView = {}; + }; + static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MacOSSurfaceCreateInfoMVK; + }; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + struct MemoryAllocateFlagsInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, + uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , deviceMask( deviceMask_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryAllocateFlagsInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & + operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } + + operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default; +#else + bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( deviceMask == rhs.deviceMask ); + } + + bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {}; + uint32_t deviceMask = {}; + }; + static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MemoryAllocateFlagsInfo; + }; + using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; + + struct MemoryDedicatedAllocateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryDedicatedAllocateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & + operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default; +#else + bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); + } + + bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
+  {
+    using Type = MemoryDedicatedAllocateInfo;
+  };
+  using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
+
+  struct MemoryDedicatedRequirements
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {},
+                                   VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
+      : prefersDedicatedAllocation( prefersDedicatedAllocation_ )
+      , requiresDedicatedAllocation( requiresDedicatedAllocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryDedicatedRequirements( *reinterpret_cast<MemoryDedicatedRequirements const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedRequirements &
+      operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryDedicatedRequirements *>( this );
+    }
+
+    operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryDedicatedRequirements *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryDedicatedRequirements const & ) const = default;
+#else
+    bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) &&
+             ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
+    }
+
+    bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
+  };
+  static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryDedicatedRequirements>::value,
+                 "struct wrapper is not a standard layout!"
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
+  {
+    using Type = MemoryDedicatedRequirements;
+  };
+  using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
+
+  struct MemoryOpaqueCaptureAddressAllocateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
+      : opaqueCaptureAddress( opaqueCaptureAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryOpaqueCaptureAddressAllocateInfo(
+          *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo &
+      operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryOpaqueCaptureAddressAllocateInfo &
+      operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+    MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryOpaqueCaptureAddressAllocateInfo &
+      setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opaqueCaptureAddress = opaqueCaptureAddress_;
+      return *this;
+    }
+
+    operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
+    }
+
+    operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default;
+#else
+    bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+    }
+
+    bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+    const void * pNext = {};
+    uint64_t opaqueCaptureAddress = {};
+  };
+  static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryOpaqueCaptureAddressAllocateInfo>::value,
+                 "struct wrapper is not a standard layout!"
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
+  {
+    using Type = MemoryOpaqueCaptureAddressAllocateInfo;
+  };
+  using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
+
+  struct MemoryPriorityAllocateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {} ) VULKAN_HPP_NOEXCEPT
+      : priority( priority_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryPriorityAllocateInfoEXT( *reinterpret_cast<MemoryPriorityAllocateInfoEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT &
+      operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      priority = priority_;
+      return *this;
+    }
+
+    operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT *>( this );
+    }
+
+    operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default;
+#else
+    bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority );
+    }
+
+    bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
+    const void * pNext = {};
+    float priority = {};
+  };
+  static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryPriorityAllocateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = MemoryPriorityAllocateInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct MetalSurfaceCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, + const CAMetalLayer * pLayer_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pLayer( pLayer_ ) + {} + + VULKAN_HPP_CONSTEXPR + MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MetalSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & + operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT + { + pLayer = pLayer_; + return *this; + } + + operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer ); + } + + bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; + const CAMetalLayer * pLayer = {}; + }; + static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MetalSurfaceCreateInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + struct MutableDescriptorTypeListVALVE + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListVALVE( + uint32_t descriptorTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : descriptorTypeCount( descriptorTypeCount_ ) + , pDescriptorTypes( pDescriptorTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + MutableDescriptorTypeListVALVE( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeListVALVE( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : MutableDescriptorTypeListVALVE( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeListVALVE( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + descriptorTypes_ ) + : descriptorTypeCount( static_cast( descriptorTypes_.size() ) ) + , pDescriptorTypes( descriptorTypes_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE & + operator=( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeListVALVE & operator=( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MutableDescriptorTypeListVALVE & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorTypeCount = descriptorTypeCount_; + return *this; + } + + MutableDescriptorTypeListVALVE & + setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorTypes = pDescriptorTypes_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeListVALVE & setDescriptorTypes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + descriptorTypes_ ) VULKAN_HPP_NOEXCEPT + { + descriptorTypeCount = static_cast( descriptorTypes_.size() ); + pDescriptorTypes = descriptorTypes_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkMutableDescriptorTypeListVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeListVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MutableDescriptorTypeListVALVE const & ) const = default; +#else + bool operator==( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( descriptorTypeCount == rhs.descriptorTypeCount ) && ( pDescriptorTypes == rhs.pDescriptorTypes ); + } + + bool operator!=( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t descriptorTypeCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes = {}; + }; + static_assert( sizeof( MutableDescriptorTypeListVALVE ) == sizeof( VkMutableDescriptorTypeListVALVE ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + struct MutableDescriptorTypeCreateInfoVALVE + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eMutableDescriptorTypeCreateInfoVALVE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE( + uint32_t mutableDescriptorTypeListCount_ = {}, + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_ = {} ) + VULKAN_HPP_NOEXCEPT + : mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ ) + , pMutableDescriptorTypeLists( pMutableDescriptorTypeLists_ ) + {} + + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE( MutableDescriptorTypeCreateInfoVALVE const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeCreateInfoVALVE( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : MutableDescriptorTypeCreateInfoVALVE( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeCreateInfoVALVE( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + mutableDescriptorTypeLists_ ) + : mutableDescriptorTypeListCount( static_cast( mutableDescriptorTypeLists_.size() ) ) + , pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & + operator=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeCreateInfoVALVE & + operator=( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MutableDescriptorTypeCreateInfoVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MutableDescriptorTypeCreateInfoVALVE & + setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT + { + mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_; + return *this; + } + + MutableDescriptorTypeCreateInfoVALVE & setPMutableDescriptorTypeLists( + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT + { + pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeCreateInfoVALVE & setMutableDescriptorTypeLists( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + mutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT + { + mutableDescriptorTypeListCount = static_cast( mutableDescriptorTypeLists_.size() ); + pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkMutableDescriptorTypeCreateInfoVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeCreateInfoVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MutableDescriptorTypeCreateInfoVALVE const & ) const = default; +#else + bool operator==( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) && + ( pMutableDescriptorTypeLists == 
rhs.pMutableDescriptorTypeLists ); + } + + bool operator!=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoVALVE; + const void * pNext = {}; + uint32_t mutableDescriptorTypeListCount = {}; + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists = {}; + }; + static_assert( sizeof( MutableDescriptorTypeCreateInfoVALVE ) == sizeof( VkMutableDescriptorTypeCreateInfoVALVE ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = MutableDescriptorTypeCreateInfoVALVE; + }; + + union PerformanceCounterResultKHR + { + PerformanceCounterResultKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) ); + } + + PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {} + + PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {} + + PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {} + + PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {} + + PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {} + + PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {} + + PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT + { + int32 = int32_; + return *this; + } + + PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT + { + int64 = int64_; + return *this; + } + + PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT + { + uint32 = uint32_; + return *this; + } + + PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT + { + uint64 = uint64_; + return *this; + } + + PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT + { + float32 = float32_; + return *this; + } + + PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT + { + float64 = float64_; + return *this; + } + + VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR & + operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) ); + return *this; + } + + operator VkPerformanceCounterResultKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterResultKHR &() + { + return *reinterpret_cast( this ); + } + + int32_t int32; + int64_t int64; + uint32_t uint32; + uint64_t uint64; + float float32; + double float64; + }; + + struct PerformanceQuerySubmitInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : counterPassIndex( counterPassIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PerformanceQuerySubmitInfoKHR( *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR &
+      operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      counterPassIndex = counterPassIndex_;
+      return *this;
+    }
+
+    operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR *>( this );
+    }
+
+    operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default;
+#else
+    bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex );
+    }
+
+    bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
+    const void * pNext = {};
+    uint32_t counterPassIndex = {};
+  };
+  static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceQuerySubmitInfoKHR>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PerformanceQuerySubmitInfoKHR; + }; + + struct PhysicalDevice16BitStorageFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevice16BitStorageFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT + : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) + , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) + , storagePushConstant16( storagePushConstant16_ ) + , storageInputOutput16( storageInputOutput16_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevice16BitStorageFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice16BitStorageFeatures & + operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevice16BitStorageFeatures & + setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer16BitAccess = storageBuffer16BitAccess_; + return *this; + } + + PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; + return *this; + } + + PhysicalDevice16BitStorageFeatures & + setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant16 = storagePushConstant16_; + return *this; + } + + PhysicalDevice16BitStorageFeatures & + setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + { + storageInputOutput16 = storageInputOutput16_; + return *this; + } + + operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default; +#else + bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && + ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && + ( storagePushConstant16 == rhs.storagePushConstant16 ) && + ( storageInputOutput16 == rhs.storageInputOutput16 ); + } + + bool operator!=( 
PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + }; + static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDevice16BitStorageFeatures; + }; + using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; + + struct PhysicalDevice4444FormatsFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevice4444FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {} ) VULKAN_HPP_NOEXCEPT + : formatA4R4G4B4( formatA4R4G4B4_ ) + , formatA4B4G4R4( formatA4B4G4R4_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & + operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice4444FormatsFeaturesEXT & + operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevice4444FormatsFeaturesEXT & + setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT + { + formatA4R4G4B4 = formatA4R4G4B4_; + return *this; + } + + PhysicalDevice4444FormatsFeaturesEXT & + setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT + { + formatA4B4G4R4 = formatA4B4G4R4_; + return *this; + } + + operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) && + ( formatA4B4G4R4 == rhs.formatA4B4G4R4 ); + } + + bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {}; + }; + static_assert( sizeof( PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDevice4444FormatsFeaturesEXT; + }; + + struct PhysicalDevice8BitStorageFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevice8BitStorageFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT + : storageBuffer8BitAccess( storageBuffer8BitAccess_ ) + , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) + , storagePushConstant8( storagePushConstant8_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevice8BitStorageFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & + operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevice8BitStorageFeatures & + setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer8BitAccess = storageBuffer8BitAccess_; + return *this; + } + + PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; + return *this; + } + + PhysicalDevice8BitStorageFeatures & + setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant8 = storagePushConstant8_; + return *this; + } + + operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default; +#else + bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && + ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && + ( 
storagePushConstant8 == rhs.storagePushConstant8 ); + } + + bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + }; + static_assert( sizeof( PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDevice8BitStorageFeatures; + }; + using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; + + struct PhysicalDeviceASTCDecodeFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT + : decodeModeSharedExponent( decodeModeSharedExponent_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & + operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceASTCDecodeFeaturesEXT & + operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceASTCDecodeFeaturesEXT & + setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT + { + decodeModeSharedExponent = decodeModeSharedExponent_; + return *this; + } + + operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( decodeModeSharedExponent == rhs.decodeModeSharedExponent ); + } + + bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {}; + }; + static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( 
VkPhysicalDeviceASTCDecodeFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceASTCDecodeFeaturesEXT; + }; + + struct PhysicalDeviceAccelerationStructureFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructure( accelerationStructure_ ) + , accelerationStructureCaptureReplay( accelerationStructureCaptureReplay_ ) + , accelerationStructureIndirectBuild( accelerationStructureIndirectBuild_ ) + , accelerationStructureHostCommands( accelerationStructureHostCommands_ ) + , descriptorBindingAccelerationStructureUpdateAfterBind( descriptorBindingAccelerationStructureUpdateAfterBind_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( + PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAccelerationStructureFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructureFeaturesKHR & + operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_; + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_; + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureHostCommands = accelerationStructureHostCommands_; + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & 
setDescriptorBindingAccelerationStructureUpdateAfterBind(
+      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( accelerationStructure == rhs.accelerationStructure ) &&
+             ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay ) &&
+             ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild ) &&
+             ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands ) &&
+             ( descriptorBindingAccelerationStructureUpdateAfterBind ==
+               rhs.descriptorBindingAccelerationStructureUpdateAfterBind );
+    }
+
+    bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {};
+  };
+  static_assert( sizeof( PhysicalDeviceAccelerationStructureFeaturesKHR ) ==
+                   sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceAccelerationStructureFeaturesKHR>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceAccelerationStructureFeaturesKHR; + }; + + struct PhysicalDeviceAccelerationStructurePropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( + uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxPrimitiveCount_ = {}, + uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, + uint32_t minAccelerationStructureScratchOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : maxGeometryCount( maxGeometryCount_ ) + , maxInstanceCount( maxInstanceCount_ ) + , maxPrimitiveCount( maxPrimitiveCount_ ) + , maxPerStageDescriptorAccelerationStructures( maxPerStageDescriptorAccelerationStructures_ ) + , maxPerStageDescriptorUpdateAfterBindAccelerationStructures( + maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ ) + , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) + , maxDescriptorSetUpdateAfterBindAccelerationStructures( maxDescriptorSetUpdateAfterBindAccelerationStructures_ ) + , minAccelerationStructureScratchOffsetAlignment( minAccelerationStructureScratchOffsetAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( + PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAccelerationStructurePropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructurePropertiesKHR & + operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructurePropertiesKHR & + operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) && + ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxPrimitiveCount == rhs.maxPrimitiveCount ) && + ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures ) && + ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == + rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) && + ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) && + ( 
maxDescriptorSetUpdateAfterBindAccelerationStructures == + rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) && + ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment ); + } + + bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; + void * pNext = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxPrimitiveCount = {}; + uint32_t maxPerStageDescriptorAccelerationStructures = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {}; + uint32_t minAccelerationStructureScratchOffsetAlignment = {}; + }; + static_assert( sizeof( PhysicalDeviceAccelerationStructurePropertiesKHR ) == + sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceAccelerationStructurePropertiesKHR; + }; + + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT + : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( + PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & + operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & + operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT + { + advancedBlendCoherentOperations = advancedBlendCoherentOperations_; + return *this; + } + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) 
const = default; +#else + bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {}; + }; + static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == + sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + }; + + struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( + uint32_t advancedBlendMaxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT + : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ) + , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ) + , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ) + , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ) + , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ) + , advancedBlendAllOperations( advancedBlendAllOperations_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( + PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedPropertiesEXT & + operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT & + operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & 
) const = default; +#else + bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) && + ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) && + ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) && + ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) && + ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && + ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + void * pNext = {}; + uint32_t advancedBlendMaxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {}; + }; + static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == + sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + }; + + struct PhysicalDeviceBufferDeviceAddressFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferDeviceAddress( bufferDeviceAddress_ ) + , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) + , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( + PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBufferDeviceAddressFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & + operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeatures & + operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeatures & + setBufferDeviceAddress( 
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay(
+      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice(
+      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
+             ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
+             ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+    }
+
+    bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+  };
+  static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) ==
+                   sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeatures>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceBufferDeviceAddressFeatures; + }; + using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; + + struct PhysicalDeviceBufferDeviceAddressFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferDeviceAddress( bufferDeviceAddress_ ) + , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) + , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( + PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBufferDeviceAddressFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & + operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & + operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } + + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); + } + + bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const 
& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + }; + static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == + sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + }; + using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + + struct PhysicalDeviceCoherentMemoryFeaturesAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( + VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceCoherentMemory( deviceCoherentMemory_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCoherentMemoryFeaturesAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & + operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoherentMemoryFeaturesAMD & + operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCoherentMemoryFeaturesAMD & + setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT + { + deviceCoherentMemory = deviceCoherentMemory_; + return *this; + } + + operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory ); + } + + bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {}; + }; + static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == + sizeof( 
VkPhysicalDeviceCoherentMemoryFeaturesAMD ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
+  {
+    using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
+  };
+
+  struct PhysicalDeviceColorWriteEnableFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {} ) VULKAN_HPP_NOEXCEPT : colorWriteEnable( colorWriteEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT(
+      PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceColorWriteEnableFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT &
+      operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceColorWriteEnableFeaturesEXT &
+      operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceColorWriteEnableFeaturesEXT &
+      setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorWriteEnable = colorWriteEnable_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceColorWriteEnableFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorWriteEnable == rhs.colorWriteEnable );
+    }
+
+    bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {};
+  };
+  static_assert( sizeof( PhysicalDeviceColorWriteEnableFeaturesEXT ) ==
+                   sizeof( VkPhysicalDeviceColorWriteEnableFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceColorWriteEnableFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceColorWriteEnableFeaturesEXT; + }; + + struct PhysicalDeviceComputeShaderDerivativesFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT + : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ) + , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( + PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceComputeShaderDerivativesFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & + operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT + { + computeDerivativeGroupQuads = computeDerivativeGroupQuads_; + return *this; + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT + { + computeDerivativeGroupLinear = computeDerivativeGroupLinear_; + return *this; + } + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) && + ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear ); + } + + bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {}; + }; + static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == + sizeof( 
VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV; + }; + + struct PhysicalDeviceConditionalRenderingFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT + : conditionalRendering( conditionalRendering_ ) + , inheritedConditionalRendering( inheritedConditionalRendering_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( + PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConditionalRenderingFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & + operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConditionalRenderingFeaturesEXT & + operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceConditionalRenderingFeaturesEXT & + setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + conditionalRendering = conditionalRendering_; + return *this; + } + + PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + inheritedConditionalRendering = inheritedConditionalRendering_; + return *this; + } + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) && + ( inheritedConditionalRendering == rhs.inheritedConditionalRendering ); + } + + bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {}; + }; + static_assert( 
sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == + sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; + }; + + struct PhysicalDeviceConservativeRasterizationPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( + float primitiveOverestimationSize_ = {}, + float maxExtraPrimitiveOverestimationSize_ = {}, + float extraPrimitiveOverestimationSizeGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT + : primitiveOverestimationSize( primitiveOverestimationSize_ ) + , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) + , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) + , primitiveUnderestimation( primitiveUnderestimation_ ) + , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ) + , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ) + , degenerateLinesRasterized( degenerateLinesRasterized_ ) + , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ) + , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( + PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConservativeRasterizationPropertiesEXT( + VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConservativeRasterizationPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConservativeRasterizationPropertiesEXT & + operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConservativeRasterizationPropertiesEXT & + operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) 
&&
+             ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) &&
+             ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) &&
+             ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) &&
+             ( primitiveUnderestimation == rhs.primitiveUnderestimation ) &&
+             ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) &&
+             ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) &&
+             ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) &&
+             ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) &&
+             ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
+    }
+
+    bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+    void * pNext = {};
+    float primitiveOverestimationSize = {};
+    float maxExtraPrimitiveOverestimationSize = {};
+    float extraPrimitiveOverestimationSizeGranularity = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {};
+    VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {};
+    VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
+  };
+  static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) ==
+                   sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceConservativeRasterizationPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; + }; + + struct PhysicalDeviceCooperativeMatrixFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : cooperativeMatrix( cooperativeMatrix_ ) + , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( + PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & + operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesNV & + operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCooperativeMatrixFeaturesNV & + setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrix = cooperativeMatrix_; + return *this; + } + + PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && + ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); + } + + bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + }; + static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; + }; + + struct PhysicalDeviceCooperativeMatrixPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT + : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( + PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixPropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixPropertiesNV & + operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesNV & + operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); + } + + bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + }; + static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; + }; + + struct PhysicalDeviceCornerSampledImageFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT + : cornerSampledImage( cornerSampledImage_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( + PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCornerSampledImageFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & + operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCornerSampledImageFeaturesNV & + operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCornerSampledImageFeaturesNV & + setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT + { + cornerSampledImage = cornerSampledImage_; + return *this; + } + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage ); + } + + bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {}; + }; + static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == + sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCornerSampledImageFeaturesNV; + }; + + struct PhysicalDeviceCoverageReductionModeFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT + : coverageReductionMode( coverageReductionMode_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( + PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCoverageReductionModeFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & + operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoverageReductionModeFeaturesNV & + operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCoverageReductionModeFeaturesNV & + setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageReductionMode = coverageReductionMode_; + return *this; + } + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ); + } + + bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {}; + }; + static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == + sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCoverageReductionModeFeaturesNV; + }; + + struct PhysicalDeviceCustomBorderColorFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {} ) VULKAN_HPP_NOEXCEPT + : customBorderColors( customBorderColors_ ) + , customBorderColorWithoutFormat( customBorderColorWithoutFormat_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( + PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & + operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorFeaturesEXT & + operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & + setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColors = customBorderColors_; + return *this; + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColorWithoutFormat = customBorderColorWithoutFormat_; + return *this; + } + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) && + ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat ); + } + + bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {}; + }; + static_assert( sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) == + sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorFeaturesEXT; + }; + + struct PhysicalDeviceCustomBorderColorPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {} ) VULKAN_HPP_NOEXCEPT + : maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( + PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorPropertiesEXT & + operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorPropertiesEXT & + operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers ); + } + + bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + void * pNext = {}; + uint32_t maxCustomBorderColorSamplers = {}; + }; + static_assert( sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) == + sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorPropertiesEXT; + }; + + struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT + : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( + &rhs ); + return *this; + } + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; + return *this; + } + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing ); + } + + bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {}; + }; + static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == + sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + }; + + struct PhysicalDeviceDepthClipEnableFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : depthClipEnable( depthClipEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( + PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClipEnableFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & + operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipEnableFeaturesEXT & + operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDepthClipEnableFeaturesEXT & + setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClipEnable = depthClipEnable_; + return *this; + } + + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable ); + } + + bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; + static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == + sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClipEnableFeaturesEXT; + }; + + struct PhysicalDeviceDepthStencilResolveProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDepthStencilResolveProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT + : supportedDepthResolveModes( supportedDepthResolveModes_ ) + , supportedStencilResolveModes( supportedStencilResolveModes_ ) + , independentResolveNone( independentResolveNone_ ) + , independentResolve( independentResolve_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( + PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthStencilResolveProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthStencilResolveProperties & + operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthStencilResolveProperties & + operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && + ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && + ( independentResolveNone == rhs.independentResolveNone ) && + ( independentResolve == rhs.independentResolve ); + } + + bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + }; + static_assert( sizeof( PhysicalDeviceDepthStencilResolveProperties ) == + sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthStencilResolveProperties; + }; + using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; + + struct PhysicalDeviceDescriptorIndexingFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) + , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) + , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) + , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) + , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) + , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) + , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) + , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) + , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) + , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) + , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) + , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) + , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) + , descriptorBindingStorageBufferUpdateAfterBind( 
descriptorBindingStorageBufferUpdateAfterBind_ ) + , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) + , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) + , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) + , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) + , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) + , runtimeDescriptorArray( runtimeDescriptorArray_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( + PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingFeatures & + operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + 
shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingPartiallyBound = 
descriptorBindingPartiallyBound_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + { + runtimeDescriptorArray = runtimeDescriptorArray_; + return *this; + } + + operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == + rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == + rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == + rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == + rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ); + } + + bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 
shaderInputAttachmentArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderUniformTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderUniformBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderSampledImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderInputAttachmentArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderUniformTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingUniformBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingSampledImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingStorageImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingStorageBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingUniformTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingStorageTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingUpdateUnusedWhilePending = {};
+    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingPartiallyBound = {};
+    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingVariableDescriptorCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32        runtimeDescriptorArray = {};
+  };
+  static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeatures ) ==
+                   sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingFeatures>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingFeatures; + }; + using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; + + struct PhysicalDeviceDescriptorIndexingProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDescriptorIndexingProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( + uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) + , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) + , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) + , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) + , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) + , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) + , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) + , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) + , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) + , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) + , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) + , maxPerStageDescriptorUpdateAfterBindInputAttachments( 
maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) + , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) + , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) + , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) + , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( + PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingProperties & + operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingProperties & + operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == + rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == + rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == + rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == + rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == + rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && + ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( 
maxPerStageDescriptorUpdateAfterBindSampledImages == + rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == + rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == + rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); + } + + bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; + void * pNext = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; + uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + }; + static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) == + sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingProperties; + }; + using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; + + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceGeneratedCommands( deviceGeneratedCommands_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCommands = deviceGeneratedCommands_; + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ); + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + }; + static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( + uint32_t maxGraphicsShaderGroupCount_ = {}, + uint32_t maxIndirectSequenceCount_ = {}, + uint32_t maxIndirectCommandsTokenCount_ = {}, + uint32_t maxIndirectCommandsStreamCount_ = {}, + uint32_t maxIndirectCommandsTokenOffset_ = {}, + uint32_t maxIndirectCommandsStreamStride_ = {}, + uint32_t minSequencesCountBufferOffsetAlignment_ = {}, + uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, + uint32_t minIndirectCommandsBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ) + , maxIndirectSequenceCount( maxIndirectSequenceCount_ ) + , maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ) + , maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ) + , maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ) + , maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ) + , minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ) + , minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ) + , minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) && + ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && + ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && + ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) && + ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && + ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) && + ( 
minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) && + ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) && + ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + void * pNext = {}; + uint32_t maxGraphicsShaderGroupCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsStreamCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsStreamStride = {}; + uint32_t minSequencesCountBufferOffsetAlignment = {}; + uint32_t minSequencesIndexBufferOffsetAlignment = {}; + uint32_t minIndirectCommandsBufferOffsetAlignment = {}; + }; + static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + }; + + struct PhysicalDeviceDeviceMemoryReportFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceMemoryReport( deviceMemoryReport_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( + PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceMemoryReportFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & + operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & + operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & + setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT + { + deviceMemoryReport = deviceMemoryReport_; + return *this; + } + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport ); + } + + bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {}; + }; + static_assert( sizeof( PhysicalDeviceDeviceMemoryReportFeaturesEXT ) == + sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT; + }; + + struct PhysicalDeviceDiagnosticsConfigFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {} ) VULKAN_HPP_NOEXCEPT + : diagnosticsConfig( diagnosticsConfig_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( + PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDiagnosticsConfigFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & + operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiagnosticsConfigFeaturesNV & + operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & + setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT + { + diagnosticsConfig = diagnosticsConfig_; + return *this; + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); + } + + bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; + }; + static_assert( sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) == + sizeof( 
VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; + }; + + struct PhysicalDeviceDiscardRectanglePropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + : maxDiscardRectangles( maxDiscardRectangles_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( + PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDiscardRectanglePropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiscardRectanglePropertiesEXT & + operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiscardRectanglePropertiesEXT & + operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); + } + + bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + void * pNext = {}; + uint32_t maxDiscardRectangles = {}; + }; + static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == + sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
+  {
+    using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
+  };
+
+  struct PhysicalDeviceDriverProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(
+      VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
+      std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
+      std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
+      VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+      : driverID( driverID_ )
+      , driverName( driverName_ )
+      , driverInfo( driverInfo_ )
+      , conformanceVersion( conformanceVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDriverProperties( *reinterpret_cast<PhysicalDeviceDriverProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties &
+      operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDriverProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDriverProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceDriverProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) &&
+             ( driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) &&
+             ( conformanceVersion == rhs.conformanceVersion );
+    }
+
+    bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::DriverId      driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
+    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
+  };
+  static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDriverProperties>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDriverProperties; + }; + using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; + + struct PhysicalDeviceExclusiveScissorFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} ) + VULKAN_HPP_NOEXCEPT : exclusiveScissor( exclusiveScissor_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( + PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExclusiveScissorFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & + operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExclusiveScissorFeaturesNV & + operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExclusiveScissorFeaturesNV & + setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissor = exclusiveScissor_; + return *this; + } + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor ); + } + + bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {}; + }; + static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == + sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExclusiveScissorFeaturesNV; + }; + + struct PhysicalDeviceExtendedDynamicState2FeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT + : extendedDynamicState2( extendedDynamicState2_ ) + , extendedDynamicState2LogicOp( extendedDynamicState2LogicOp_ ) + , extendedDynamicState2PatchControlPoints( extendedDynamicState2PatchControlPoints_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( + PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicState2FeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & + operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & + operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2 = extendedDynamicState2_; + return *this; + } + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_; + return *this; + } + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2PatchControlPoints( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_; + return *this; + } + + operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( extendedDynamicState2 == rhs.extendedDynamicState2 ) && + ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) && + ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints ); + } + + bool operator!=( 
PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {}; + }; + static_assert( sizeof( PhysicalDeviceExtendedDynamicState2FeaturesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT; + }; + + struct PhysicalDeviceExtendedDynamicStateFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {} ) VULKAN_HPP_NOEXCEPT + : extendedDynamicState( extendedDynamicState_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( + PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicStateFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & + operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & + operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & + setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState = extendedDynamicState_; + return *this; + } + + operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState ); + } + + bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {}; + 
}; + static_assert( sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT; + }; + + struct PhysicalDeviceExternalImageFormatInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExternalImageFormatInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalImageFormatInfo( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & + operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalImageFormatInfo & + operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalImageFormatInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo>
+  {
+    using Type = PhysicalDeviceExternalImageFormatInfo;
+  };
+  using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
+
+  struct PhysicalDeviceExternalMemoryHostPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(
+      VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+      : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(
+      PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExternalMemoryHostPropertiesEXT(
+          *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryHostPropertiesEXT &
+      operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT &
+      operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    minImportedHostPointerAlignment = {};
+  };
+  static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) ==
+                   sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalMemoryHostPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; + }; + + struct PhysicalDeviceFloatControlsProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFloatControlsProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT + : denormBehaviorIndependence( denormBehaviorIndependence_ ) + , roundingModeIndependence( roundingModeIndependence_ ) + , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) + , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) + , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) + , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) + , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) + , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) + , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) + , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) + , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) + , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) + , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) + , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) + , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) + , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) + , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFloatControlsProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFloatControlsProperties & + operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
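+    // Editorial note (a hedged usage sketch, not part of the generated Vulkan-Hpp code):
+    // PhysicalDeviceFloatControlsProperties is a read-only properties struct. It is normally
+    // chained into a PhysicalDeviceProperties2 query rather than filled by hand. The handle
+    // name `physicalDevice` below is a placeholder for a valid PhysicalDevice.
+    //
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties floatControls;
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2             properties2;
+    //   properties2.pNext = &floatControls;             // chain the extension struct
+    //   physicalDevice.getProperties2( &properties2 );  // the driver fills both structs
+    //   // floatControls.shaderDenormPreserveFloat16 etc. can now be inspected.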
PhysicalDeviceFloatControlsProperties & + operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && + ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && + ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && + ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && + ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && + ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && + ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ); + } + + bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + }; + static_assert( sizeof( PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFloatControlsProperties; + }; + using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; + + struct PhysicalDeviceFragmentDensityMap2FeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentDensityMapDeferred( fragmentDensityMapDeferred_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( + PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMap2FeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & + operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & + operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & + setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDeferred = fragmentDensityMapDeferred_; + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred ); + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentDensityMap2FeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT; + }; + + struct PhysicalDeviceFragmentDensityMap2PropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( + VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, + uint32_t maxSubsampledArrayLayers_ = {}, + uint32_t maxDescriptorSetSubsampledSamplers_ = {} ) VULKAN_HPP_NOEXCEPT + : subsampledLoads( subsampledLoads_ ) + , subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ ) + , maxSubsampledArrayLayers( maxSubsampledArrayLayers_ ) + , maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( + PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMap2PropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2PropertiesEXT & + operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2PropertiesEXT & + operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledLoads == rhs.subsampledLoads ) && + ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) && + ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) && + ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers ); + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {}; + uint32_t maxSubsampledArrayLayers = {}; + uint32_t maxDescriptorSetSubsampledSamplers = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentDensityMap2PropertiesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT; + }; + + struct PhysicalDeviceFragmentDensityMapFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentDensityMap( fragmentDensityMap_ ) + , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ) + , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( + PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapFeaturesEXT & + operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMap = fragmentDensityMap_; + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDynamic = fragmentDensityMapDynamic_; + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages( + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) && + ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) && + ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages ); + } + + bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; + }; + + struct PhysicalDeviceFragmentDensityMapPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( + VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT + : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ) + , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ) + , fragmentDensityInvocations( fragmentDensityInvocations_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( + PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapPropertiesEXT & + operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapPropertiesEXT & + operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) && + ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && + ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); + } + + bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; + 
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT; + }; + + struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentShaderBarycentric( fragmentShaderBarycentric_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( + PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderBarycentricFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderBarycentric = fragmentShaderBarycentric_; + return *this; + } + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); + } + + bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == + sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV; + }; + + struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ) + , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ) + , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( + PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; + return *this; + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; + return *this; + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; + return *this; + } + + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) && + ( fragmentShaderPixelInterlock == 
rhs.fragmentShaderPixelInterlock ) && + ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); + } + + bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + }; + + struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentShadingRateEnums( fragmentShadingRateEnums_ ) + , supersampleFragmentShadingRates( supersampleFragmentShadingRates_ ) + , noInvocationFragmentShadingRates( noInvocationFragmentShadingRates_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShadingRateEnums = fragmentShadingRateEnums_; + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setSupersampleFragmentShadingRates( + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + supersampleFragmentShadingRates = supersampleFragmentShadingRates_; + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setNoInvocationFragmentShadingRates( + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + 
noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_; + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) && + ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates ) && + ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates ); + } + + bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {}; + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {}; + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) == + sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + }; + + struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 ) VULKAN_HPP_NOEXCEPT + : maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & 
setMaxFragmentShadingRateInvocationCount( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT + { + maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_; + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount ); + } + + bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + static_assert( sizeof( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) == + sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + }; + + struct PhysicalDeviceFragmentShadingRateFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineFragmentShadingRate( pipelineFragmentShadingRate_ ) + , primitiveFragmentShadingRate( primitiveFragmentShadingRate_ ) + , attachmentFragmentShadingRate( attachmentFragmentShadingRate_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( + PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateFeaturesKHR & + operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
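+    // Editorial note (a hedged usage sketch, not part of the generated Vulkan-Hpp code):
+    // feature structs like this one are used both to query support via getFeatures2 and to
+    // enable the features by chaining the filled struct into DeviceCreateInfo::pNext (in
+    // which case DeviceCreateInfo::pEnabledFeatures must stay null). The handle name
+    // `physicalDevice` below is a placeholder for a valid PhysicalDevice.
+    //
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR shadingRateFeatures;
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2                      features2;
+    //   features2.pNext = &shadingRateFeatures;     // chain the extension struct
+    //   physicalDevice.getFeatures2( &features2 );  // query which features are supported
+    //   // Pass features2 (with shadingRateFeatures still chained) in DeviceCreateInfo::pNext
+    //   // at device creation to enable the supported features.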
PhysicalDeviceFragmentShadingRateFeaturesKHR & + setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + pipelineFragmentShadingRate = pipelineFragmentShadingRate_; + return *this; + } + + PhysicalDeviceFragmentShadingRateFeaturesKHR & + setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + primitiveFragmentShadingRate = primitiveFragmentShadingRate_; + return *this; + } + + PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate( + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + attachmentFragmentShadingRate = attachmentFragmentShadingRate_; + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) && + ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) && + ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate ); + } + + bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentShadingRateFeaturesKHR ) == + sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR; + }; + + struct PhysicalDeviceFragmentShadingRatePropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, + uint32_t maxFragmentSizeAspectRatio_ = {}, + uint32_t maxFragmentShadingRateCoverageSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {} ) VULKAN_HPP_NOEXCEPT + : minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ ) + , maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ ) + , maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ ) + , primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ ) + , layeredShadingRateAttachments( layeredShadingRateAttachments_ ) + , fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ ) + , maxFragmentSize( maxFragmentSize_ ) + , maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ ) + , maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ ) + , maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ ) + , fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ ) + , fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ ) + , fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ ) + , fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ ) + , fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ ) + , fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ ) + , fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( + PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
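+    // Editorial note (a hedged usage sketch, not part of the generated Vulkan-Hpp code):
+    // instead of wiring pNext manually, a StructureChain can be used to query these limits.
+    // The handle name `physicalDevice` below is a placeholder for a valid PhysicalDevice.
+    //
+    //   VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
+    //                                        VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>
+    //     chain;  // the chain links the two structs through pNext automatically
+    //   physicalDevice.getProperties2( &chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>() );
+    //   auto & shadingRateProps =
+    //     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>();
+    //   // shadingRateProps.maxFragmentSize etc. can now be inspected.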
PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRatePropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRatePropertiesKHR & + operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRatePropertiesKHR & + operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == + rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) && + ( primitiveFragmentShadingRateWithMultipleViewports == + rhs.primitiveFragmentShadingRateWithMultipleViewports ) && + ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) && + ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) && + ( maxFragmentSize == rhs.maxFragmentSize ) && + ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) && + ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) && + ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) && + ( fragmentShadingRateWithShaderDepthStencilWrites == + rhs.fragmentShadingRateWithShaderDepthStencilWrites ) && + ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) && + ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) && + ( fragmentShadingRateWithConservativeRasterization == + rhs.fragmentShadingRateWithConservativeRasterization ) && + ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) && + ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) && + ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner ); + } + + bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {}; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {}; + uint32_t maxFragmentSizeAspectRatio = {}; + uint32_t maxFragmentShadingRateCoverageSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentShadingRatePropertiesKHR ) == + sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR; + }; + + struct PhysicalDeviceGroupProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( + uint32_t physicalDeviceCount_ = {}, + std::array const & physicalDevices_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT + : physicalDeviceCount( physicalDeviceCount_ ) + , physicalDevices( physicalDevices_ ) + , subsetAllocation( subsetAllocation_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGroupProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties & + operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( physicalDevices == rhs.physicalDevices ) && ( subsetAllocation == rhs.subsetAllocation ); + } + + bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceGroupProperties; + void * pNext = {}; + uint32_t physicalDeviceCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D + physicalDevices = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {}; + }; + static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceGroupProperties; + }; + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + + struct PhysicalDeviceHostQueryResetFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceHostQueryResetFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT + : hostQueryReset( hostQueryReset_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & + operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostQueryResetFeatures & + operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceHostQueryResetFeatures & + setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + { + hostQueryReset = hostQueryReset_; + return *this; + } + + operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset ); + } + + bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + }; + static_assert( sizeof( PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceHostQueryResetFeatures; + }; + using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; + + struct PhysicalDeviceIDProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceIDProperties( std::array const & deviceUUID_ = {}, + std::array const & driverUUID_ = {}, + std::array const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceUUID( deviceUUID_ ) + , driverUUID( driverUUID_ ) + , deviceLUID( deviceLUID_ ) + , deviceNodeMask( deviceNodeMask_ ) + , deviceLUIDValid( deviceLUIDValid_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIDProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties & + operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIDProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && + ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) && + ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ); + } + + bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + }; + static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceIDProperties; + }; + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + + struct PhysicalDeviceImageDrmFormatModifierInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( + uint64_t drmFormatModifier_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( + PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageDrmFormatModifierInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT( + uint64_t drmFormatModifier_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) + : drmFormatModifier( drmFormatModifier_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & + operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } 
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && + ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); + } + + bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + }; + static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == + sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; + }; + + struct PhysicalDeviceImageRobustnessFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : robustImageAccess( robustImageAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT( + PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageRobustnessFeaturesEXT( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageRobustnessFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeaturesEXT & + operator=( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageRobustnessFeaturesEXT & + operator=( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImageRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImageRobustnessFeaturesEXT & + setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess = robustImageAccess_; + return *this; + } + + operator VkPhysicalDeviceImageRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageRobustnessFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ); + } + + bool operator!=( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; + }; + static_assert( sizeof( PhysicalDeviceImageRobustnessFeaturesEXT ) == + sizeof( VkPhysicalDeviceImageRobustnessFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageRobustnessFeaturesEXT; + }; + + struct PhysicalDeviceImageViewImageFormatInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = + VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT + : imageViewType( imageViewType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( + PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageViewImageFormatInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & + operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewImageFormatInfoEXT & + operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImageViewImageFormatInfoEXT & + setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT + { + imageViewType = imageViewType_; + return *this; + } + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType ); + } + + bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + }; + static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == + sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageViewImageFormatInfoEXT; + }; + + struct PhysicalDeviceImagelessFramebufferFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT + : imagelessFramebuffer( imagelessFramebuffer_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( + PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImagelessFramebufferFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & + operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImagelessFramebufferFeatures & + operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImagelessFramebufferFeatures & + setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + { + imagelessFramebuffer = imagelessFramebuffer_; + return *this; + } + + operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ); + } + + bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + }; + static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeatures ) == + sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceImagelessFramebufferFeatures; + }; + using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; + + struct PhysicalDeviceIndexTypeUint8FeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT + : indexTypeUint8( indexTypeUint8_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIndexTypeUint8FeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & + operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIndexTypeUint8FeaturesEXT & + operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceIndexTypeUint8FeaturesEXT & + setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeUint8 = indexTypeUint8_; + return *this; + } + + operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 ); + } + + bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; + }; + static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == + sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT; + }; + + struct PhysicalDeviceInheritedViewportScissorFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {} ) VULKAN_HPP_NOEXCEPT + : inheritedViewportScissor2D( inheritedViewportScissor2D_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( + PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInheritedViewportScissorFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & + operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInheritedViewportScissorFeaturesNV & + operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceInheritedViewportScissorFeaturesNV & + setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT + { + inheritedViewportScissor2D = inheritedViewportScissor2D_; + return *this; + } + + operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D ); + } + + bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D = {}; + }; + static_assert( sizeof( PhysicalDeviceInheritedViewportScissorFeaturesNV ) == + sizeof( VkPhysicalDeviceInheritedViewportScissorFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV; + }; + + struct PhysicalDeviceInlineUniformBlockFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT + : inlineUniformBlock( inlineUniformBlock_ ) + , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( + PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInlineUniformBlockFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeaturesEXT & + operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockFeaturesEXT & + operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceInlineUniformBlockFeaturesEXT & + setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT + { + inlineUniformBlock = inlineUniformBlock_; + return *this; + } + + PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; + return *this; + } + + operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) && + ( descriptorBindingInlineUniformBlockUpdateAfterBind == + rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); + } + + bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; + }; + static_assert( sizeof( 
PhysicalDeviceInlineUniformBlockFeaturesEXT ) == + sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockFeaturesEXT; + }; + + struct PhysicalDeviceInlineUniformBlockPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( + uint32_t maxInlineUniformBlockSize_ = {}, + uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT + : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) + , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) + , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( + PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInlineUniformBlockPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockPropertiesEXT & + operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockPropertiesEXT & + operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && + ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && + ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == + rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && + ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && + ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == + rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + } + + bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( 
rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT; + void * pNext = {}; + uint32_t maxInlineUniformBlockSize = {}; + uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxDescriptorSetInlineUniformBlocks = {}; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + }; + static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == + sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockPropertiesEXT; + }; + + struct PhysicalDeviceLineRasterizationFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {} ) VULKAN_HPP_NOEXCEPT + : rectangularLines( rectangularLines_ ) + , bresenhamLines( bresenhamLines_ ) + , smoothLines( smoothLines_ ) + , stippledRectangularLines( stippledRectangularLines_ ) + , stippledBresenhamLines( stippledBresenhamLines_ ) + , stippledSmoothLines( stippledSmoothLines_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( + PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & + operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationFeaturesEXT & + operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceLineRasterizationFeaturesEXT & + setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + rectangularLines = rectangularLines_; + return *this; + } + + PhysicalDeviceLineRasterizationFeaturesEXT & + setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + bresenhamLines = bresenhamLines_; + return *this; + } + + PhysicalDeviceLineRasterizationFeaturesEXT & + setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT + { + smoothLines = smoothLines_; + return *this; + } + + PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledRectangularLines = 
stippledRectangularLines_; + return *this; + } + + PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledBresenhamLines = stippledBresenhamLines_; + return *this; + } + + PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledSmoothLines = stippledSmoothLines_; + return *this; + } + + operator VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) && + ( bresenhamLines == rhs.bresenhamLines ) && ( smoothLines == rhs.smoothLines ) && + ( stippledRectangularLines == rhs.stippledRectangularLines ) && + ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && + ( stippledSmoothLines == rhs.stippledSmoothLines ); + } + + bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; + }; + static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == + sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationFeaturesEXT; + }; + + struct PhysicalDeviceLineRasterizationPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT + : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( + PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationPropertiesEXT & + operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationPropertiesEXT & + operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ); + } + + bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; + void * pNext = {}; + uint32_t lineSubPixelPrecisionBits = {}; + }; + static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == + sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationPropertiesEXT; + }; + + struct PhysicalDeviceMaintenance3Properties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMaintenance3Properties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( + uint32_t maxPerSetDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxPerSetDescriptors( maxPerSetDescriptors_ ) + , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance3Properties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance3Properties & + operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance3Properties & + operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && + ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); + } + + bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; + void * pNext = {}; + uint32_t maxPerSetDescriptors = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; + }; + static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance3Properties; + }; + using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; + + struct PhysicalDeviceMemoryBudgetPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( + std::array const & heapBudget_ = {}, + std::array const & heapUsage_ = {} ) VULKAN_HPP_NOEXCEPT + : heapBudget( heapBudget_ ) + , heapUsage( heapUsage_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( + PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryBudgetPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT & + operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryBudgetPropertiesEXT & + operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) && + ( heapUsage == rhs.heapUsage ); + } + + bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapBudget = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapUsage = {}; + }; + static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == + sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryBudgetPropertiesEXT; + }; + + struct PhysicalDeviceMemoryPriorityFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryPriority( memoryPriority_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryPriorityFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & + operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryPriorityFeaturesEXT & + operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceMemoryPriorityFeaturesEXT & + setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT + { + memoryPriority = memoryPriority_; + return *this; + } + + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryPriority == rhs.memoryPriority ); + } + + bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {}; + }; + static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == + sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryPriorityFeaturesEXT; + }; + + struct PhysicalDeviceMeshShaderFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT + : taskShader( taskShader_ ) + , meshShader( meshShader_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & + operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderFeaturesNV & + operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT + { + taskShader = taskShader_; + return *this; + } + + PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT + { + meshShader = meshShader_; + return *this; + } + + operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && + ( meshShader == rhs.meshShader ); + } + + bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; + }; + static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderFeaturesNV; + }; + + struct PhysicalDeviceMeshShaderPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {}, + uint32_t maxTaskWorkGroupInvocations_ = {}, + std::array const & maxTaskWorkGroupSize_ = {}, + uint32_t maxTaskTotalMemorySize_ = {}, + uint32_t maxTaskOutputCount_ = {}, + uint32_t maxMeshWorkGroupInvocations_ = {}, + std::array const & maxMeshWorkGroupSize_ = {}, + uint32_t maxMeshTotalMemorySize_ = {}, + uint32_t maxMeshOutputVertices_ = {}, + uint32_t maxMeshOutputPrimitives_ = {}, + uint32_t maxMeshMultiviewViewCount_ = {}, + uint32_t meshOutputPerVertexGranularity_ = {}, + uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ) + , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) + , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ) + , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ) + , maxTaskOutputCount( maxTaskOutputCount_ ) + , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ) + , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ) + , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ) + , maxMeshOutputVertices( maxMeshOutputVertices_ ) + , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ) + , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ) + , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ) + , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV & + operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderPropertiesNV & + operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) && + ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && + ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && + ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) && + ( maxTaskOutputCount == rhs.maxTaskOutputCount ) && + ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && + ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && + ( maxMeshTotalMemorySize == 
rhs.maxMeshTotalMemorySize ) && + ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && + ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && + ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && + ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && + ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ); + } + + bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + void * pNext = {}; + uint32_t maxDrawMeshTasksCount = {}; + uint32_t maxTaskWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; + uint32_t maxTaskTotalMemorySize = {}; + uint32_t maxTaskOutputCount = {}; + uint32_t maxMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; + uint32_t maxMeshTotalMemorySize = {}; + uint32_t maxMeshOutputVertices = {}; + uint32_t maxMeshOutputPrimitives = {}; + uint32_t maxMeshMultiviewViewCount = {}; + uint32_t meshOutputPerVertexGranularity = {}; + uint32_t meshOutputPerPrimitiveGranularity = {}; + }; + static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderPropertiesNV; + }; + + struct PhysicalDeviceMultiviewFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( + VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT + : multiview( multiview_ ) + , multiviewGeometryShader( multiviewGeometryShader_ ) + , multiviewTessellationShader( multiviewTessellationShader_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & + operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT + { + multiview = multiview_; + return *this; + } + + PhysicalDeviceMultiviewFeatures & + setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewGeometryShader = multiviewGeometryShader_; + return *this; + } + + PhysicalDeviceMultiviewFeatures & + 
setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewTessellationShader = multiviewTessellationShader_; + return *this; + } + + operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) && + ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && + ( multiviewTessellationShader == rhs.multiviewTessellationShader ); + } + + bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + }; + static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewFeatures; + }; + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( + VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT + : perViewPositionAllComponents( perViewPositionAllComponents_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( + VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & + operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & + operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( perViewPositionAllComponents == rhs.perViewPositionAllComponents ); + } + + bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {}; + }; + static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == + sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + }; + + struct PhysicalDeviceMultiviewProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMultiviewProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {}, + uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : maxMultiviewViewCount( maxMultiviewViewCount_ ) + , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewProperties & + operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && + ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ); + } + + bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; + void * pNext = {}; + uint32_t maxMultiviewViewCount = {}; + uint32_t maxMultiviewInstanceIndex = {}; + }; + static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), + "struct and 
wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewProperties; + }; + using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; + + struct PhysicalDeviceMutableDescriptorTypeFeaturesVALVE + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( + VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {} ) VULKAN_HPP_NOEXCEPT + : mutableDescriptorType( mutableDescriptorType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( + PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & + operator=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & + operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & + setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT + { + mutableDescriptorType = mutableDescriptorType_; + return *this; + } + + operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & ) const = default; +#else + bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorType == rhs.mutableDescriptorType ); + } + + bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {}; + }; + static_assert( sizeof( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE ) == + sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + }; + + struct PhysicalDevicePCIBusInfoPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {}, + uint32_t pciBus_ = {}, + uint32_t pciDevice_ = {}, + uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT + : pciDomain( pciDomain_ ) + , pciBus( pciBus_ ) + , pciDevice( pciDevice_ ) + , pciFunction( pciFunction_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePCIBusInfoPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePCIBusInfoPropertiesEXT & + operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePCIBusInfoPropertiesEXT & + operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) && + ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) && ( pciFunction == rhs.pciFunction ); + } + + bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + void * pNext = {}; + uint32_t pciDomain = {}; + uint32_t pciBus = {}; + uint32_t pciDevice = {}; + uint32_t pciFunction = {}; + }; + static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDevicePCIBusInfoPropertiesEXT; + }; + + struct PhysicalDevicePerformanceQueryFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {} ) VULKAN_HPP_NOEXCEPT + : performanceCounterQueryPools( performanceCounterQueryPools_ ) + , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( + PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerformanceQueryFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & + operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryFeaturesKHR & + operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevicePerformanceQueryFeaturesKHR & + setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT + { + performanceCounterQueryPools = performanceCounterQueryPools_; + return *this; + } + + PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT + { + performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_; + return *this; + } + + operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) && + ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools ); + } + + bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {}; + }; + static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) == + sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), + "struct and wrapper have 
different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePerformanceQueryFeaturesKHR; + }; + + struct PhysicalDevicePerformanceQueryPropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( + VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {} ) VULKAN_HPP_NOEXCEPT + : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( + PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerformanceQueryPropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryPropertiesKHR & + operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryPropertiesKHR & + operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies ); + } + + bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {}; + }; + static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) == + sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDevicePerformanceQueryPropertiesKHR; + }; + + struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineCreationCacheControl( pipelineCreationCacheControl_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT( + PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT( + VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineCreationCacheControlFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeaturesEXT & + operator=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & + operator=( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & + setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCreationCacheControl = pipelineCreationCacheControl_; + return *this; + } + + operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ); + } + + bool operator!=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; + }; + static_assert( sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) == + sizeof( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineCreationCacheControlFeaturesEXT; + }; + + struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineExecutableInfo( pipelineExecutableInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( + VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT + { + pipelineExecutableInfo = pipelineExecutableInfo_; + return *this; + } + + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pipelineExecutableInfo == rhs.pipelineExecutableInfo ); + } + + bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {}; + }; + static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == + sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + }; + + struct PhysicalDevicePointClippingProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePointClippingProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT + : pointClippingBehavior( pointClippingBehavior_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePointClippingProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePointClippingProperties & + operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePointClippingProperties & + operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default; +#else + bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior ); + } + + bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + }; + static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDevicePointClippingProperties; + }; + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDevicePortabilitySubsetFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {} ) VULKAN_HPP_NOEXCEPT + : constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ ) + , events( events_ ) + , imageViewFormatReinterpretation( imageViewFormatReinterpretation_ ) + , imageViewFormatSwizzle( imageViewFormatSwizzle_ ) + , imageView2DOn3DImage( imageView2DOn3DImage_ ) + , multisampleArrayImage( multisampleArrayImage_ ) + , mutableComparisonSamplers( mutableComparisonSamplers_ ) + , pointPolygons( pointPolygons_ ) + , samplerMipLodBias( samplerMipLodBias_ ) + , separateStencilMaskRef( separateStencilMaskRef_ ) + , shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ ) + , tessellationIsolines( tessellationIsolines_ ) + , tessellationPointMode( tessellationPointMode_ ) + , triangleFans( triangleFans_ ) + , vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( + PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDevicePortabilitySubsetFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetFeaturesKHR & + operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors( + VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT + { + constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_; + return *this; + } + + 
PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT + { + events = events_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation( + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT + { + imageViewFormatReinterpretation = imageViewFormatReinterpretation_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & + setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT + { + imageViewFormatSwizzle = imageViewFormatSwizzle_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & + setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT + { + imageView2DOn3DImage = imageView2DOn3DImage_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & + setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT + { + multisampleArrayImage = multisampleArrayImage_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & + setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT + { + mutableComparisonSamplers = mutableComparisonSamplers_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & + setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT + { + pointPolygons = pointPolygons_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & + setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT + { + samplerMipLodBias = samplerMipLodBias_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & + setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT + { + separateStencilMaskRef = separateStencilMaskRef_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & + setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT + { + tessellationIsolines = tessellationIsolines_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & + setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT + { + tessellationPointMode = tessellationPointMode_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & + setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT + { + triangleFans = triangleFans_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride( + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_; + return *this; + } + + operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR 
const & ) const = default; +# else + bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) && ( events == rhs.events ) && + ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) && + ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) && + ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) && + ( multisampleArrayImage == rhs.multisampleArrayImage ) && + ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) && ( pointPolygons == rhs.pointPolygons ) && + ( samplerMipLodBias == rhs.samplerMipLodBias ) && + ( separateStencilMaskRef == rhs.separateStencilMaskRef ) && + ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) && + ( tessellationIsolines == rhs.tessellationIsolines ) && + ( tessellationPointMode == rhs.tessellationPointMode ) && ( triangleFans == rhs.triangleFans ) && + ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride ); + } + + bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {}; + VULKAN_HPP_NAMESPACE::Bool32 events = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {}; + VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {}; + }; + static_assert( sizeof( PhysicalDevicePortabilitySubsetFeaturesKHR ) == + sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDevicePortabilitySubsetFeaturesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDevicePortabilitySubsetPropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( + uint32_t minVertexInputBindingStrideAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( + PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDevicePortabilitySubsetPropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & + operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetPropertiesKHR & + operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevicePortabilitySubsetPropertiesKHR & + setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT + { + minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_; + return *this; + } + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default; +# else + bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment ); + } + + bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; + void * pNext = {}; + uint32_t minVertexInputBindingStrideAlignment = {}; + }; + static_assert( sizeof( PhysicalDevicePortabilitySubsetPropertiesKHR ) == + sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
+  {
+    using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct PhysicalDevicePrivateDataFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDevicePrivateDataFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : privateData( privateData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT( PhysicalDevicePrivateDataFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePrivateDataFeaturesEXT( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePrivateDataFeaturesEXT( *reinterpret_cast<PhysicalDevicePrivateDataFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeaturesEXT &
+      operator=( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePrivateDataFeaturesEXT &
+      operator=( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePrivateDataFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePrivateDataFeaturesEXT &
+      setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      privateData = privateData_;
+      return *this;
+    }
+
+    operator VkPhysicalDevicePrivateDataFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDevicePrivateDataFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePrivateDataFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePrivateDataFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData );
+    }
+
+    bool operator!=( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
+  };
+  static_assert( sizeof( PhysicalDevicePrivateDataFeaturesEXT ) == sizeof( VkPhysicalDevicePrivateDataFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePrivateDataFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDevicePrivateDataFeaturesEXT; + }; + + struct PhysicalDeviceProtectedMemoryFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceProtectedMemoryFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT + : protectedMemory( protectedMemory_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProtectedMemoryFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & + operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProtectedMemoryFeatures & + operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceProtectedMemoryFeatures & + setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + { + protectedMemory = protectedMemory_; + return *this; + } + + operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory ); + } + + bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; + }; + static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
+  {
+    using Type = PhysicalDeviceProtectedMemoryFeatures;
+  };
+
+  struct PhysicalDeviceProtectedMemoryProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceProtectedMemoryProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT
+      : protectedNoFault( protectedNoFault_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProtectedMemoryProperties(
+          *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryProperties &
+      operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryProperties &
+      operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault );
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+  };
+  static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) ==
+                   sizeof( VkPhysicalDeviceProtectedMemoryProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryProperties>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceProtectedMemoryProperties; + }; + + struct PhysicalDeviceProvokingVertexFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {} ) VULKAN_HPP_NOEXCEPT + : provokingVertexLast( provokingVertexLast_ ) + , transformFeedbackPreservesProvokingVertex( transformFeedbackPreservesProvokingVertex_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( + PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProvokingVertexFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & + operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexFeaturesEXT & + operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceProvokingVertexFeaturesEXT & + setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT + { + provokingVertexLast = provokingVertexLast_; + return *this; + } + + PhysicalDeviceProvokingVertexFeaturesEXT & setTransformFeedbackPreservesProvokingVertex( + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT + { + transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_; + return *this; + } + + operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexLast == rhs.provokingVertexLast ) && + ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex ); + } + + bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {}; + }; + static_assert( sizeof( PhysicalDeviceProvokingVertexFeaturesEXT ) == + sizeof( VkPhysicalDeviceProvokingVertexFeaturesEXT ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceProvokingVertexFeaturesEXT; + }; + + struct PhysicalDeviceProvokingVertexPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( + VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {} ) VULKAN_HPP_NOEXCEPT + : provokingVertexModePerPipeline( provokingVertexModePerPipeline_ ) + , transformFeedbackPreservesTriangleFanProvokingVertex( transformFeedbackPreservesTriangleFanProvokingVertex_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( + PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProvokingVertexPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexPropertiesEXT & + operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexPropertiesEXT & + operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) && + ( transformFeedbackPreservesTriangleFanProvokingVertex == + rhs.transformFeedbackPreservesTriangleFanProvokingVertex ); + } + + bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {}; + }; + static_assert( sizeof( PhysicalDeviceProvokingVertexPropertiesEXT ) == + sizeof( VkPhysicalDeviceProvokingVertexPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT>
+  {
+    using Type = PhysicalDeviceProvokingVertexPropertiesEXT;
+  };
+
+  struct PhysicalDevicePushDescriptorPropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxPushDescriptors( maxPushDescriptors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(
+      PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePushDescriptorPropertiesKHR(
+          *reinterpret_cast<PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushDescriptorPropertiesKHR &
+      operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePushDescriptorPropertiesKHR &
+      operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR *>( this );
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors );
+    }
+
+    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+    void * pNext = {};
+    uint32_t maxPushDescriptors = {};
+  };
+  static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) ==
+                   sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePushDescriptorPropertiesKHR>::value,
+                 "struct wrapper is not a standard layout!"
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorPropertiesKHR>
+  {
+    using Type = PhysicalDevicePushDescriptorPropertiesKHR;
+  };
+
+  struct PhysicalDeviceRayQueryFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceRayQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {} ) VULKAN_HPP_NOEXCEPT
+      : rayQuery( rayQuery_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR &
+      operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayQuery = rayQuery_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayQuery == rhs.rayQuery );
+    }
+
+    bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
+    void *                              pNext    = {};
+    VULKAN_HPP_NAMESPACE::Bool32        rayQuery = {};
+  };
+  static_assert( sizeof( PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRayQueryFeaturesKHR>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceRayQueryFeaturesKHR; + }; + + struct PhysicalDeviceRayTracingPipelineFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {} ) VULKAN_HPP_NOEXCEPT + : rayTracingPipeline( rayTracingPipeline_ ) + , rayTracingPipelineShaderGroupHandleCaptureReplay( rayTracingPipelineShaderGroupHandleCaptureReplay_ ) + , rayTracingPipelineShaderGroupHandleCaptureReplayMixed( rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) + , rayTracingPipelineTraceRaysIndirect( rayTracingPipelineTraceRaysIndirect_ ) + , rayTraversalPrimitiveCulling( rayTraversalPrimitiveCulling_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( + PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPipelineFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelineFeaturesKHR & + operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipeline = rayTracingPipeline_; + return *this; + } + + PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_; + return *this; + } + + PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_; + return *this; + } + + PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineTraceRaysIndirect( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_; + return *this; + } + + PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) 
VULKAN_HPP_NOEXCEPT + { + rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_; + return *this; + } + + operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPipeline == rhs.rayTracingPipeline ) && + ( rayTracingPipelineShaderGroupHandleCaptureReplay == + rhs.rayTracingPipelineShaderGroupHandleCaptureReplay ) && + ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == + rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ) && + ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect ) && + ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling ); + } + + bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {}; + }; + static_assert( sizeof( PhysicalDeviceRayTracingPipelineFeaturesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR; + }; + + struct PhysicalDeviceRayTracingPipelinePropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPipelinePropertiesKHR( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRayRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint32_t shaderGroupHandleCaptureReplaySize_ = {}, + uint32_t maxRayDispatchInvocationCount_ = {}, + uint32_t shaderGroupHandleAlignment_ = {}, + uint32_t maxRayHitAttributeSize_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderGroupHandleSize( shaderGroupHandleSize_ ) + , maxRayRecursionDepth( maxRayRecursionDepth_ ) + , maxShaderGroupStride( maxShaderGroupStride_ ) + , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) + , shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ ) + , maxRayDispatchInvocationCount( maxRayDispatchInvocationCount_ ) + , shaderGroupHandleAlignment( shaderGroupHandleAlignment_ ) + , maxRayHitAttributeSize( maxRayHitAttributeSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( + PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPipelinePropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelinePropertiesKHR & + operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelinePropertiesKHR & + operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && + ( maxRayRecursionDepth == rhs.maxRayRecursionDepth ) && + ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && + ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && + ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ) && + ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount ) && + ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment ) && + ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize ); + } + + bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; + void * pNext = 
{}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRayRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint32_t shaderGroupHandleCaptureReplaySize = {}; + uint32_t maxRayDispatchInvocationCount = {}; + uint32_t shaderGroupHandleAlignment = {}; + uint32_t maxRayHitAttributeSize = {}; + }; + static_assert( sizeof( PhysicalDeviceRayTracingPipelinePropertiesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR; + }; + + struct PhysicalDeviceRayTracingPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRayTracingPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxTriangleCount_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderGroupHandleSize( shaderGroupHandleSize_ ) + , maxRecursionDepth( maxRecursionDepth_ ) + , maxShaderGroupStride( maxShaderGroupStride_ ) + , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) + , maxGeometryCount( maxGeometryCount_ ) + , maxInstanceCount( maxInstanceCount_ ) + , maxTriangleCount( maxTriangleCount_ ) + , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPropertiesNV & + operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPropertiesNV & + operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && + ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && + ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && + ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) && + ( maxTriangleCount == rhs.maxTriangleCount ) && + ( maxDescriptorSetAccelerationStructures == 
rhs.maxDescriptorSetAccelerationStructures ); + } + + bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; + void * pNext = {}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxTriangleCount = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; + }; + static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPropertiesNV; + }; + + struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT + : representativeFragmentTest( representativeFragmentTest_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( + PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV( + VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRepresentativeFragmentTestFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT + { + representativeFragmentTest = representativeFragmentTest_; + return *this; + } + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( representativeFragmentTest == rhs.representativeFragmentTest ); + } + + bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {}; + }; + static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == + sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + }; + + struct PhysicalDeviceRobustness2FeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT + : robustBufferAccess2( robustBufferAccess2_ ) + , robustImageAccess2( robustImageAccess2_ ) + , nullDescriptor( nullDescriptor_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & + operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2FeaturesEXT & + operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceRobustness2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceRobustness2FeaturesEXT & + setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustBufferAccess2 = robustBufferAccess2_; + return *this; + } + + PhysicalDeviceRobustness2FeaturesEXT & + setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess2 = robustImageAccess2_; + return *this; + } + + PhysicalDeviceRobustness2FeaturesEXT & + setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + nullDescriptor = nullDescriptor_; + return *this; + } + + operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) && + ( robustImageAccess2 == rhs.robustImageAccess2 ) && ( nullDescriptor == rhs.nullDescriptor ); + } + + 
bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {}; + }; + static_assert( sizeof( PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRobustness2FeaturesEXT; + }; + + struct PhysicalDeviceRobustness2PropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( + VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ ) + , robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRobustness2PropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2PropertiesEXT & + operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2PropertiesEXT & + operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) && + ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment ); + } + + bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {}; + }; + static_assert( sizeof( PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( 
VkPhysicalDeviceRobustness2PropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRobustness2PropertiesEXT; + }; + + struct PhysicalDeviceSampleLocationsPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, + std::array const & sampleLocationCoordinateRange_ = {}, + uint32_t sampleLocationSubPixelBits_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleLocationSampleCounts( sampleLocationSampleCounts_ ) + , maxSampleLocationGridSize( maxSampleLocationGridSize_ ) + , sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ) + , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ) + , variableSampleLocations( variableSampleLocations_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( + PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSampleLocationsPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT & + operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSampleLocationsPropertiesEXT & + operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) && + ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) && + ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) && + ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) && + ( variableSampleLocations == rhs.variableSampleLocations ); + } + + bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D sampleLocationCoordinateRange = {}; + uint32_t sampleLocationSubPixelBits = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {}; + }; + static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == + sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSampleLocationsPropertiesEXT; + }; + + struct PhysicalDeviceSamplerFilterMinmaxProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT + : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) + , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( + PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSamplerFilterMinmaxProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerFilterMinmaxProperties & + operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerFilterMinmaxProperties & + operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ); + } + + bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + }; + static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) == + sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerFilterMinmaxProperties; + }; + using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; + + struct PhysicalDeviceSamplerYcbcrConversionFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT + : samplerYcbcrConversion( samplerYcbcrConversion_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( + PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSamplerYcbcrConversionFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & + operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerYcbcrConversionFeatures & + operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSamplerYcbcrConversionFeatures & + setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + { + samplerYcbcrConversion = samplerYcbcrConversion_; + return *this; + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ); + } + + bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + }; + static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == + sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerYcbcrConversionFeatures; + }; + using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; + + struct PhysicalDeviceScalarBlockLayoutFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} ) + VULKAN_HPP_NOEXCEPT : scalarBlockLayout( scalarBlockLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceScalarBlockLayoutFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & + operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceScalarBlockLayoutFeatures & + operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceScalarBlockLayoutFeatures & + setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + scalarBlockLayout = scalarBlockLayout_; + return *this; + } + + operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout ); + } + + bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + }; + static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) == + sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceScalarBlockLayoutFeatures; + }; + using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; + + struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {} ) VULKAN_HPP_NOEXCEPT + : separateDepthStencilLayouts( separateDepthStencilLayouts_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( + PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + { + separateDepthStencilLayouts = separateDepthStencilLayouts_; + return *this; + } + + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ); + } + + bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + }; + static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == + sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + }; + using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + + struct PhysicalDeviceShaderAtomicFloatFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ ) + , shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ ) + , shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ ) + , shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ ) + , shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ ) + , shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ ) + , shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ ) + , shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ ) + , shaderImageFloat32Atomics( shaderImageFloat32Atomics_ ) + , shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ ) + , sparseImageFloat32Atomics( sparseImageFloat32Atomics_ ) + , sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( + PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicFloatFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32AtomicAdd = 
shaderBufferFloat32AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32Atomics = shaderImageFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32Atomics = sparseImageFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_; + return *this; + } + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) && + ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) && + ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) && + ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) && + ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) && + ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) && + ( 
shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) && + ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) && + ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) && + ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) && + ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) && + ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd ); + } + + bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT; + }; + + struct PhysicalDeviceShaderAtomicInt64Features + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderAtomicInt64Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) + , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicInt64Features( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & + operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicInt64Features & + operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderAtomicInt64Features & + setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + 
shaderBufferInt64Atomics = shaderBufferInt64Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicInt64Features & + setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedInt64Atomics = shaderSharedInt64Atomics_; + return *this; + } + + operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && + ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ); + } + + bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderAtomicInt64Features ) == + sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicInt64Features; + }; + using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features; + + struct PhysicalDeviceShaderClockFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderClockFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderSubgroupClock( shaderSubgroupClock_ ) + , shaderDeviceClock( shaderDeviceClock_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & + operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderClockFeaturesKHR & + operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderClockFeaturesKHR & + setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupClock = shaderSubgroupClock_; + return *this; + } + + 
PhysicalDeviceShaderClockFeaturesKHR & + setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT + { + shaderDeviceClock = shaderDeviceClock_; + return *this; + } + + operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) && + ( shaderDeviceClock == rhs.shaderDeviceClock ); + } + + bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderClockFeaturesKHR; + }; + + struct PhysicalDeviceShaderCoreProperties2AMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( + VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, + uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderCoreFeatures( shaderCoreFeatures_ ) + , activeComputeUnitCount( activeComputeUnitCount_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCoreProperties2AMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreProperties2AMD & + operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCoreProperties2AMD & + operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreFeatures == 
rhs.shaderCoreFeatures ) && + ( activeComputeUnitCount == rhs.activeComputeUnitCount ); + } + + bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {}; + uint32_t activeComputeUnitCount = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCoreProperties2AMD; + }; + + struct PhysicalDeviceShaderCorePropertiesAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {}, + uint32_t shaderArraysPerEngineCount_ = {}, + uint32_t computeUnitsPerShaderArray_ = {}, + uint32_t simdPerComputeUnit_ = {}, + uint32_t wavefrontsPerSimd_ = {}, + uint32_t wavefrontSize_ = {}, + uint32_t sgprsPerSimd_ = {}, + uint32_t minSgprAllocation_ = {}, + uint32_t maxSgprAllocation_ = {}, + uint32_t sgprAllocationGranularity_ = {}, + uint32_t vgprsPerSimd_ = {}, + uint32_t minVgprAllocation_ = {}, + uint32_t maxVgprAllocation_ = {}, + uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderEngineCount( shaderEngineCount_ ) + , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ) + , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ) + , simdPerComputeUnit( simdPerComputeUnit_ ) + , wavefrontsPerSimd( wavefrontsPerSimd_ ) + , wavefrontSize( wavefrontSize_ ) + , sgprsPerSimd( sgprsPerSimd_ ) + , minSgprAllocation( minSgprAllocation_ ) + , maxSgprAllocation( maxSgprAllocation_ ) + , sgprAllocationGranularity( sgprAllocationGranularity_ ) + , vgprsPerSimd( vgprsPerSimd_ ) + , minVgprAllocation( minVgprAllocation_ ) + , maxVgprAllocation( maxVgprAllocation_ ) + , vgprAllocationGranularity( vgprAllocationGranularity_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCorePropertiesAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCorePropertiesAMD & + operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCorePropertiesAMD & + operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default; +#else + bool 
operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEngineCount == rhs.shaderEngineCount ) && + ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) && + ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) && + ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) && + ( wavefrontSize == rhs.wavefrontSize ) && ( sgprsPerSimd == rhs.sgprsPerSimd ) && + ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) && + ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) && ( vgprsPerSimd == rhs.vgprsPerSimd ) && + ( minVgprAllocation == rhs.minVgprAllocation ) && ( maxVgprAllocation == rhs.maxVgprAllocation ) && + ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); + } + + bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + void * pNext = {}; + uint32_t shaderEngineCount = {}; + uint32_t shaderArraysPerEngineCount = {}; + uint32_t computeUnitsPerShaderArray = {}; + uint32_t simdPerComputeUnit = {}; + uint32_t wavefrontsPerSimd = {}; + uint32_t wavefrontSize = {}; + uint32_t sgprsPerSimd = {}; + uint32_t minSgprAllocation = {}; + uint32_t maxSgprAllocation = {}; + uint32_t sgprAllocationGranularity = {}; + uint32_t vgprsPerSimd = {}; + uint32_t minVgprAllocation = {}; + uint32_t maxVgprAllocation = {}; + uint32_t vgprAllocationGranularity = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCorePropertiesAMD; + }; + + struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( + VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & + operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & + operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( + &rhs ); + return *this; + } + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; + return *this; + } + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ); + } + + bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + }; + + struct PhysicalDeviceShaderDrawParametersFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderDrawParameters( shaderDrawParameters_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( + PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderDrawParametersFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & + operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDrawParametersFeatures & + operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderDrawParametersFeatures & + setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } + + operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters ); + } + + bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == + sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDrawParametersFeatures; + }; + using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; + + struct PhysicalDeviceShaderFloat16Int8Features + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderFloat16Int8Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderFloat16( shaderFloat16_ ) + , shaderInt8( shaderInt8_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderFloat16Int8Features( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & + operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderFloat16Int8Features & + operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderFloat16Int8Features & + setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat16 = shaderFloat16_; + return *this; + } + + PhysicalDeviceShaderFloat16Int8Features & + setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt8 = shaderInt8_; + return *this; + } + + operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) && + ( shaderInt8 == rhs.shaderInt8 ); + } + + bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderFloat16Int8Features ) == + sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderFloat16Int8Features; + }; + using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + + struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderImageInt64Atomics( shaderImageInt64Atomics_ ) + , sparseImageInt64Atomics( sparseImageInt64Atomics_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageInt64Atomics = shaderImageInt64Atomics_; + return *this; + } + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageInt64Atomics = sparseImageInt64Atomics_; + return *this; + } + + operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) && + ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics ); + } + + bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {}; + }; + static_assert( sizeof( 
PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + }; + + struct PhysicalDeviceShaderImageFootprintFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT : imageFootprint( imageFootprint_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( + PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderImageFootprintFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & + operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageFootprintFeaturesNV & + operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderImageFootprintFeaturesNV & + setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT + { + imageFootprint = imageFootprint_; + return *this; + } + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint ); + } + + bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == + sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageFootprintFeaturesNV; + }; + + struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderIntegerFunctions2( shaderIntegerFunctions2_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( + VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT + { + shaderIntegerFunctions2 = shaderIntegerFunctions2_; + return *this; + } + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 ); + } + + bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == + sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + }; + + struct PhysicalDeviceShaderSMBuiltinsFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} ) + VULKAN_HPP_NOEXCEPT : shaderSMBuiltins( shaderSMBuiltins_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( + PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSMBuiltinsFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & + operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & + operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & + setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT + { + shaderSMBuiltins = shaderSMBuiltins_; + return *this; + } + + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMBuiltins == rhs.shaderSMBuiltins ); + } + + bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == + sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV; + }; + + struct PhysicalDeviceShaderSMBuiltinsPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, + uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderSMCount( shaderSMCount_ ) + , shaderWarpsPerSM( shaderWarpsPerSM_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( + PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSMBuiltinsPropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsPropertiesNV & + operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsPropertiesNV & + operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) && + ( shaderWarpsPerSM == rhs.shaderWarpsPerSM ); + } + + bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; + void * pNext = {}; + uint32_t shaderSMCount = {}; + uint32_t shaderWarpsPerSM = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == + sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV; + }; + + struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( + PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; + return *this; + } + + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ); + } + + bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == + sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + }; + using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + + struct PhysicalDeviceShaderTerminateInvocationFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderTerminateInvocation( shaderTerminateInvocation_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeaturesKHR( + PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderTerminateInvocationFeaturesKHR( + VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderTerminateInvocationFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeaturesKHR & + operator=( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderTerminateInvocationFeaturesKHR & + operator=( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderTerminateInvocationFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderTerminateInvocationFeaturesKHR & + setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderTerminateInvocation = shaderTerminateInvocation_; + return *this; + } + + operator VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ); + } + + bool operator!=( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderTerminateInvocationFeaturesKHR ) == + sizeof( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderTerminateInvocationFeaturesKHR; + }; + + struct PhysicalDeviceShadingRateImageFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRateImage( shadingRateImage_ ) + , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( + PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShadingRateImageFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & + operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShadingRateImageFeaturesNV & + operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShadingRateImageFeaturesNV & + setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateImage = shadingRateImage_; + return *this; + } + + PhysicalDeviceShadingRateImageFeaturesNV & + setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_; + return *this; + } + + operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImage == rhs.shadingRateImage ) && + ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder ); + } + + bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {}; + }; + static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == + sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShadingRateImageFeaturesNV; + }; + + struct PhysicalDeviceShadingRateImagePropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, + uint32_t shadingRatePaletteSize_ = {}, + uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRateTexelSize( shadingRateTexelSize_ ) + , shadingRatePaletteSize( shadingRatePaletteSize_ ) + , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( + PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShadingRateImagePropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImagePropertiesNV & + operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShadingRateImagePropertiesNV & + operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) && + ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) && + ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples ); + } + + bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {}; + uint32_t shadingRatePaletteSize = {}; + uint32_t shadingRateMaxCoarseSamples = {}; + }; + static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == + sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShadingRateImagePropertiesNV; + }; + + struct PhysicalDeviceSubgroupProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( + uint32_t subgroupSize_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT + : subgroupSize( subgroupSize_ ) + , supportedStages( supportedStages_ ) + , supportedOperations( supportedOperations_ ) + , quadOperationsInAllStages( quadOperationsInAllStages_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubgroupProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupProperties & + operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) && + ( supportedStages == rhs.supportedStages ) && ( supportedOperations == rhs.supportedOperations ) && + ( quadOperationsInAllStages == rhs.quadOperationsInAllStages ); + } + + bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties; + void * pNext = {}; + uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {}; + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {}; + }; + static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSubgroupProperties; + }; + + struct PhysicalDeviceSubgroupSizeControlFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT + : subgroupSizeControl( subgroupSizeControl_ ) + , computeFullSubgroups( computeFullSubgroups_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( + PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubgroupSizeControlFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeaturesEXT & + operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupSizeControlFeaturesEXT & + operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSubgroupSizeControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSubgroupSizeControlFeaturesEXT & + setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT + { + subgroupSizeControl = subgroupSizeControl_; + return *this; + } + + PhysicalDeviceSubgroupSizeControlFeaturesEXT & + setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT + { + computeFullSubgroups = computeFullSubgroups_; + return *this; + } + + operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) && + ( computeFullSubgroups == rhs.computeFullSubgroups ); + } + + bool operator!=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; + }; + static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == + sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSubgroupSizeControlFeaturesEXT; + }; + + struct PhysicalDeviceSubgroupSizeControlPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( + uint32_t minSubgroupSize_ = {}, + uint32_t maxSubgroupSize_ = {}, + uint32_t maxComputeWorkgroupSubgroups_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT + : minSubgroupSize( minSubgroupSize_ ) + , maxSubgroupSize( maxSubgroupSize_ ) + , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ) + , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( + PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubgroupSizeControlPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlPropertiesEXT & + operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupSizeControlPropertiesEXT & + operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && + ( maxSubgroupSize == rhs.maxSubgroupSize ) && + ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && + ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ); + } + + bool operator!=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT; + void * pNext = {}; + uint32_t minSubgroupSize = {}; + uint32_t maxSubgroupSize = {}; + uint32_t maxComputeWorkgroupSubgroups = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; + }; + static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == + sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSubgroupSizeControlPropertiesEXT; + }; + + struct PhysicalDeviceSynchronization2FeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSynchronization2FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2FeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {} ) VULKAN_HPP_NOEXCEPT : synchronization2( synchronization2_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2FeaturesKHR( + PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSynchronization2FeaturesKHR( VkPhysicalDeviceSynchronization2FeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSynchronization2FeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2FeaturesKHR & + operator=( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSynchronization2FeaturesKHR & + operator=( VkPhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSynchronization2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSynchronization2FeaturesKHR & + setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT + { + synchronization2 = synchronization2_; + return *this; + } + + operator VkPhysicalDeviceSynchronization2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSynchronization2FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSynchronization2FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( synchronization2 == rhs.synchronization2 ); + } + + bool operator!=( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {}; + }; + static_assert( sizeof( PhysicalDeviceSynchronization2FeaturesKHR ) == + sizeof( VkPhysicalDeviceSynchronization2FeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSynchronization2FeaturesKHR; + }; + + struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : texelBufferAlignment( texelBufferAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( + PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTexelBufferAlignmentFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & + operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & + operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & + setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT + { + texelBufferAlignment = texelBufferAlignment_; + return *this; + } + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment ); + } + + bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {}; + }; + static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == + sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
+  {
+    using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+  };
+
+  struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT(
+      VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+      : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
+      , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
+      , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
+      , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT(
+      PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTexelBufferAlignmentPropertiesEXT(
+          *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentPropertiesEXT &
+      operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTexelBufferAlignmentPropertiesEXT &
+      operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) &&
+             ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) &&
+             ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) &&
+             ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
+  };
+  static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) ==
+                   sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT>
+  {
+    using Type = PhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+  };
+
+  struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT
+      : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
+      PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
+      VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &
+      operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &
+      operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &
+      setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
+    }
+
+    bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
+  };
+  static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) ==
+                   sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ),
+                 "struct and wrapper have
different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; + }; + + struct PhysicalDeviceTimelineSemaphoreFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {} ) + VULKAN_HPP_NOEXCEPT : timelineSemaphore( timelineSemaphore_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTimelineSemaphoreFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & + operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreFeatures & + operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceTimelineSemaphoreFeatures & + setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + { + timelineSemaphore = timelineSemaphore_; + return *this; + } + + operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore ); + } + + bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + }; + static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) == + sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceTimelineSemaphoreFeatures; + }; + using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; + + struct PhysicalDeviceTimelineSemaphoreProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTimelineSemaphoreProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} ) + VULKAN_HPP_NOEXCEPT : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( + PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTimelineSemaphoreProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreProperties & + operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreProperties & + operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ); + } + + bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; + void * pNext = {}; + uint64_t maxTimelineSemaphoreValueDifference = {}; + }; + static_assert( sizeof( PhysicalDeviceTimelineSemaphoreProperties ) == + sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceTimelineSemaphoreProperties; + }; + using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; + + struct PhysicalDeviceTransformFeedbackFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT + : transformFeedback( transformFeedback_ ) + , geometryStreams( geometryStreams_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( + PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTransformFeedbackFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & + operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackFeaturesEXT & + operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceTransformFeedbackFeaturesEXT & + setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT + { + transformFeedback = transformFeedback_; + return *this; + } + + PhysicalDeviceTransformFeedbackFeaturesEXT & + setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT + { + geometryStreams = geometryStreams_; + return *this; + } + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) && + ( geometryStreams == rhs.geometryStreams ); + } + + bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; + }; + static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == + sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceTransformFeedbackFeaturesEXT; + }; + + struct PhysicalDeviceTransformFeedbackPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( + uint32_t maxTransformFeedbackStreams_ = {}, + uint32_t maxTransformFeedbackBuffers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, + uint32_t maxTransformFeedbackStreamDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataStride_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT + : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ) + , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ) + , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ) + , maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ) + , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ) + , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ) + , transformFeedbackQueries( transformFeedbackQueries_ ) + , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ) + , transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ) + , transformFeedbackDraw( transformFeedbackDraw_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( + PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTransformFeedbackPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackPropertiesEXT & + operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackPropertiesEXT & + operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) && + ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) && + ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) && + ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) && + ( 
maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) && + ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) && + ( transformFeedbackQueries == rhs.transformFeedbackQueries ) && + ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) && + ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) && + ( transformFeedbackDraw == rhs.transformFeedbackDraw ); + } + + bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + void * pNext = {}; + uint32_t maxTransformFeedbackStreams = {}; + uint32_t maxTransformFeedbackBuffers = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; + uint32_t maxTransformFeedbackStreamDataSize = {}; + uint32_t maxTransformFeedbackBufferDataSize = {}; + uint32_t maxTransformFeedbackBufferDataStride = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; + }; + static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == + sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceTransformFeedbackPropertiesEXT; + }; + + struct PhysicalDeviceUniformBufferStandardLayoutFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT + : uniformBufferStandardLayout( uniformBufferStandardLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( + PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceUniformBufferStandardLayoutFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & + operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceUniformBufferStandardLayoutFeatures & + operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceUniformBufferStandardLayoutFeatures & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + { + uniformBufferStandardLayout = uniformBufferStandardLayout_; + return 
*this; + } + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); + } + + bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + }; + static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) == + sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures; + }; + using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; + + struct PhysicalDeviceVariablePointersFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceVariablePointersFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT + : variablePointersStorageBuffer( variablePointersStorageBuffer_ ) + , variablePointers( variablePointers_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVariablePointersFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & + operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVariablePointersFeatures & + operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + { + variablePointersStorageBuffer = variablePointersStorageBuffer_; + return *this; + } + + PhysicalDeviceVariablePointersFeatures & + setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + { + variablePointers = variablePointers_; + return *this; + } + + operator 
VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && + ( variablePointers == rhs.variablePointers ); + } + + bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + }; + static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceVariablePointersFeatures; + }; + using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + + struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ) + , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( + PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeDivisorFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & + operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & + operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( + VULKAN_HPP_NAMESPACE::Bool32 
vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; + return *this; + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; + return *this; + } + + operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) && + ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ); + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; + }; + static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == + sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexAttributeDivisorFeaturesEXT; + }; + + struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT + : maxVertexAttribDivisor( maxVertexAttribDivisor_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( + PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeDivisorPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorPropertiesEXT & + operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT & + operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ); + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; + void * pNext = {}; + uint32_t maxVertexAttribDivisor = {}; + }; + static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == + sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT; + }; + + struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexInputDynamicState( vertexInputDynamicState_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( + PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexInputDynamicStateFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & + operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexInputDynamicStateFeaturesEXT & + operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVertexInputDynamicStateFeaturesEXT & + setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + vertexInputDynamicState = vertexInputDynamicState_; + return *this; + } + + operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( vertexInputDynamicState == rhs.vertexInputDynamicState ); + } + + bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState = {}; + }; + static_assert( sizeof( PhysicalDeviceVertexInputDynamicStateFeaturesEXT ) == + sizeof( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT; + }; + + struct PhysicalDeviceVulkan11Features + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT + : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) + , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) + , storagePushConstant16( storagePushConstant16_ ) + , storageInputOutput16( storageInputOutput16_ ) + , multiview( multiview_ ) + , multiviewGeometryShader( multiviewGeometryShader_ ) + , multiviewTessellationShader( multiviewTessellationShader_ ) + , variablePointersStorageBuffer( variablePointersStorageBuffer_ ) + , variablePointers( variablePointers_ ) + , protectedMemory( protectedMemory_ ) + , samplerYcbcrConversion( samplerYcbcrConversion_ ) + , shaderDrawParameters( shaderDrawParameters_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan11Features( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVulkan11Features & + setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer16BitAccess = storageBuffer16BitAccess_; + return *this; + } + + PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; + return *this; + } + + PhysicalDeviceVulkan11Features & + setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant16 = storagePushConstant16_; + return *this; + } + + PhysicalDeviceVulkan11Features & + setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + { + storageInputOutput16 = storageInputOutput16_; + return *this; + } + + 
PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT + { + multiview = multiview_; + return *this; + } + + PhysicalDeviceVulkan11Features & + setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewGeometryShader = multiviewGeometryShader_; + return *this; + } + + PhysicalDeviceVulkan11Features & + setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewTessellationShader = multiviewTessellationShader_; + return *this; + } + + PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + { + variablePointersStorageBuffer = variablePointersStorageBuffer_; + return *this; + } + + PhysicalDeviceVulkan11Features & + setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + { + variablePointers = variablePointers_; + return *this; + } + + PhysicalDeviceVulkan11Features & + setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + { + protectedMemory = protectedMemory_; + return *this; + } + + PhysicalDeviceVulkan11Features & + setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + { + samplerYcbcrConversion = samplerYcbcrConversion_; + return *this; + } + + PhysicalDeviceVulkan11Features & + setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } + + operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && + ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && + ( storagePushConstant16 == rhs.storagePushConstant16 ) && + ( storageInputOutput16 == rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) && + ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && + ( multiviewTessellationShader == rhs.multiviewTessellationShader ) && + ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && + ( variablePointers == rhs.variablePointers ) && ( protectedMemory == rhs.protectedMemory ) && + ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) && + ( shaderDrawParameters == rhs.shaderDrawParameters ); + } + + bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = 
{}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + }; + static_assert( sizeof( PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan11Features; + }; + + struct PhysicalDeviceVulkan11Properties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( + std::array const & deviceUUID_ = {}, + std::array const & driverUUID_ = {}, + std::array const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, + uint32_t subgroupSize_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, + uint32_t maxMultiviewViewCount_ = {}, + uint32_t maxMultiviewInstanceIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, + uint32_t maxPerSetDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceUUID( deviceUUID_ ) + , driverUUID( driverUUID_ ) + , deviceLUID( deviceLUID_ ) + , deviceNodeMask( deviceNodeMask_ ) + , deviceLUIDValid( deviceLUIDValid_ ) + , subgroupSize( subgroupSize_ ) + , subgroupSupportedStages( subgroupSupportedStages_ ) + , subgroupSupportedOperations( subgroupSupportedOperations_ ) + , subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ ) + , pointClippingBehavior( pointClippingBehavior_ ) + , maxMultiviewViewCount( maxMultiviewViewCount_ ) + , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) + , protectedNoFault( protectedNoFault_ ) + , maxPerSetDescriptors( maxPerSetDescriptors_ ) + , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan11Properties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties & + operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && + ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) && + ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) && + ( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) && + ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) && + ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) && + ( pointClippingBehavior == rhs.pointClippingBehavior ) && + ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && + ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) && + ( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && + ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); + } + + bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {}; + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {}; + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + uint32_t maxMultiviewViewCount = {}; + uint32_t maxMultiviewInstanceIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; + uint32_t maxPerSetDescriptors = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; + }; + static_assert( sizeof( PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan11Properties; + }; + + struct PhysicalDeviceVulkan12Features + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( + VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
shaderOutputViewportIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {} ) VULKAN_HPP_NOEXCEPT + : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ) + , drawIndirectCount( drawIndirectCount_ ) + , storageBuffer8BitAccess( storageBuffer8BitAccess_ ) + , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) + , storagePushConstant8( storagePushConstant8_ ) + , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) + , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) + , shaderFloat16( shaderFloat16_ ) + , shaderInt8( shaderInt8_ ) + , descriptorIndexing( descriptorIndexing_ ) + , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) + , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) + , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) + , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) + , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) + , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) + , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) + , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) + , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) + , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) + , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) + , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) + , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) + , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) + , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) + , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) + , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) + , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) + , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) + , runtimeDescriptorArray( runtimeDescriptorArray_ ) + , samplerFilterMinmax( samplerFilterMinmax_ ) + , scalarBlockLayout( scalarBlockLayout_ ) + , imagelessFramebuffer( imagelessFramebuffer_ ) + , uniformBufferStandardLayout( uniformBufferStandardLayout_ ) + , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) + , separateDepthStencilLayouts( separateDepthStencilLayouts_ ) + , hostQueryReset( hostQueryReset_ ) + , timelineSemaphore( timelineSemaphore_ ) + , bufferDeviceAddress( bufferDeviceAddress_ ) + , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) + , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + , vulkanMemoryModel( vulkanMemoryModel_ ) + , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) + , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) + , shaderOutputViewportIndex( shaderOutputViewportIndex_ ) + , shaderOutputLayer( shaderOutputLayer_ ) + , subgroupBroadcastDynamicId( 
subgroupBroadcastDynamicId_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan12Features( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT + { + samplerMirrorClampToEdge = samplerMirrorClampToEdge_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT + { + drawIndirectCount = drawIndirectCount_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer8BitAccess = storageBuffer8BitAccess_; + return *this; + } + + PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant8 = storagePushConstant8_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferInt64Atomics = shaderBufferInt64Atomics_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedInt64Atomics = shaderSharedInt64Atomics_; + return *this; + } + + PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat16 = shaderFloat16_; + return *this; + } + + PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt8 = shaderInt8_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT + { + descriptorIndexing = descriptorIndexing_; + return *this; + } + + PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + return *this; + } + + 
PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; + } + + PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; + return *this; + } + + PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + 
descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + return *this; + } + + PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; + return *this; + } + + PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + { + runtimeDescriptorArray = runtimeDescriptorArray_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT + { + samplerFilterMinmax = samplerFilterMinmax_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + scalarBlockLayout = scalarBlockLayout_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + { + imagelessFramebuffer = imagelessFramebuffer_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + { + uniformBufferStandardLayout = uniformBufferStandardLayout_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + { + separateDepthStencilLayouts = separateDepthStencilLayouts_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + { + hostQueryReset = hostQueryReset_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + { + timelineSemaphore = timelineSemaphore_; + return *this; + } + + 
PhysicalDeviceVulkan12Features & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModel = vulkanMemoryModel_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; + return *this; + } + + PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT + { + shaderOutputViewportIndex = shaderOutputViewportIndex_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT + { + shaderOutputLayer = shaderOutputLayer_; + return *this; + } + + PhysicalDeviceVulkan12Features & + setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT + { + subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_; + return *this; + } + + operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) && + ( drawIndirectCount == rhs.drawIndirectCount ) && + ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && + ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && + ( storagePushConstant8 == rhs.storagePushConstant8 ) && + ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && + ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) && ( shaderFloat16 == rhs.shaderFloat16 ) && + ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( 
shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == + rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == + rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == + rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == + rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) && + ( samplerFilterMinmax == rhs.samplerFilterMinmax ) && ( scalarBlockLayout == rhs.scalarBlockLayout ) && + ( imagelessFramebuffer == rhs.imagelessFramebuffer ) && + ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) && + ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) && + ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) && + ( hostQueryReset == rhs.hostQueryReset ) && ( timelineSemaphore == rhs.timelineSemaphore ) && + ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) && + ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) && + ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) && + ( shaderOutputLayer == rhs.shaderOutputLayer ) && + ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId ); + } + + bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; 
+ VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {}; + }; + static_assert( sizeof( PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
+  {
+    using Type = PhysicalDeviceVulkan12Features;
+  };
+
+  struct PhysicalDeviceVulkan12Properties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(
+      VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
+      std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
+      std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
+      VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ =
+        VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ =
+        VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
+      uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
+      uint32_t maxPerStageUpdateAfterBindResources_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
+      uint32_t
maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, + uint64_t maxTimelineSemaphoreValueDifference_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {} ) VULKAN_HPP_NOEXCEPT + : driverID( driverID_ ) + , driverName( driverName_ ) + , driverInfo( driverInfo_ ) + , conformanceVersion( conformanceVersion_ ) + , denormBehaviorIndependence( denormBehaviorIndependence_ ) + , roundingModeIndependence( roundingModeIndependence_ ) + , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) + , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) + , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) + , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) + , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) + , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) + , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) + , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) + , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) + , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) + , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) + , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) + , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) + , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) + , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) + , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) + , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) + , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) + , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) + , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) + , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) + , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) + , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) + , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) + , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) + , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) + , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) + , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) + , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffers( 
maxDescriptorSetUpdateAfterBindUniformBuffers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) + , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) + , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) + , supportedDepthResolveModes( supportedDepthResolveModes_ ) + , supportedStencilResolveModes( supportedStencilResolveModes_ ) + , independentResolveNone( independentResolveNone_ ) + , independentResolve( independentResolve_ ) + , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) + , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) + , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) + , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan12Properties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties & + operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan12Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && + ( driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) && + ( conformanceVersion == rhs.conformanceVersion ) && + ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && + ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && + ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && + ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && + ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 
) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && + ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && + ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) && + ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == + rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == + rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == + rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == + rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == + rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && + ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == + rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == + rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == + rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == + rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) && + ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && + ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && + ( independentResolveNone == rhs.independentResolveNone ) && + ( independentResolve == rhs.independentResolve ) && + ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) && + ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) && + ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts ); + } + + bool operator!=( PhysicalDeviceVulkan12Properties const & rhs 
) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
+    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence =
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence =
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
+    uint32_t maxPerStageUpdateAfterBindResources = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
+    uint64_t maxTimelineSemaphoreValueDifference = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};
+  };
+  static_assert( sizeof( PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Properties>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
+  {
+    using Type = PhysicalDeviceVulkan12Properties;
+  };
+
+  struct PhysicalDeviceVulkanMemoryModelFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(
+      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT
+      : vulkanMemoryModel( vulkanMemoryModel_ )
+      , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
+      , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkanMemoryModelFeatures(
+          *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &
+      operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkanMemoryModelFeatures &
+      operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeatures &
+      setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModel = vulkanMemoryModel_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeatures &
+      setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains(
+      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>(
PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ); + } + + bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + }; + static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) == + sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkanMemoryModelFeatures; + }; + using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; + + struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ ) + , workgroupMemoryExplicitLayoutScalarBlockLayout( workgroupMemoryExplicitLayoutScalarBlockLayout_ ) + , workgroupMemoryExplicitLayout8BitAccess( workgroupMemoryExplicitLayout8BitAccess_ ) + , workgroupMemoryExplicitLayout16BitAccess( workgroupMemoryExplicitLayout16BitAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_; + return *this; + } + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayoutScalarBlockLayout( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_; + return *this; + } + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout8BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_; + return *this; + } + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout16BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_; + return *this; + } + + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout ) && + ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout ) && + ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess ) && + ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess ); + } + + bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {}; + }; + static_assert( sizeof( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) == + sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + }; + + struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {} ) VULKAN_HPP_NOEXCEPT + : ycbcr2plane444Formats( ycbcr2plane444Formats_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT + { + ycbcr2plane444Formats = ycbcr2plane444Formats_; + return *this; + } + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats ); + } + + bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {}; + }; + static_assert( sizeof( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) == + sizeof( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + }; + + struct PhysicalDeviceYcbcrImageArraysFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT : ycbcrImageArrays( ycbcrImageArrays_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( + PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcrImageArraysFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & + operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & + operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & + setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrImageArrays = ycbcrImageArrays_; + return *this; + } + + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays ); + } + + bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {}; + }; + static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == + sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT; + }; + + struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR( + PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR( + VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & + operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & + operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & setShaderZeroInitializeWorkgroupMemory( + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT + { + shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_; + return *this; + } + + operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ); + } + + bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {}; + }; + static_assert( sizeof( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR ) == + sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; + }; + + struct PipelineColorBlendAdvancedStateCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated ) + VULKAN_HPP_NOEXCEPT + : srcPremultiplied( srcPremultiplied_ ) + , dstPremultiplied( dstPremultiplied_ ) + , blendOverlap( blendOverlap_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( + PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineColorBlendAdvancedStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAdvancedStateCreateInfoEXT & + operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT & + setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + srcPremultiplied = srcPremultiplied_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT & + setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + dstPremultiplied = dstPremultiplied_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT & + setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT + { + blendOverlap = blendOverlap_; + return *this; + } + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) && + ( dstPremultiplied == rhs.dstPremultiplied ) && ( blendOverlap == rhs.blendOverlap ); + } + + bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; + 
VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; + }; + static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == + sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineColorBlendAdvancedStateCreateInfoEXT; + }; + + struct PipelineColorWriteCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentCount( attachmentCount_ ) + , pColorWriteEnables( pColorWriteEnables_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorWriteCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorWriteCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_ ) + : attachmentCount( static_cast( colorWriteEnables_.size() ) ) + , pColorWriteEnables( colorWriteEnables_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & + operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + PipelineColorWriteCreateInfoEXT & + setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT + { + pColorWriteEnables = pColorWriteEnables_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorWriteCreateInfoEXT & setColorWriteEnables( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_ ) + VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( colorWriteEnables_.size() ); + pColorWriteEnables = colorWriteEnables_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && + ( pColorWriteEnables == 
rhs.pColorWriteEnables ); + } + + bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorWriteCreateInfoEXT; + const void * pNext = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {}; + }; + static_assert( sizeof( PipelineColorWriteCreateInfoEXT ) == sizeof( VkPipelineColorWriteCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineColorWriteCreateInfoEXT; + }; + + struct PipelineCompilerControlCreateInfoAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCompilerControlCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : compilerControlFlags( compilerControlFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCompilerControlCreateInfoAMD( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & + operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCompilerControlCreateInfoAMD & + operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT + { + compilerControlFlags = compilerControlFlags_; + return *this; + } + + operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default; +#else + bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compilerControlFlags == rhs.compilerControlFlags ); + } + + bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {}; + }; + static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineCompilerControlCreateInfoAMD; + }; + + struct PipelineCoverageModulationStateCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCoverageModulationStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, + uint32_t coverageModulationTableCount_ = {}, + const float * pCoverageModulationTable_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , coverageModulationMode( coverageModulationMode_ ) + , coverageModulationTableEnable( coverageModulationTableEnable_ ) + , coverageModulationTableCount( coverageModulationTableCount_ ) + , pCoverageModulationTable( pCoverageModulationTable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( + PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineCoverageModulationStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCoverageModulationStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_ ) + : flags( flags_ ) + , coverageModulationMode( coverageModulationMode_ ) + , coverageModulationTableEnable( coverageModulationTableEnable_ ) + , coverageModulationTableCount( static_cast( coverageModulationTable_.size() ) ) + , pCoverageModulationTable( coverageModulationTable_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageModulationStateCreateInfoNV & + operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationMode = coverageModulationMode_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableEnable 
= coverageModulationTableEnable_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableCount = coverageModulationTableCount_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & + setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + { + pCoverageModulationTable = pCoverageModulationTable_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableCount = static_cast( coverageModulationTable_.size() ); + pCoverageModulationTable = coverageModulationTable_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( coverageModulationMode == rhs.coverageModulationMode ) && + ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) && + ( coverageModulationTableCount == rhs.coverageModulationTableCount ) && + ( pCoverageModulationTable == rhs.pCoverageModulationTable ); + } + + bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone; + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {}; + uint32_t coverageModulationTableCount = {}; + const float * pCoverageModulationTable = {}; + }; + static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == + sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineCoverageModulationStateCreateInfoNV; + }; + + struct PipelineCoverageReductionStateCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCoverageReductionStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , coverageReductionMode( coverageReductionMode_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( + PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineCoverageReductionStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & + operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageReductionStateCreateInfoNV & + operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineCoverageReductionStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageReductionMode = coverageReductionMode_; + return *this; + } + + operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( coverageReductionMode == rhs.coverageReductionMode ); + } + + bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + }; + static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == + sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineCoverageReductionStateCreateInfoNV; + }; + + struct PipelineCoverageToColorStateCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCoverageToColorStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, + uint32_t coverageToColorLocation_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , coverageToColorEnable( coverageToColorEnable_ ) + , coverageToColorLocation( coverageToColorLocation_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( + PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineCoverageToColorStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & + operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageToColorStateCreateInfoNV & + operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineCoverageToColorStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineCoverageToColorStateCreateInfoNV & + setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT + { + coverageToColorEnable = coverageToColorEnable_; + return *this; + } + + PipelineCoverageToColorStateCreateInfoNV & + setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT + { + coverageToColorLocation = coverageToColorLocation_; + return *this; + } + + operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( coverageToColorEnable == rhs.coverageToColorEnable ) && + ( coverageToColorLocation == rhs.coverageToColorLocation ); + } + + bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 
coverageToColorEnable = {};
+    uint32_t                     coverageToColorLocation = {};
+  };
+  static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) ==
+                   sizeof( VkPipelineCoverageToColorStateCreateInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCoverageToColorStateCreateInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
+  {
+    using Type = PipelineCoverageToColorStateCreateInfoNV;
+  };
+
+  struct PipelineCreationFeedbackEXT
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PipelineCreationFeedbackEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {},
+                                   uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , duration( duration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      PipelineCreationFeedbackEXT( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCreationFeedbackEXT( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCreationFeedbackEXT( *reinterpret_cast<PipelineCreationFeedbackEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackEXT &
+      operator=( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCreationFeedbackEXT & operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPipelineCreationFeedbackEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedbackEXT *>( this );
+    }
+
+    operator VkPipelineCreationFeedbackEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedbackEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineCreationFeedbackEXT const & ) const = default;
+#else
+    bool operator==( PipelineCreationFeedbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( flags == rhs.flags ) && ( duration == rhs.duration );
+    }
+
+    bool operator!=( PipelineCreationFeedbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags    = {};
+    uint64_t                                               duration = {};
+  };
+  static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCreationFeedbackEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + struct PipelineCreationFeedbackCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCreationFeedbackCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback_ = {}, + uint32_t pipelineStageCreationFeedbackCount_ = {}, + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks_ = {} ) VULKAN_HPP_NOEXCEPT + : pPipelineCreationFeedback( pPipelineCreationFeedback_ ) + , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ) + , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( PipelineCreationFeedbackCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineCreationFeedbackCreateInfoEXT( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreationFeedbackCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCreationFeedbackCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pipelineStageCreationFeedbacks_ ) + : pPipelineCreationFeedback( pPipelineCreationFeedback_ ) + , pipelineStageCreationFeedbackCount( static_cast( pipelineStageCreationFeedbacks_.size() ) ) + , pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfoEXT & + operator=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCreationFeedbackCreateInfoEXT & + operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineCreationFeedback = pPipelineCreationFeedback_; + return *this; + } + + PipelineCreationFeedbackCreateInfoEXT & + setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_; + return *this; + } + + PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbacks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStageCreationFeedbackCount = static_cast( pipelineStageCreationFeedbacks_.size() ); + pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator 
VkPipelineCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreationFeedbackCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) && + ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) && + ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks ); + } + + bool operator!=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback = {}; + uint32_t pipelineStageCreationFeedbackCount = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks = {}; + }; + static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineCreationFeedbackCreateInfoEXT; + }; + + struct PipelineDiscardRectangleStateCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, + uint32_t discardRectangleCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , discardRectangleMode( discardRectangleMode_ ) + , discardRectangleCount( discardRectangleCount_ ) + , pDiscardRectangles( pDiscardRectangles_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( + PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineDiscardRectangleStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDiscardRectangleStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_ ) + : flags( flags_ ) + , discardRectangleMode( discardRectangleMode_ ) + , discardRectangleCount( static_cast( discardRectangles_.size() ) ) + , pDiscardRectangles( discardRectangles_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 
PipelineDiscardRectangleStateCreateInfoEXT & + operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDiscardRectangleStateCreateInfoEXT & + operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT + { + discardRectangleMode = discardRectangleMode_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT + { + discardRectangleCount = discardRectangleCount_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT & + setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT + { + pDiscardRectangles = pDiscardRectangles_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_ ) + VULKAN_HPP_NOEXCEPT + { + discardRectangleCount = static_cast( discardRectangles_.size() ); + pDiscardRectangles = discardRectangles_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( discardRectangleMode == rhs.discardRectangleMode ) && + ( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles ); + } + + bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive; + uint32_t discardRectangleCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {}; + }; + static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == + sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineDiscardRectangleStateCreateInfoEXT; + }; + + struct PipelineFragmentShadingRateEnumStateCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize, + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel, + std::array const & + combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, + VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } } ) VULKAN_HPP_NOEXCEPT + : shadingRateType( shadingRateType_ ) + , shadingRate( shadingRate_ ) + , combinerOps( combinerOps_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( + PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineFragmentShadingRateEnumStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & + operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateEnumStateCreateInfoNV & + operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineFragmentShadingRateEnumStateCreateInfoNV & + setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateType = shadingRateType_; + return *this; + } + + PipelineFragmentShadingRateEnumStateCreateInfoNV & + setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + { + shadingRate = shadingRate_; + return *this; + } + + PipelineFragmentShadingRateEnumStateCreateInfoNV & setCombinerOps( + std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT + { + combinerOps = combinerOps_; + return *this; + } + + operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateType == rhs.shadingRateType ) && + ( shadingRate == rhs.shadingRate ) && ( combinerOps == rhs.combinerOps ); + } + + bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize; + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; + }; + static_assert( sizeof( PipelineFragmentShadingRateEnumStateCreateInfoNV ) == + sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV; + }; + + struct PipelineFragmentShadingRateStateCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, + std::array const & + combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, + VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } } ) VULKAN_HPP_NOEXCEPT + : fragmentSize( fragmentSize_ ) + , combinerOps( combinerOps_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( + PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineFragmentShadingRateStateCreateInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & + operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateStateCreateInfoKHR & + operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineFragmentShadingRateStateCreateInfoKHR & + setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT + { + fragmentSize = fragmentSize_; + return *this; + } + + PipelineFragmentShadingRateStateCreateInfoKHR & setCombinerOps( + std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT + { + combinerOps = combinerOps_; + return *this; + } + + operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) && + ( combinerOps == rhs.combinerOps ); + } 
+ + bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; + }; + static_assert( sizeof( PipelineFragmentShadingRateStateCreateInfoKHR ) == + sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineFragmentShadingRateStateCreateInfoKHR; + }; + + struct PipelineRasterizationConservativeStateCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, + float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , conservativeRasterizationMode( conservativeRasterizationMode_ ) + , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( + PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationConservativeStateCreateInfoEXT( + VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationConservativeStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationConservativeStateCreateInfoEXT & + operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationConservativeStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + conservativeRasterizationMode = conservativeRasterizationMode_; + return *this; + } + + PipelineRasterizationConservativeStateCreateInfoEXT & + setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT + { + extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_; + return *this; + } + + operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) && + ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize ); + } + + bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled; + float extraPrimitiveOverestimationSize = {}; + }; + static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineRasterizationConservativeStateCreateInfoEXT; + }; + + struct PipelineRasterizationDepthClipStateCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , depthClipEnable( depthClipEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( + PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineRasterizationDepthClipStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & + operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationDepthClipStateCreateInfoEXT & + operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationDepthClipStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + 
PipelineRasterizationDepthClipStateCreateInfoEXT & + setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClipEnable = depthClipEnable_; + return *this; + } + + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( depthClipEnable == rhs.depthClipEnable ); + } + + bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; + static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineRasterizationDepthClipStateCreateInfoEXT; + }; + + struct PipelineRasterizationLineStateCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationLineStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, + uint32_t lineStippleFactor_ = {}, + uint16_t lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT + : lineRasterizationMode( lineRasterizationMode_ ) + , stippledLineEnable( stippledLineEnable_ ) + , lineStippleFactor( lineStippleFactor_ ) + , lineStipplePattern( lineStipplePattern_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( + PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineRasterizationLineStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & + operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationLineStateCreateInfoEXT & + operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationLineStateCreateInfoEXT 
& setLineRasterizationMode( + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + lineRasterizationMode = lineRasterizationMode_; + return *this; + } + + PipelineRasterizationLineStateCreateInfoEXT & + setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT + { + stippledLineEnable = stippledLineEnable_; + return *this; + } + + PipelineRasterizationLineStateCreateInfoEXT & + setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT + { + lineStippleFactor = lineStippleFactor_; + return *this; + } + + PipelineRasterizationLineStateCreateInfoEXT & + setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT + { + lineStipplePattern = lineStipplePattern_; + return *this; + } + + operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( lineRasterizationMode == rhs.lineRasterizationMode ) && + ( stippledLineEnable == rhs.stippledLineEnable ) && ( lineStippleFactor == rhs.lineStippleFactor ) && + ( lineStipplePattern == rhs.lineStipplePattern ); + } + + bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault; + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; + uint32_t lineStippleFactor = {}; + uint16_t lineStipplePattern = {}; + }; + static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineRasterizationLineStateCreateInfoEXT; + }; + + struct PipelineRasterizationProvokingVertexStateCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex ) VULKAN_HPP_NOEXCEPT + : provokingVertexMode( provokingVertexMode_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( + PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationProvokingVertexStateCreateInfoEXT( + VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationProvokingVertexStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & + operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationProvokingVertexStateCreateInfoEXT & + operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationProvokingVertexStateCreateInfoEXT & + setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT + { + provokingVertexMode = provokingVertexMode_; + return *this; + } + + operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexMode == rhs.provokingVertexMode ); + } + + bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex; + }; + static_assert( sizeof( PipelineRasterizationProvokingVertexStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT; + }; + + struct PipelineRasterizationStateRasterizationOrderAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationStateRasterizationOrderAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT + : rasterizationOrder( rasterizationOrder_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( + PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateRasterizationOrderAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & + operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateRasterizationOrderAMD & + operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationStateRasterizationOrderAMD & + setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrder = rasterizationOrder_; + return *this; + } + + operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default; +#else + bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrder == rhs.rasterizationOrder ); + } + + bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict; + }; + static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == + sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineRasterizationStateRasterizationOrderAMD; + }; + + struct PipelineRasterizationStateStreamCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, + uint32_t rasterizationStream_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , rasterizationStream( rasterizationStream_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( + PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateStreamCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & + operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateStreamCreateInfoEXT & + operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationStateStreamCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineRasterizationStateStreamCreateInfoEXT & + setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationStream = rasterizationStream_; + return *this; + } + + operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( rasterizationStream == rhs.rasterizationStream ); + } + + bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; + uint32_t rasterizationStream = {}; + }; + static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == + sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineRasterizationStateStreamCreateInfoEXT; + }; + + struct PipelineRepresentativeFragmentTestStateCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : representativeFragmentTestEnable( representativeFragmentTestEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( + PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRepresentativeFragmentTestStateCreateInfoNV( + VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRepresentativeFragmentTestStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & + operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRepresentativeFragmentTestStateCreateInfoNV & + operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + representativeFragmentTestEnable = representativeFragmentTestEnable_; + return *this; + } + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); + } + + bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; + }; + static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == + sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV; + }; + + struct PipelineSampleLocationsStateCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleLocationsEnable( sampleLocationsEnable_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( + PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineSampleLocationsStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & + operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineSampleLocationsStateCreateInfoEXT & + operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT & + setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsEnable = sampleLocationsEnable_; + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } + + operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sampleLocationsEnable == rhs.sampleLocationsEnable ) && + ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + } + + bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == + sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineSampleLocationsStateCreateInfoEXT; + }; + + struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + : requiredSubgroupSize( requiredSubgroupSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( + VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & + operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & + operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize ); + } + + bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + void * pNext = {}; + uint32_t requiredSubgroupSize = {}; + }; + static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == + sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + }; + + struct PipelineTessellationDomainOriginStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineTessellationDomainOriginStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT : domainOrigin( domainOrigin_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( + PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineTessellationDomainOriginStateCreateInfo( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & + operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationDomainOriginStateCreateInfo & + operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineTessellationDomainOriginStateCreateInfo & + setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT + { + domainOrigin = domainOrigin_; + return *this; + } + + operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin ); + } + + bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft; + }; + static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == + sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
+  {
+    using Type = PipelineTessellationDomainOriginStateCreateInfo;
+  };
+  using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
+
+  struct VertexInputBindingDivisorDescriptionEXT
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = {},
+                                                                  uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
+      : binding( binding_ )
+      , divisor( divisor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VertexInputBindingDivisorDescriptionEXT(
+          *reinterpret_cast<VertexInputBindingDivisorDescriptionEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT &
+      operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VertexInputBindingDivisorDescriptionEXT &
+      operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>( &rhs );
+      return *this;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      divisor = divisor_;
+      return *this;
+    }
+
+    operator VkVertexInputBindingDivisorDescriptionEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT *>( this );
+    }
+
+    operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VertexInputBindingDivisorDescriptionEXT const & ) const = default;
+#else
+    bool operator==( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( binding == rhs.binding ) && ( divisor == rhs.divisor );
+    }
+
+    bool operator!=( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t binding = {};
+    uint32_t divisor = {};
+  };
+  static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) ==
+                   sizeof( VkVertexInputBindingDivisorDescriptionEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VertexInputBindingDivisorDescriptionEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + struct PipelineVertexInputDivisorStateCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( + uint32_t vertexBindingDivisorCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ = {} ) + VULKAN_HPP_NOEXCEPT + : vertexBindingDivisorCount( vertexBindingDivisorCount_ ) + , pVertexBindingDivisors( pVertexBindingDivisors_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( + PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineVertexInputDivisorStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ ) + : vertexBindingDivisorCount( static_cast( vertexBindingDivisors_.size() ) ) + , pVertexBindingDivisors( vertexBindingDivisors_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & + operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputDivisorStateCreateInfoEXT & + operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineVertexInputDivisorStateCreateInfoEXT & + setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDivisorCount = vertexBindingDivisorCount_; + return *this; + } + + PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ ) + VULKAN_HPP_NOEXCEPT + { + pVertexBindingDivisors = pVertexBindingDivisors_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ ) + VULKAN_HPP_NOEXCEPT + { + vertexBindingDivisorCount = static_cast( vertexBindingDivisors_.size() ); + pVertexBindingDivisors = vertexBindingDivisors_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineVertexInputDivisorStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) && + ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); + } + + bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; + const void * pNext = {}; + uint32_t vertexBindingDivisorCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors = {}; + }; + static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == + sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineVertexInputDivisorStateCreateInfoEXT; + }; + + struct PipelineViewportCoarseSampleOrderStateCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, + uint32_t customSampleOrderCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleOrderType( sampleOrderType_ ) + , customSampleOrderCount( customSampleOrderCount_ ) + , pCustomSampleOrders( pCustomSampleOrders_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( + PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportCoarseSampleOrderStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + customSampleOrders_ ) + : sampleOrderType( sampleOrderType_ ) + , customSampleOrderCount( static_cast( customSampleOrders_.size() ) ) + , pCustomSampleOrders( customSampleOrders_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT + { + sampleOrderType = sampleOrderType_; + return *this; + 
} + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT + { + customSampleOrderCount = customSampleOrderCount_; + return *this; + } + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT + { + pCustomSampleOrders = pCustomSampleOrders_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + customSampleOrders_ ) VULKAN_HPP_NOEXCEPT + { + customSampleOrderCount = static_cast( customSampleOrders_.size() ); + pCustomSampleOrders = customSampleOrders_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleOrderType == rhs.sampleOrderType ) && + ( customSampleOrderCount == rhs.customSampleOrderCount ) && + ( pCustomSampleOrders == rhs.pCustomSampleOrders ); + } + + bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; + uint32_t customSampleOrderCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {}; + }; + static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == + sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV; + }; + + struct PipelineViewportExclusiveScissorStateCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( + uint32_t exclusiveScissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {} ) VULKAN_HPP_NOEXCEPT + : exclusiveScissorCount( exclusiveScissorCount_ ) + , pExclusiveScissors( pExclusiveScissors_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( + PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineViewportExclusiveScissorStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportExclusiveScissorStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) + : exclusiveScissorCount( static_cast( exclusiveScissors_.size() ) ) + , pExclusiveScissors( exclusiveScissors_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & + operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportExclusiveScissorStateCreateInfoNV & + operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineViewportExclusiveScissorStateCreateInfoNV & + setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissorCount = exclusiveScissorCount_; + return *this; + } + + PipelineViewportExclusiveScissorStateCreateInfoNV & + setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT + { + pExclusiveScissors = pExclusiveScissors_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) + VULKAN_HPP_NOEXCEPT + { + exclusiveScissorCount = static_cast( exclusiveScissors_.size() ); + pExclusiveScissors = exclusiveScissors_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( 
exclusiveScissorCount == rhs.exclusiveScissorCount ) && ( pExclusiveScissors == rhs.pExclusiveScissors ); + } + + bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + const void * pNext = {}; + uint32_t exclusiveScissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {}; + }; + static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == + sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineViewportExclusiveScissorStateCreateInfoNV; + }; + + struct PipelineViewportShadingRateImageStateCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRateImageEnable( shadingRateImageEnable_ ) + , viewportCount( viewportCount_ ) + , pShadingRatePalettes( pShadingRatePalettes_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( + PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineViewportShadingRateImageStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportShadingRateImageStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + shadingRatePalettes_ ) + : shadingRateImageEnable( shadingRateImageEnable_ ) + , viewportCount( static_cast( shadingRatePalettes_.size() ) ) + , pShadingRatePalettes( shadingRatePalettes_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & + operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportShadingRateImageStateCreateInfoNV & + operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineViewportShadingRateImageStateCreateInfoNV & + setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateImageEnable = shadingRateImageEnable_; + return *this; + } + + PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + 
PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + { + pShadingRatePalettes = pShadingRatePalettes_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( shadingRatePalettes_.size() ); + pShadingRatePalettes = shadingRatePalettes_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shadingRateImageEnable == rhs.shadingRateImageEnable ) && ( viewportCount == rhs.viewportCount ) && + ( pShadingRatePalettes == rhs.pShadingRatePalettes ); + } + + bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {}; + }; + static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == + sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV>
+  {
+    using Type = PipelineViewportShadingRateImageStateCreateInfoNV;
+  };
+
+  struct ViewportSwizzleNV
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ =
+                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                         VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ =
+                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                         VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ =
+                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                         VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ =
+                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+      , w( w_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ViewportSwizzleNV( *reinterpret_cast<ViewportSwizzleNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV &
+      operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>( &rhs );
+      return *this;
+    }
+
+    ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
+    {
+      w = w_;
+      return *this;
+    }
+
+    operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewportSwizzleNV *>( this );
+    }
+
+    operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewportSwizzleNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ViewportSwizzleNV const & ) const = default;
+#else
+    bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ) && ( w == rhs.w );
+    }
+
+    bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+  };
+  static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!"
); + + struct PipelineViewportSwizzleStateCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , viewportCount( viewportCount_ ) + , pViewportSwizzles( pViewportSwizzles_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( + PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineViewportSwizzleStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportSwizzleStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + viewportSwizzles_ ) + : flags( flags_ ) + , viewportCount( static_cast( viewportSwizzles_.size() ) ) + , pViewportSwizzles( viewportSwizzles_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & + operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportSwizzleStateCreateInfoNV & + operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineViewportSwizzleStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + PipelineViewportSwizzleStateCreateInfoNV & + setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT + { + pViewportSwizzles = pViewportSwizzles_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewportSwizzles_.size() ); + pViewportSwizzles = viewportSwizzles_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( flags == rhs.flags ) && + ( viewportCount == rhs.viewportCount ) && ( pViewportSwizzles == rhs.pViewportSwizzles ); + } + + bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {}; + }; + static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == + sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineViewportSwizzleStateCreateInfoNV; + }; + + struct PipelineViewportWScalingStateCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportWScalingStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {} ) VULKAN_HPP_NOEXCEPT + : viewportWScalingEnable( viewportWScalingEnable_ ) + , viewportCount( viewportCount_ ) + , pViewportWScalings( pViewportWScalings_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( + PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineViewportWScalingStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportWScalingStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + viewportWScalings_ ) + : viewportWScalingEnable( viewportWScalingEnable_ ) + , viewportCount( static_cast( viewportWScalings_.size() ) ) + , pViewportWScalings( viewportWScalings_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & + operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportWScalingStateCreateInfoNV & + operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineViewportWScalingStateCreateInfoNV & + setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT + { + viewportWScalingEnable = viewportWScalingEnable_; + return *this; + } + + PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + PipelineViewportWScalingStateCreateInfoNV & + setPViewportWScalings( const 
VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT + { + pViewportWScalings = pViewportWScalings_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + viewportWScalings_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewportWScalings_.size() ); + pViewportWScalings = viewportWScalings_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( viewportWScalingEnable == rhs.viewportWScalingEnable ) && ( viewportCount == rhs.viewportCount ) && + ( pViewportWScalings == rhs.pViewportWScalings ); + } + + bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {}; + }; + static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == + sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineViewportWScalingStateCreateInfoNV; + }; + +#if defined( VK_USE_PLATFORM_GGP ) + struct PresentFrameTokenGGP + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {} ) VULKAN_HPP_NOEXCEPT + : frameToken( frameToken_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentFrameTokenGGP( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & + operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT + { + frameToken = frameToken_; + return *this; + } + + operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentFrameTokenGGP const & ) const = default; +# else + bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 ); + } + + bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP; + const void * pNext = {}; + GgpFrameToken frameToken = {}; + }; + static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PresentFrameTokenGGP; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + + struct RectLayerKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, + uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT + : offset( offset_ ) + , extent( extent_ ) + , layer( layer_ ) + {} + + VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RectLayerKHR( *reinterpret_cast( &rhs ) ) + {} + + explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} ) + : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT + { + layer = layer_; + return *this; + } + + operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RectLayerKHR const & ) const = default; +#else + bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer ); + } + + bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Offset2D offset = {}; + VULKAN_HPP_NAMESPACE::Extent2D extent = {}; + uint32_t layer = {}; + }; + static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PresentRegionKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PresentRegionKHR( uint32_t rectangleCount_ = {}, + const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + : rectangleCount( rectangleCount_ ) + , pRectangles( pRectangles_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentRegionKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) + : rectangleCount( static_cast( rectangles_.size() ) ), pRectangles( rectangles_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT + { + rectangleCount = rectangleCount_; + return *this; + } + + PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT + { + pRectangles = pRectangles_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionKHR & setRectangles( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) + VULKAN_HPP_NOEXCEPT + { + rectangleCount = static_cast( rectangles_.size() ); + pRectangles = rectangles_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentRegionKHR const & ) const = default; +#else + bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles ); + } + + bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t rectangleCount = {}; + const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {}; + }; + static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PresentRegionsKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PresentRegionsKHR( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : swapchainCount( swapchainCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentRegionsKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionsKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : swapchainCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & + operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionsKHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentRegionsKHR const & ) const = default; +#else + bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && + ( pRegions == rhs.pRegions ); + } + + bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {}; + }; + static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
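  // Usage sketch: how RectLayerKHR, PresentRegionKHR and PresentRegionsKHR (VK_KHR_incremental_present)
  // compose. vk::Offset2D, vk::Extent2D and vk::PresentInfoKHR are assumed from elsewhere in this
  // header, and the values are placeholders.
  //
  //   vk::RectLayerKHR      dirtyRect( vk::Offset2D( 0, 0 ), vk::Extent2D( 256, 256 ), 0 /*layer*/ );
  //   vk::PresentRegionKHR  region( 1, &dirtyRect );      // rectangleCount, pRectangles
  //   vk::PresentRegionsKHR regions;
  //   regions.setSwapchainCount( 1 )                      // must match the present call's swapchainCount
  //          .setPRegions( &region );
  //   // presentInfo.setPNext( &regions );                // chain into the vk::PresentInfoKHR used for presentKHR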
  );

  template <>
  struct CppType<StructureType, StructureType::ePresentRegionsKHR>
  {
    using Type = PresentRegionsKHR;
  };

  struct PresentTimeGOOGLE
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {},
                                            uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT
      : presentID( presentID_ )
      , desiredPresentTime( desiredPresentTime_ )
    {}

    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentTimeGOOGLE( *reinterpret_cast<PresentTimeGOOGLE const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE &
      operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>( &rhs );
      return *this;
    }

    PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT
    {
      presentID = presentID_;
      return *this;
    }

    PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
    {
      desiredPresentTime = desiredPresentTime_;
      return *this;
    }

    operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentTimeGOOGLE *>( this );
    }

    operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentTimeGOOGLE *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentTimeGOOGLE const & ) const = default;
#else
    bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime );
    }

    bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t presentID          = {};
    uint64_t desiredPresentTime = {};
  };
  static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!"
); + + struct PresentTimesInfoGOOGLE + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {} ) VULKAN_HPP_NOEXCEPT + : swapchainCount( swapchainCount_ ) + , pTimes( pTimes_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentTimesInfoGOOGLE( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentTimesInfoGOOGLE( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_ ) + : swapchainCount( static_cast( times_.size() ) ), pTimes( times_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & + operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT + { + pTimes = pTimes_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentTimesInfoGOOGLE & setTimes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_ ) + VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( times_.size() ); + pTimes = times_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default; +#else + bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && + ( pTimes == rhs.pTimes ); + } + + bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {}; + }; + static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
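  // Usage sketch: pairing PresentTimeGOOGLE with PresentTimesInfoGOOGLE (VK_GOOGLE_display_timing),
  // one entry per swapchain in the present call. frameIndex / targetTimeNs are placeholders and
  // vk::PresentInfoKHR is assumed from elsewhere in this header.
  //
  //   vk::PresentTimeGOOGLE timing;
  //   timing.setPresentID( frameIndex )                   // application-chosen id, reported back in timing feedback
  //         .setDesiredPresentTime( targetTimeNs );       // earliest acceptable present time, in nanoseconds
  //   vk::PresentTimesInfoGOOGLE timesInfo( 1, &timing ); // swapchainCount, pTimes
  //   // presentInfo.setPNext( &timesInfo );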
); + + template <> + struct CppType + { + using Type = PresentTimesInfoGOOGLE; + }; + + struct ProtectedSubmitInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {} ) VULKAN_HPP_NOEXCEPT + : protectedSubmit( protectedSubmit_ ) + {} + + VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ProtectedSubmitInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & + operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT + { + protectedSubmit = protectedSubmit_; + return *this; + } + + operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ProtectedSubmitInfo const & ) const = default; +#else + bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit ); + } + + bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {}; + }; + static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
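  // Usage sketch: ProtectedSubmitInfo only carries a flag and is consumed through the pNext chain of
  // a queue submission; vk::SubmitInfo, the queue and the fence are assumed from elsewhere in this header.
  //
  //   vk::ProtectedSubmitInfo protectedSubmit;
  //   protectedSubmit.setProtectedSubmit( VK_TRUE );
  //   // submitInfo.setPNext( &protectedSubmit );
  //   // queue.submit( submitInfo, fence );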
); + + template <> + struct CppType + { + using Type = ProtectedSubmitInfo; + }; + + struct QueryPoolPerformanceQueryCreateInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT + : performanceCountersSampling( performanceCountersSampling_ ) + {} + + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( + QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) + VULKAN_HPP_NOEXCEPT + : QueryPoolPerformanceQueryCreateInfoINTEL( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & + operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceQueryCreateInfoINTEL & + operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT + { + performanceCountersSampling = performanceCountersSampling_; + return *this; + } + + operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default; +#else + bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( performanceCountersSampling == rhs.performanceCountersSampling ); + } + + bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; + }; + static_assert( sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) == + sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = QueryPoolPerformanceQueryCreateInfoINTEL; + }; + using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; + + struct QueueFamilyCheckpointProperties2NV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueueFamilyCheckpointProperties2Nv; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT + : checkpointExecutionStageMask( checkpointExecutionStageMask_ ) + {} + + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyCheckpointProperties2NV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 QueueFamilyCheckpointProperties2NV & + operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointProperties2NV & + operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default; +#else + bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); + } + + bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2Nv; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR checkpointExecutionStageMask = {}; + }; + static_assert( sizeof( QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
  );

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2Nv>
  {
    using Type = QueueFamilyCheckpointProperties2NV;
  };

  struct QueueFamilyCheckpointPropertiesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eQueueFamilyCheckpointPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(
      VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
    {}

    VULKAN_HPP_CONSTEXPR
    QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyCheckpointPropertiesNV( *reinterpret_cast<QueueFamilyCheckpointPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyCheckpointPropertiesNV &
      operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>( &rhs );
      return *this;
    }

    operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV *>( this );
    }

    operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default;
#else
    bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
    }

    bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
    void *                                   pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
  };
  static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<QueueFamilyCheckpointPropertiesNV>::value,
                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = QueueFamilyCheckpointPropertiesNV; + }; + + struct RenderPassAttachmentBeginInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR + RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassAttachmentBeginInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassAttachmentBeginInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) + : attachmentCount( static_cast( attachments_.size() ) ), pAttachments( attachments_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & + operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + RenderPassAttachmentBeginInfo & + setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassAttachmentBeginInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) + VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default; +#else + bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ); + } + + bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; + const void * pNext = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + }; + static_assert( sizeof( RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), + "struct and wrapper have different size!" 
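  // Usage sketch: RenderPassAttachmentBeginInfo supplies the concrete image views when beginning a
  // render pass that uses an imageless framebuffer. The ArrayProxy constructor above derives
  // attachmentCount from the container size; colorView / depthView and vk::RenderPassBeginInfo are
  // assumed from elsewhere.
  //
  //   std::array<vk::ImageView, 2> views = { colorView, depthView };
  //   vk::RenderPassAttachmentBeginInfo attachmentInfo( views );
  //   // renderPassBeginInfo.setPNext( &attachmentInfo );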
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RenderPassAttachmentBeginInfo; + }; + using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; + + struct RenderPassFragmentDensityMapCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( + VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( + RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : RenderPassFragmentDensityMapCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & + operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassFragmentDensityMapCreateInfoEXT & + operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( + VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapAttachment = fragmentDensityMapAttachment_; + return *this; + } + + operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default; +#else + bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment ); + } + + bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {}; + }; + static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == + sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
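  // Usage sketch: the fragment density map is referenced by its index in the render pass' attachment
  // list. vk::AttachmentReference and vk::RenderPassCreateInfo are assumed from elsewhere in this header.
  //
  //   vk::AttachmentReference densityMapRef( 1 /*attachment*/, vk::ImageLayout::eFragmentDensityMapOptimalEXT );
  //   vk::RenderPassFragmentDensityMapCreateInfoEXT densityMapInfo( densityMapRef );
  //   // renderPassCreateInfo.setPNext( &densityMapInfo );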
); + + template <> + struct CppType + { + using Type = RenderPassFragmentDensityMapCreateInfoEXT; + }; + + struct RenderPassInputAttachmentAspectCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRenderPassInputAttachmentAspectCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( + uint32_t aspectReferenceCount_ = {}, + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectReferenceCount( aspectReferenceCount_ ) + , pAspectReferences( pAspectReferences_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( + RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : RenderPassInputAttachmentAspectCreateInfo( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassInputAttachmentAspectCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + aspectReferences_ ) + : aspectReferenceCount( static_cast( aspectReferences_.size() ) ) + , pAspectReferences( aspectReferences_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & + operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassInputAttachmentAspectCreateInfo & + operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassInputAttachmentAspectCreateInfo & + setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT + { + aspectReferenceCount = aspectReferenceCount_; + return *this; + } + + RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT + { + pAspectReferences = pAspectReferences_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + aspectReferences_ ) VULKAN_HPP_NOEXCEPT + { + aspectReferenceCount = static_cast( aspectReferences_.size() ); + pAspectReferences = aspectReferences_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default; +#else + bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) && + ( pAspectReferences == rhs.pAspectReferences ); + } + + bool operator!=( 
RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; + const void * pNext = {}; + uint32_t aspectReferenceCount = {}; + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {}; + }; + static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == + sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RenderPassInputAttachmentAspectCreateInfo; + }; + using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + + struct RenderPassMultiviewCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {}, + const uint32_t * pViewMasks_ = {}, + uint32_t dependencyCount_ = {}, + const int32_t * pViewOffsets_ = {}, + uint32_t correlationMaskCount_ = {}, + const uint32_t * pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT + : subpassCount( subpassCount_ ) + , pViewMasks( pViewMasks_ ) + , dependencyCount( dependencyCount_ ) + , pViewOffsets( pViewOffsets_ ) + , correlationMaskCount( correlationMaskCount_ ) + , pCorrelationMasks( pCorrelationMasks_ ) + {} + + VULKAN_HPP_CONSTEXPR + RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassMultiviewCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ = {} ) + : subpassCount( static_cast( viewMasks_.size() ) ) + , pViewMasks( viewMasks_.data() ) + , dependencyCount( static_cast( viewOffsets_.size() ) ) + , pViewOffsets( viewOffsets_.data() ) + , correlationMaskCount( static_cast( correlationMasks_.size() ) ) + , pCorrelationMasks( correlationMasks_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & + operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + pViewMasks = pViewMasks_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & setViewMasks( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( viewMasks_.size() ); + pViewMasks = viewMasks_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pViewOffsets = pViewOffsets_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & setViewOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( viewOffsets_.size() ); + pViewOffsets = viewOffsets_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT + { + correlationMaskCount = correlationMaskCount_; + return *this; + } + + RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCorrelationMasks = pCorrelationMasks_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & setCorrelationMasks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT + { + correlationMaskCount = static_cast( correlationMasks_.size() ); + pCorrelationMasks = correlationMasks_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default; +#else + bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) && + ( pViewMasks == rhs.pViewMasks ) && ( dependencyCount == rhs.dependencyCount ) && + ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) && + ( pCorrelationMasks == rhs.pCorrelationMasks ); + } + + bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; + const void * pNext = {}; + uint32_t subpassCount = {}; + const uint32_t * pViewMasks = {}; + uint32_t dependencyCount = {}; + const int32_t * pViewOffsets = {}; + uint32_t correlationMaskCount = {}; + const uint32_t * pCorrelationMasks = {}; + }; + static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
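  // Usage sketch: with the ArrayProxy setters above, subpassCount and correlationMaskCount are derived
  // from the proxies, so a single-subpass, two-view setup only needs the masks themselves.
  // vk::RenderPassCreateInfo is assumed from elsewhere in this header.
  //
  //   const uint32_t viewMasks[1]        = { 0b11 };      // subpass 0 renders to views 0 and 1
  //   const uint32_t correlationMasks[1] = { 0b11 };      // views 0 and 1 may be rendered concurrently
  //   vk::RenderPassMultiviewCreateInfo multiviewInfo;
  //   multiviewInfo.setViewMasks( viewMasks ).setCorrelationMasks( correlationMasks );
  //   // renderPassCreateInfo.setPNext( &multiviewInfo );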
); + + template <> + struct CppType + { + using Type = RenderPassMultiviewCreateInfo; + }; + using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; + + struct SubpassSampleLocationsEXT + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( + uint32_t subpassIndex_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : subpassIndex( subpassIndex_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassSampleLocationsEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & + operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT + { + subpassIndex = subpassIndex_; + return *this; + } + + SubpassSampleLocationsEXT & setSampleLocationsInfo( + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } + + operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassSampleLocationsEXT const & ) const = default; +#else + bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + } + + bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t subpassIndex = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + struct RenderPassSampleLocationsBeginInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRenderPassSampleLocationsBeginInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( + uint32_t attachmentInitialSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {}, + uint32_t postSubpassSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ) + , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ) + , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ) + , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassSampleLocationsBeginInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachmentInitialSampleLocations_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + postSubpassSampleLocations_ = {} ) + : attachmentInitialSampleLocationsCount( static_cast( attachmentInitialSampleLocations_.size() ) ) + , pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ) + , postSubpassSampleLocationsCount( static_cast( postSubpassSampleLocations_.size() ) ) + , pPostSubpassSampleLocations( postSubpassSampleLocations_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassSampleLocationsBeginInfoEXT & + operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT & + setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_; + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) + VULKAN_HPP_NOEXCEPT + { + pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + attachmentInitialSampleLocationsCount = static_cast( attachmentInitialSampleLocations_.size() ); + pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data(); + return *this; + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassSampleLocationsBeginInfoEXT & + setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pPostSubpassSampleLocations = pPostSubpassSampleLocations_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + postSubpassSampleLocationsCount = static_cast( postSubpassSampleLocations_.size() ); + pPostSubpassSampleLocations = postSubpassSampleLocations_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default; +#else + bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) && + ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) && + ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) && + ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations ); + } + + bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; + const void * pNext = {}; + uint32_t attachmentInitialSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {}; + uint32_t postSubpassSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {}; + }; + static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
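  // Usage sketch: overriding sample locations for a given subpass at render pass begin time.
  // sampleLocationsInfo is a filled vk::SampleLocationsInfoEXT and vk::RenderPassBeginInfo is assumed
  // from elsewhere in this header.
  //
  //   vk::SubpassSampleLocationsEXT subpassLocations( 0 /*subpassIndex*/, sampleLocationsInfo );
  //   vk::RenderPassSampleLocationsBeginInfoEXT sampleLocationsBegin;
  //   sampleLocationsBegin.setPostSubpassSampleLocations( subpassLocations );  // count derived from the proxy
  //   // renderPassBeginInfo.setPNext( &sampleLocationsBegin );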
); + + template <> + struct CppType + { + using Type = RenderPassSampleLocationsBeginInfoEXT; + }; + + struct RenderPassTransformBeginInfoQCOM + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT : transform( transform_ ) + {} + + VULKAN_HPP_CONSTEXPR + RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassTransformBeginInfoQCOM( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & + operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassTransformBeginInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default; +#else + bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); + } + + bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + }; + static_assert( sizeof( RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = RenderPassTransformBeginInfoQCOM; + }; + + struct SamplerCustomBorderColorCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSamplerCustomBorderColorCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + SamplerCustomBorderColorCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT + : customBorderColor( customBorderColor_ ) + , format( format_ ) + {} + + SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerCustomBorderColorCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SamplerCustomBorderColorCreateInfoEXT & + operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCustomBorderColorCreateInfoEXT & + operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SamplerCustomBorderColorCreateInfoEXT & + setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColor = customBorderColor_; + return *this; + } + + SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + static_assert( sizeof( SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
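  // Usage sketch: supplying a custom border color for a sampler whose borderColor is one of the
  // eCustom* values; vk::SamplerCreateInfo, vk::BorderColor and vk::ClearColorValue's float32
  // constructor are assumed from elsewhere in this header.
  //
  //   vk::SamplerCustomBorderColorCreateInfoEXT customBorder(
  //     vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.5f, 0.0f, 1.0f } ), vk::Format::eUndefined );
  //   // samplerCreateInfo.setBorderColor( vk::BorderColor::eFloatCustomEXT );
  //   // samplerCreateInfo.setPNext( &customBorder );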
); + + template <> + struct CppType + { + using Type = SamplerCustomBorderColorCreateInfoEXT; + }; + + struct SamplerReductionModeCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( + VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = + VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage ) VULKAN_HPP_NOEXCEPT + : reductionMode( reductionMode_ ) + {} + + VULKAN_HPP_CONSTEXPR + SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerReductionModeCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & + operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SamplerReductionModeCreateInfo & + setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT + { + reductionMode = reductionMode_; + return *this; + } + + operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default; +#else + bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode ); + } + + bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = + VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage; + }; + static_assert( sizeof( SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SamplerReductionModeCreateInfo; + }; + using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; + + struct SamplerYcbcrConversionImageFormatProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSamplerYcbcrConversionImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( + uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT + : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ ) + {} + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( + SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionImageFormatProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionImageFormatProperties & + operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionImageFormatProperties & + operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default; +#else + bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount ); + } + + bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties; + void * pNext = {}; + uint32_t combinedImageSamplerDescriptorCount = {}; + }; + static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == + sizeof( VkSamplerYcbcrConversionImageFormatProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
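// Illustrative usage sketch (editor's addition, not part of the generated vulkan.hpp being
// diffed here). It shows how the SamplerReductionModeCreateInfo wrapper above is chained into
// a SamplerCreateInfo via pNext to request min/max reduction filtering. Assumes
// <vulkan/vulkan.hpp> is included, `device` is a valid vk::Device created with
// VK_EXT_sampler_filter_minmax (or Vulkan 1.2 with samplerFilterMinmax enabled), and the
// default exception-based error handling is in use.
static vk::Sampler createMaxReductionSampler( vk::Device device )
{
  // Ask for per-texel max reduction instead of the default weighted-average filtering.
  vk::SamplerReductionModeCreateInfo reductionInfo( vk::SamplerReductionMode::eMax );
  vk::SamplerCreateInfo samplerInfo;
  samplerInfo.setMagFilter( vk::Filter::eLinear )
             .setMinFilter( vk::Filter::eLinear )
             .setPNext( &reductionInfo );  // extension struct is linked through the pNext chain
  return device.createSampler( samplerInfo );
}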
); + + template <> + struct CppType + { + using Type = SamplerYcbcrConversionImageFormatProperties; + }; + using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; + + struct SamplerYcbcrConversionInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT + : conversion( conversion_ ) + {} + + VULKAN_HPP_CONSTEXPR + SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & + operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SamplerYcbcrConversionInfo & + setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT + { + conversion = conversion_; + return *this; + } + + operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default; +#else + bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion ); + } + + bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {}; + }; + static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SamplerYcbcrConversionInfo; + }; + using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + struct ScreenSurfaceCreateInfoQNX + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {}, + struct _screen_context * context_ = {}, + struct _screen_window * window_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , context( context_ ) + , window( window_ ) + {} + + VULKAN_HPP_CONSTEXPR + ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ScreenSurfaceCreateInfoQNX( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & + operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ScreenSurfaceCreateInfoQNX & + setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT + { + context = context_; + return *this; + } + + ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } + + operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default; +# else + bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( context == rhs.context ) && + ( window == rhs.window ); + } + + bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenSurfaceCreateInfoQNX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags = {}; + struct _screen_context * context = {}; + struct _screen_window * window = {}; + }; + static_assert( sizeof( ScreenSurfaceCreateInfoQNX ) == sizeof( VkScreenSurfaceCreateInfoQNX ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ScreenSurfaceCreateInfoQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + struct SemaphoreTypeCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( + VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, + uint64_t initialValue_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphoreType( semaphoreType_ ) + , initialValue( initialValue_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreTypeCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & + operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreType = semaphoreType_; + return *this; + } + + SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT + { + initialValue = initialValue_; + return *this; + } + + operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default; +#else + bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) && + ( initialValue == rhs.initialValue ); + } + + bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary; + uint64_t initialValue = {}; + }; + static_assert( sizeof( SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);

+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
+  {
+    using Type = SemaphoreTypeCreateInfo;
+  };
+  using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
+
+  struct SetStateFlagsIndirectCommandNV
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data( data_ ) {}
+
+    VULKAN_HPP_CONSTEXPR
+      SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SetStateFlagsIndirectCommandNV( *reinterpret_cast<SetStateFlagsIndirectCommandNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV &
+      operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+    SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+
+    operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV *>( this );
+    }
+
+    operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default;
+#else
+    bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( data == rhs.data );
+    }
+
+    bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t data = {};
+  };
+  static_assert( sizeof( SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SetStateFlagsIndirectCommandNV>::value,
+                 "struct wrapper is not a standard layout!"
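// Illustrative usage sketch (editor's addition, not part of the generated vulkan.hpp being
// diffed here). It shows the SemaphoreTypeCreateInfo wrapper defined above being chained into
// a SemaphoreCreateInfo to create a timeline semaphore (Vulkan 1.2 / VK_KHR_timeline_semaphore).
// Assumes <vulkan/vulkan.hpp> is included, `device` is a valid vk::Device with the
// timelineSemaphore feature enabled, and exceptions are enabled.
static vk::Semaphore createTimelineSemaphore( vk::Device device, uint64_t initialValue )
{
  vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, initialValue );
  vk::SemaphoreCreateInfo createInfo;
  createInfo.setPNext( &typeInfo );  // the pNext chain selects the timeline semaphore type
  return device.createSemaphore( createInfo );
}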
); + + struct ShaderModuleValidationCacheCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eShaderModuleValidationCacheCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT + : validationCache( validationCache_ ) + {} + + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( + ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : ShaderModuleValidationCacheCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & + operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleValidationCacheCreateInfoEXT & + operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ShaderModuleValidationCacheCreateInfoEXT & + setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT + { + validationCache = validationCache_; + return *this; + } + + operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default; +#else + bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( validationCache == rhs.validationCache ); + } + + bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {}; + }; + static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == + sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ShaderModuleValidationCacheCreateInfoEXT; + }; + + struct ShaderResourceUsageAMD + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {}, + uint32_t numUsedSgprs_ = {}, + uint32_t ldsSizePerLocalWorkGroup_ = {}, + size_t ldsUsageSizeInBytes_ = {}, + size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT + : numUsedVgprs( numUsedVgprs_ ) + , numUsedSgprs( numUsedSgprs_ ) + , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ) + , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ) + , scratchMemUsageInBytes( scratchMemUsageInBytes_ ) + {} + + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderResourceUsageAMD( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderResourceUsageAMD & + operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderResourceUsageAMD const & ) const = default; +#else + bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) && + ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) && + ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) && + ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); + } + + bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t numUsedVgprs = {}; + uint32_t numUsedSgprs = {}; + uint32_t ldsSizePerLocalWorkGroup = {}; + size_t ldsUsageSizeInBytes = {}; + size_t scratchMemUsageInBytes = {}; + }; + static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);

+  struct ShaderStatisticsInfoAMD
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {},
+                               VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {},
+                               uint32_t numPhysicalVgprs_ = {},
+                               uint32_t numPhysicalSgprs_ = {},
+                               uint32_t numAvailableVgprs_ = {},
+                               uint32_t numAvailableSgprs_ = {},
+                               std::array<uint32_t, 3> const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderStageMask( shaderStageMask_ )
+      , resourceUsage( resourceUsage_ )
+      , numPhysicalVgprs( numPhysicalVgprs_ )
+      , numPhysicalSgprs( numPhysicalSgprs_ )
+      , numAvailableVgprs( numAvailableVgprs_ )
+      , numAvailableSgprs( numAvailableSgprs_ )
+      , computeWorkGroupSize( computeWorkGroupSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ShaderStatisticsInfoAMD( *reinterpret_cast<ShaderStatisticsInfoAMD const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD &
+      operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>( &rhs );
+      return *this;
+    }
+
+    operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderStatisticsInfoAMD *>( this );
+    }
+
+    operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderStatisticsInfoAMD *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default;
+#else
+    bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) &&
+             ( numPhysicalVgprs == rhs.numPhysicalVgprs ) && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) &&
+             ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) &&
+             ( computeWorkGroupSize == rhs.computeWorkGroupSize );
+    }
+
+    bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {};
+    VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {};
+    uint32_t numPhysicalVgprs = {};
+    uint32_t numPhysicalSgprs = {};
+    uint32_t numAvailableVgprs = {};
+    uint32_t numAvailableSgprs = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> computeWorkGroupSize = {};
+  };
+  static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!"
); + + struct SharedPresentSurfaceCapabilitiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSharedPresentSurfaceCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SharedPresentSurfaceCapabilitiesKHR & + operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SharedPresentSurfaceCapabilitiesKHR & + operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default; +#else + bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); + } + + bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; + }; + static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SharedPresentSurfaceCapabilitiesKHR; + }; + +#if defined( VK_USE_PLATFORM_GGP ) + struct StreamDescriptorSurfaceCreateInfoGGP + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, + GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , streamDescriptor( streamDescriptor_ ) + {} + + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + : StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & + operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StreamDescriptorSurfaceCreateInfoGGP & + operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + StreamDescriptorSurfaceCreateInfoGGP & + setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + StreamDescriptorSurfaceCreateInfoGGP & + setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + streamDescriptor = streamDescriptor_; + return *this; + } + + operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & ) const = default; +# else + bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 ); + } + + bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {}; + GgpStreamDescriptor streamDescriptor = {}; + }; + static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = StreamDescriptorSurfaceCreateInfoGGP; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + + struct SubpassDescriptionDepthStencilResolve + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSubpassDescriptionDepthStencilResolve; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT + : depthResolveMode( depthResolveMode_ ) + , stencilResolveMode( stencilResolveMode_ ) + , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDescriptionDepthStencilResolve( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescriptionDepthStencilResolve & + operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubpassDescriptionDepthStencilResolve & + setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT + { + depthResolveMode = depthResolveMode_; + return *this; + } + + SubpassDescriptionDepthStencilResolve & + setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT + { + stencilResolveMode = stencilResolveMode_; + return *this; + } + + SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_; + return *this; + } + + operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default; +#else + bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) && + ( stencilResolveMode == rhs.stencilResolveMode ) && + ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment ); + } + + bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eSubpassDescriptionDepthStencilResolve; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {}; + }; + static_assert( sizeof( SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SubpassDescriptionDepthStencilResolve; + }; + using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SurfaceCapabilitiesFullScreenExclusiveEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( + VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT + : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( + SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesFullScreenExclusiveEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & + operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesFullScreenExclusiveEXT & + operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SurfaceCapabilitiesFullScreenExclusiveEXT & + setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT + { + fullScreenExclusiveSupported = fullScreenExclusiveSupported_; + return *this; + } + + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default; +# else + bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported ); + } + + bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 
fullScreenExclusiveSupported = {}; + }; + static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == + sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SurfaceCapabilitiesFullScreenExclusiveEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SurfaceFullScreenExclusiveInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSurfaceFullScreenExclusiveInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT + : fullScreenExclusive( fullScreenExclusive_ ) + {} + + VULKAN_HPP_CONSTEXPR + SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & + operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SurfaceFullScreenExclusiveInfoEXT & + setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT + { + fullScreenExclusive = fullScreenExclusive_; + return *this; + } + + operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusive == rhs.fullScreenExclusive ); + } + + bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; + }; + static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SurfaceFullScreenExclusiveInfoEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SurfaceFullScreenExclusiveWin32InfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {} ) VULKAN_HPP_NOEXCEPT + : hmonitor( hmonitor_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceFullScreenExclusiveWin32InfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & + operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveWin32InfoEXT & + operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT + { + hmonitor = hmonitor_; + return *this; + } + + operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hmonitor == rhs.hmonitor ); + } + + bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + const void * pNext = {}; + HMONITOR hmonitor = {}; + }; + static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
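// Illustrative usage sketch (editor's addition, not part of the generated vulkan.hpp being
// diffed here). It shows how the two VK_EXT_full_screen_exclusive structs above are chained
// into a SwapchainCreateInfoKHR on Win32. `swapchainInfo` is assumed to be an otherwise fully
// populated vk::SwapchainCreateInfoKHR and `monitor` a valid HMONITOR; all three structs must
// outlive the vkCreateSwapchainKHR call that consumes the chain.
static void chainFullScreenExclusive( vk::SwapchainCreateInfoKHR & swapchainInfo,
                                      vk::SurfaceFullScreenExclusiveInfoEXT & exclusiveInfo,
                                      vk::SurfaceFullScreenExclusiveWin32InfoEXT & win32Info,
                                      HMONITOR monitor )
{
  win32Info.setHmonitor( monitor );
  exclusiveInfo.setFullScreenExclusive( vk::FullScreenExclusiveEXT::eApplicationControlled )
               .setPNext( &win32Info );      // Win32 monitor info extends the exclusive info
  swapchainInfo.setPNext( &exclusiveInfo );  // which in turn extends the swapchain create info
}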
); + + template <> + struct CppType + { + using Type = SurfaceFullScreenExclusiveWin32InfoEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct SurfaceProtectedCapabilitiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT + : supportsProtected( supportsProtected_ ) + {} + + VULKAN_HPP_CONSTEXPR + SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceProtectedCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & + operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SurfaceProtectedCapabilitiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SurfaceProtectedCapabilitiesKHR & + setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT + { + supportsProtected = supportsProtected_; + return *this; + } + + operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default; +#else + bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsProtected == rhs.supportsProtected ); + } + + bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; + }; + static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SurfaceProtectedCapabilitiesKHR; + }; + + struct SwapchainCounterCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT + : surfaceCounters( surfaceCounters_ ) + {} + + VULKAN_HPP_CONSTEXPR + SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainCounterCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & + operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SwapchainCounterCreateInfoEXT & + setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT + { + surfaceCounters = surfaceCounters_; + return *this; + } + + operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default; +#else + bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters ); + } + + bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {}; + }; + static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SwapchainCounterCreateInfoEXT; + }; + + struct SwapchainDisplayNativeHdrCreateInfoAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} ) + VULKAN_HPP_NOEXCEPT : localDimmingEnable( localDimmingEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainDisplayNativeHdrCreateInfoAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & + operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainDisplayNativeHdrCreateInfoAMD & + operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SwapchainDisplayNativeHdrCreateInfoAMD & + setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT + { + localDimmingEnable = localDimmingEnable_; + return *this; + } + + operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default; +#else + bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingEnable == rhs.localDimmingEnable ); + } + + bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; + }; + static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SwapchainDisplayNativeHdrCreateInfoAMD; + }; + + struct TextureLODGatherFormatPropertiesAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eTextureLodGatherFormatPropertiesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( + VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT + : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ ) + {} + + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : TextureLODGatherFormatPropertiesAMD( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 TextureLODGatherFormatPropertiesAMD & + operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TextureLODGatherFormatPropertiesAMD & + operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default; +#else + bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD ); + } + + bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {}; + }; + static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = TextureLODGatherFormatPropertiesAMD; + }; + + struct TimelineSemaphoreSubmitInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValueCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreValueCount( waitSemaphoreValueCount_ ) + , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) + , signalSemaphoreValueCount( signalSemaphoreValueCount_ ) + , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + {} + + VULKAN_HPP_CONSTEXPR + TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : TimelineSemaphoreSubmitInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {} ) + : waitSemaphoreValueCount( static_cast( waitSemaphoreValues_.size() ) ) + , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) + , signalSemaphoreValueCount( static_cast( signalSemaphoreValues_.size() ) ) + , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & + operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValueCount = waitSemaphoreValueCount_; + return *this; + } + + TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreValues = pWaitSemaphoreValues_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValueCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = waitSemaphoreValues_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + TimelineSemaphoreSubmitInfo & + setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValueCount = signalSemaphoreValueCount_; + return *this; + } + + TimelineSemaphoreSubmitInfo & + setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreValues = pSignalSemaphoreValues_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValueCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default; +#else + bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && + ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) && + ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); + } + + bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreValueCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValueCount = {}; + const uint64_t * pSignalSemaphoreValues = {}; + }; + static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = TimelineSemaphoreSubmitInfo; + }; + using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; + + struct TraceRaysIndirectCommandKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT + : width( width_ ) + , height( height_ ) + , depth( depth_ ) + {} + + VULKAN_HPP_CONSTEXPR + TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : TraceRaysIndirectCommandKHR( *reinterpret_cast( &rhs ) ) + {} + + explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} ) + : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & + operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT + { + depth = depth_; + return *this; + } + + operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTraceRaysIndirectCommandKHR &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default; +#else + bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth ); + } + + bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t width = {}; + uint32_t height = {}; + uint32_t depth = {}; + }; + static_assert( sizeof( TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + struct ValidationFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( + uint32_t enabledValidationFeatureCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {}, + uint32_t disabledValidationFeatureCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : enabledValidationFeatureCount( enabledValidationFeatureCount_ ) + , pEnabledValidationFeatures( pEnabledValidationFeatures_ ) + , disabledValidationFeatureCount( disabledValidationFeatureCount_ ) + , pDisabledValidationFeatures( pDisabledValidationFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ValidationFeaturesEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + enabledValidationFeatures_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + disabledValidationFeatures_ = {} ) + : enabledValidationFeatureCount( static_cast( enabledValidationFeatures_.size() ) ) + , pEnabledValidationFeatures( enabledValidationFeatures_.data() ) + , disabledValidationFeatureCount( static_cast( disabledValidationFeatures_.size() ) ) + , pDisabledValidationFeatures( disabledValidationFeatures_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & + operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ValidationFeaturesEXT & + setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledValidationFeatureCount = enabledValidationFeatureCount_; + return *this; + } + + ValidationFeaturesEXT & setPEnabledValidationFeatures( + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + pEnabledValidationFeatures = pEnabledValidationFeatures_; + return *this; + } + +#if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT & setEnabledValidationFeatures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + enabledValidationFeatureCount = static_cast( enabledValidationFeatures_.size() ); + pEnabledValidationFeatures = enabledValidationFeatures_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ValidationFeaturesEXT & + setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationFeatureCount = disabledValidationFeatureCount_; + return *this; + } + + ValidationFeaturesEXT & setPDisabledValidationFeatures( + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + pDisabledValidationFeatures = pDisabledValidationFeatures_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT & setDisabledValidationFeatures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationFeatureCount = static_cast( disabledValidationFeatures_.size() ); + pDisabledValidationFeatures = disabledValidationFeatures_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationFeaturesEXT const & ) const = default; +#else + bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) && + ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) && + ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) && + ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures ); + } + + bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT; + const void * pNext = {}; + uint32_t enabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {}; + uint32_t disabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {}; + }; + static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ValidationFeaturesEXT; + }; + + struct ValidationFlagsEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( + uint32_t disabledValidationCheckCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {} ) VULKAN_HPP_NOEXCEPT + : disabledValidationCheckCount( disabledValidationCheckCount_ ) + , pDisabledValidationChecks( pDisabledValidationChecks_ ) + {} + + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ValidationFlagsEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFlagsEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + disabledValidationChecks_ ) + : disabledValidationCheckCount( static_cast( disabledValidationChecks_.size() ) ) + , pDisabledValidationChecks( disabledValidationChecks_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & + operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationCheckCount = disabledValidationCheckCount_; + return *this; + } + + ValidationFlagsEXT & setPDisabledValidationChecks( + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + { + pDisabledValidationChecks = pDisabledValidationChecks_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFlagsEXT & setDisabledValidationChecks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationCheckCount = static_cast( disabledValidationChecks_.size() ); + pDisabledValidationChecks = disabledValidationChecks_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationFlagsEXT const & ) const = default; +#else + bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) && + ( pDisabledValidationChecks == rhs.pDisabledValidationChecks ); + } + + bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT; + const void * pNext = {}; + uint32_t disabledValidationCheckCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * 
pDisabledValidationChecks = {}; + }; + static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ValidationFlagsEXT; + }; + +#if defined( VK_USE_PLATFORM_VI_NN ) + struct ViSurfaceCreateInfoNN + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, + void * window_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , window( window_ ) + {} + + VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + : ViSurfaceCreateInfoNN( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & + operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } + + operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default; +# else + bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); + } + + bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; + void * window = {}; + }; + static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eViSurfaceCreateInfoNN>
+  {
+    using Type = ViSurfaceCreateInfoNN;
+  };
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoDecodeH264CapabilitiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264CapabilitiesEXT(
+      uint32_t                                  maxLevel_               = {},
+      VULKAN_HPP_NAMESPACE::Offset2D            fieldOffsetGranularity_ = {},
+      VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_    = {} ) VULKAN_HPP_NOEXCEPT
+      : maxLevel( maxLevel_ )
+      , fieldOffsetGranularity( fieldOffsetGranularity_ )
+      , stdExtensionVersion( stdExtensionVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      VideoDecodeH264CapabilitiesEXT( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264CapabilitiesEXT( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264CapabilitiesEXT( *reinterpret_cast<VideoDecodeH264CapabilitiesEXT const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264CapabilitiesEXT &
+      operator=( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264CapabilitiesEXT & operator=( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkVideoDecodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264CapabilitiesEXT *>( this );
+    }
+
+    operator VkVideoDecodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264CapabilitiesEXT *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH264CapabilitiesEXT const & ) const = default;
+# else
+    bool operator==( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxLevel == rhs.maxLevel ) &&
+             ( fieldOffsetGranularity == rhs.fieldOffsetGranularity ) &&
+             ( stdExtensionVersion == rhs.stdExtensionVersion );
+    }
+
+    bool operator!=( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType       sType                  = StructureType::eVideoDecodeH264CapabilitiesEXT;
+    void *                                    pNext                  = {};
+    uint32_t                                  maxLevel               = {};
+    VULKAN_HPP_NAMESPACE::Offset2D            fieldOffsetGranularity = {};
+    VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion    = {};
+  };
+  static_assert( sizeof( VideoDecodeH264CapabilitiesEXT ) == sizeof( VkVideoDecodeH264CapabilitiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoDecodeH264CapabilitiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264CapabilitiesEXT>
+  {
+    using Type = VideoDecodeH264CapabilitiesEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoDecodeH264DpbSlotInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT(
+      const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pStdReferenceInfo( pStdReferenceInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      VideoDecodeH264DpbSlotInfoEXT( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264DpbSlotInfoEXT( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264DpbSlotInfoEXT( *reinterpret_cast<VideoDecodeH264DpbSlotInfoEXT const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT &
+      operator=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264DpbSlotInfoEXT & operator=( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoDecodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoDecodeH264DpbSlotInfoEXT &
+      setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdReferenceInfo = pStdReferenceInfo_;
+      return *this;
+    }
+
+    operator VkVideoDecodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoEXT *>( this );
+    }
+
+    operator VkVideoDecodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264DpbSlotInfoEXT *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH264DpbSlotInfoEXT const & ) const = default;
+# else
+    bool operator==( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
+    }
+
+    bool operator!=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType     sType             = StructureType::eVideoDecodeH264DpbSlotInfoEXT;
+    const void *                            pNext             = {};
+    const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {};
+  };
+  static_assert( sizeof( VideoDecodeH264DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH264DpbSlotInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoDecodeH264DpbSlotInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264DpbSlotInfoEXT>
+  {
+    using Type = VideoDecodeH264DpbSlotInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoDecodeH264MvcEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264MvcEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT( const StdVideoDecodeH264Mvc * pStdMvc_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pStdMvc( pStdMvc_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264MvcEXT( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264MvcEXT( *reinterpret_cast<VideoDecodeH264MvcEXT const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcEXT &
+      operator=( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264MvcEXT & operator=( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoDecodeH264MvcEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoDecodeH264MvcEXT & setPStdMvc( const StdVideoDecodeH264Mvc * pStdMvc_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdMvc = pStdMvc_;
+      return *this;
+    }
+
+    operator VkVideoDecodeH264MvcEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264MvcEXT *>( this );
+    }
+
+    operator VkVideoDecodeH264MvcEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264MvcEXT *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH264MvcEXT const & ) const = default;
+# else
+    bool operator==( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdMvc == rhs.pStdMvc );
+    }
+
+    bool operator!=( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType   = StructureType::eVideoDecodeH264MvcEXT;
+    const void *                        pNext   = {};
+    const StdVideoDecodeH264Mvc *       pStdMvc = {};
+  };
+  static_assert( sizeof( VideoDecodeH264MvcEXT ) == sizeof( VkVideoDecodeH264MvcEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoDecodeH264MvcEXT>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = VideoDecodeH264MvcEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264PictureInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {}, + uint32_t slicesCount_ = {}, + const uint32_t * pSlicesDataOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( slicesCount_ ) + , pSlicesDataOffsets( pSlicesDataOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH264PictureInfoEXT( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264PictureInfoEXT( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264PictureInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoEXT( + const StdVideoDecodeH264PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( static_cast( slicesDataOffsets_.size() ) ) + , pSlicesDataOffsets( slicesDataOffsets_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & + operator=( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264PictureInfoEXT & operator=( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH264PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH264PictureInfoEXT & + setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VideoDecodeH264PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = slicesCount_; + return *this; + } + + VideoDecodeH264PictureInfoEXT & setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pSlicesDataOffsets = pSlicesDataOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoEXT & setSlicesDataOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = static_cast( slicesDataOffsets_.size() ); + pSlicesDataOffsets = slicesDataOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoDecodeH264PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264PictureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264PictureInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && + ( slicesCount == rhs.slicesCount ) && ( pSlicesDataOffsets == 
rhs.pSlicesDataOffsets ); + } + + bool operator!=( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoEXT; + const void * pNext = {}; + const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {}; + uint32_t slicesCount = {}; + const uint32_t * pSlicesDataOffsets = {}; + }; + static_assert( sizeof( VideoDecodeH264PictureInfoEXT ) == sizeof( VkVideoDecodeH264PictureInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264PictureInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264ProfileEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileEXT( + StdVideoH264ProfileIdc stdProfileIdc_ = {}, + VULKAN_HPP_NAMESPACE::VideoDecodeH264FieldLayoutFlagsEXT fieldLayout_ = {} ) VULKAN_HPP_NOEXCEPT + : stdProfileIdc( stdProfileIdc_ ) + , fieldLayout( fieldLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH264ProfileEXT( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264ProfileEXT( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264ProfileEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT & + operator=( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264ProfileEXT & operator=( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH264ProfileEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } + + VideoDecodeH264ProfileEXT & + setFieldLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264FieldLayoutFlagsEXT fieldLayout_ ) VULKAN_HPP_NOEXCEPT + { + fieldLayout = fieldLayout_; + return *this; + } + + operator VkVideoDecodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264ProfileEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ) && + ( fieldLayout == rhs.fieldLayout ); + } + + bool operator!=( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileEXT; + const void * pNext = {}; + StdVideoH264ProfileIdc stdProfileIdc = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeH264FieldLayoutFlagsEXT fieldLayout = {}; + }; + static_assert( 
sizeof( VideoDecodeH264ProfileEXT ) == sizeof( VkVideoDecodeH264ProfileEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264ProfileEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264SessionCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH264SessionCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT( + VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_ = {}, + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pStdExtensionVersion( pStdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT( VideoDecodeH264SessionCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionCreateInfoEXT( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT & + operator=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionCreateInfoEXT & + operator=( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH264SessionCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoDecodeH264SessionCreateInfoEXT & setPStdExtensionVersion( + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT + { + pStdExtensionVersion = pStdExtensionVersion_; + return *this; + } + + operator VkVideoDecodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pStdExtensionVersion == rhs.pStdExtensionVersion ); + } + + bool operator!=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags = {}; + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {}; + }; + static_assert( sizeof( VideoDecodeH264SessionCreateInfoEXT ) == sizeof( VkVideoDecodeH264SessionCreateInfoEXT ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264SessionParametersAddInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH264SessionParametersAddInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT( + uint32_t spsStdCount_ = {}, + const StdVideoH264SequenceParameterSet * pSpsStd_ = {}, + uint32_t ppsStdCount_ = {}, + const StdVideoH264PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT + : spsStdCount( spsStdCount_ ) + , pSpsStd( pSpsStd_ ) + , ppsStdCount( ppsStdCount_ ) + , pPpsStd( pPpsStd_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT( + VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersAddInfoEXT( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersAddInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {} ) + : spsStdCount( static_cast( spsStd_.size() ) ) + , pSpsStd( spsStd_.data() ) + , ppsStdCount( static_cast( ppsStd_.size() ) ) + , pPpsStd( ppsStd_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & + operator=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersAddInfoEXT & + operator=( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH264SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + spsStdCount = spsStdCount_; + return *this; + } + + VideoDecodeH264SessionParametersAddInfoEXT & + setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pSpsStd = pSpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoEXT & + setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) + VULKAN_HPP_NOEXCEPT + { + spsStdCount = static_cast( spsStd_.size() ); + pSpsStd = spsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeH264SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + ppsStdCount = ppsStdCount_; + return *this; + } + + VideoDecodeH264SessionParametersAddInfoEXT & + setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pPpsStd = pPpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoEXT & + setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) + VULKAN_HPP_NOEXCEPT + 
{ + ppsStdCount = static_cast( ppsStd_.size() ); + pPpsStd = ppsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoDecodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionParametersAddInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) && + ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); + } + + bool operator!=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT; + const void * pNext = {}; + uint32_t spsStdCount = {}; + const StdVideoH264SequenceParameterSet * pSpsStd = {}; + uint32_t ppsStdCount = {}; + const StdVideoH264PictureParameterSet * pPpsStd = {}; + }; + static_assert( sizeof( VideoDecodeH264SessionParametersAddInfoEXT ) == + sizeof( VkVideoDecodeH264SessionParametersAddInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionParametersAddInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264SessionParametersCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT( + uint32_t maxSpsStdCount_ = {}, + uint32_t maxPpsStdCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {} ) + VULKAN_HPP_NOEXCEPT + : maxSpsStdCount( maxSpsStdCount_ ) + , maxPpsStdCount( maxPpsStdCount_ ) + , pParametersAddInfo( pParametersAddInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT( + VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersCreateInfoEXT( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & + operator=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersCreateInfoEXT & + operator=( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH264SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) 
VULKAN_HPP_NOEXCEPT + { + maxSpsStdCount = maxSpsStdCount_; + return *this; + } + + VideoDecodeH264SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPpsStdCount = maxPpsStdCount_; + return *this; + } + + VideoDecodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo( + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } + + operator VkVideoDecodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionParametersCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) && + ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); + } + + bool operator!=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT; + const void * pNext = {}; + uint32_t maxSpsStdCount = {}; + uint32_t maxPpsStdCount = {}; + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {}; + }; + static_assert( sizeof( VideoDecodeH264SessionParametersCreateInfoEXT ) == + sizeof( VkVideoDecodeH264SessionParametersCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT>
+  {
+    using Type = VideoDecodeH264SessionParametersCreateInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoDecodeH265CapabilitiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265CapabilitiesEXT(
+      uint32_t maxLevel_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxLevel( maxLevel_ )
+      , stdExtensionVersion( stdExtensionVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      VideoDecodeH265CapabilitiesEXT( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH265CapabilitiesEXT( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH265CapabilitiesEXT( *reinterpret_cast<VideoDecodeH265CapabilitiesEXT const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265CapabilitiesEXT &
+      operator=( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH265CapabilitiesEXT & operator=( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkVideoDecodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265CapabilitiesEXT *>( this );
+    }
+
+    operator VkVideoDecodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265CapabilitiesEXT *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH265CapabilitiesEXT const & ) const = default;
+# else
+    bool operator==( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxLevel == rhs.maxLevel ) &&
+             ( stdExtensionVersion == rhs.stdExtensionVersion );
+    }
+
+    bool operator!=( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType       sType               = StructureType::eVideoDecodeH265CapabilitiesEXT;
+    void *                                    pNext               = {};
+    uint32_t                                  maxLevel            = {};
+    VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {};
+  };
+  static_assert( sizeof( VideoDecodeH265CapabilitiesEXT ) == sizeof( VkVideoDecodeH265CapabilitiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoDecodeH265CapabilitiesEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = VideoDecodeH265CapabilitiesEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265DpbSlotInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT( + const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdReferenceInfo( pStdReferenceInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265DpbSlotInfoEXT( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265DpbSlotInfoEXT( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT & + operator=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265DpbSlotInfoEXT & operator=( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH265DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH265DpbSlotInfoEXT & + setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } + + operator VkVideoDecodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265DpbSlotInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); + } + + bool operator!=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265DpbSlotInfoEXT; + const void * pNext = {}; + const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {}; + }; + static_assert( sizeof( VideoDecodeH265DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH265DpbSlotInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH265DpbSlotInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265PictureInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {}, + uint32_t slicesCount_ = {}, + const uint32_t * pSlicesDataOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( slicesCount_ ) + , pSlicesDataOffsets( pSlicesDataOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265PictureInfoEXT( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265PictureInfoEXT( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265PictureInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265PictureInfoEXT( + StdVideoDecodeH265PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( static_cast( slicesDataOffsets_.size() ) ) + , pSlicesDataOffsets( slicesDataOffsets_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & + operator=( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265PictureInfoEXT & operator=( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH265PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH265PictureInfoEXT & + setPStdPictureInfo( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VideoDecodeH265PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = slicesCount_; + return *this; + } + + VideoDecodeH265PictureInfoEXT & setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pSlicesDataOffsets = pSlicesDataOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265PictureInfoEXT & setSlicesDataOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = static_cast( slicesDataOffsets_.size() ); + pSlicesDataOffsets = slicesDataOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoDecodeH265PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265PictureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265PictureInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && + ( slicesCount == rhs.slicesCount ) && ( pSlicesDataOffsets == rhs.pSlicesDataOffsets ); 
+ } + + bool operator!=( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoEXT; + const void * pNext = {}; + StdVideoDecodeH265PictureInfo * pStdPictureInfo = {}; + uint32_t slicesCount = {}; + const uint32_t * pSlicesDataOffsets = {}; + }; + static_assert( sizeof( VideoDecodeH265PictureInfoEXT ) == sizeof( VkVideoDecodeH265PictureInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH265PictureInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265ProfileEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT + : stdProfileIdc( stdProfileIdc_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265ProfileEXT( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265ProfileEXT( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265ProfileEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT & + operator=( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265ProfileEXT & operator=( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH265ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH265ProfileEXT & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } + + operator VkVideoDecodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265ProfileEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 ); + } + + bool operator!=( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileEXT; + const void * pNext = {}; + StdVideoH265ProfileIdc stdProfileIdc = {}; + }; + static_assert( sizeof( VideoDecodeH265ProfileEXT ) == sizeof( VkVideoDecodeH265ProfileEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH265ProfileEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265SessionCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH265SessionCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT( + VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_ = {}, + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pStdExtensionVersion( pStdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT( VideoDecodeH265SessionCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionCreateInfoEXT( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT & + operator=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionCreateInfoEXT & + operator=( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH265SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH265SessionCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoDecodeH265SessionCreateInfoEXT & setPStdExtensionVersion( + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT + { + pStdExtensionVersion = pStdExtensionVersion_; + return *this; + } + + operator VkVideoDecodeH265SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265SessionCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pStdExtensionVersion == rhs.pStdExtensionVersion ); + } + + bool operator!=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags = {}; + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {}; + }; + static_assert( sizeof( VideoDecodeH265SessionCreateInfoEXT ) == sizeof( VkVideoDecodeH265SessionCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH265SessionCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265SessionParametersAddInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH265SessionParametersAddInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT( + uint32_t spsStdCount_ = {}, + const StdVideoH265SequenceParameterSet * pSpsStd_ = {}, + uint32_t ppsStdCount_ = {}, + const StdVideoH265PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT + : spsStdCount( spsStdCount_ ) + , pSpsStd( pSpsStd_ ) + , ppsStdCount( ppsStdCount_ ) + , pPpsStd( pPpsStd_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT( + VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersAddInfoEXT( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionParametersAddInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {} ) + : spsStdCount( static_cast( spsStd_.size() ) ) + , pSpsStd( spsStd_.data() ) + , ppsStdCount( static_cast( ppsStd_.size() ) ) + , pPpsStd( ppsStd_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & + operator=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersAddInfoEXT & + operator=( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH265SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH265SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + spsStdCount = spsStdCount_; + return *this; + } + + VideoDecodeH265SessionParametersAddInfoEXT & + setPSpsStd( const StdVideoH265SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pSpsStd = pSpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoEXT & + setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) + VULKAN_HPP_NOEXCEPT + { + spsStdCount = static_cast( spsStd_.size() ); + pSpsStd = spsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeH265SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + ppsStdCount = ppsStdCount_; + return *this; + } + + VideoDecodeH265SessionParametersAddInfoEXT & + setPPpsStd( const StdVideoH265PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pPpsStd = pPpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoEXT & + setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) + VULKAN_HPP_NOEXCEPT + { + ppsStdCount = static_cast( ppsStd_.size() ); + pPpsStd = ppsStd_.data(); + return *this; + } 
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoDecodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265SessionParametersAddInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) && + ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); + } + + bool operator!=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT; + const void * pNext = {}; + uint32_t spsStdCount = {}; + const StdVideoH265SequenceParameterSet * pSpsStd = {}; + uint32_t ppsStdCount = {}; + const StdVideoH265PictureParameterSet * pPpsStd = {}; + }; + static_assert( sizeof( VideoDecodeH265SessionParametersAddInfoEXT ) == + sizeof( VkVideoDecodeH265SessionParametersAddInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH265SessionParametersAddInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265SessionParametersCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT( + uint32_t maxSpsStdCount_ = {}, + uint32_t maxPpsStdCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {} ) + VULKAN_HPP_NOEXCEPT + : maxSpsStdCount( maxSpsStdCount_ ) + , maxPpsStdCount( maxPpsStdCount_ ) + , pParametersAddInfo( pParametersAddInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT( + VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersCreateInfoEXT( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionParametersCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & + operator=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersCreateInfoEXT & + operator=( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH265SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH265SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSpsStdCount = maxSpsStdCount_; + return *this; + } + + 
VideoDecodeH265SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxPpsStdCount = maxPpsStdCount_;
+      return *this;
+    }
+
+    VideoDecodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo(
+      const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pParametersAddInfo = pParametersAddInfo_;
+      return *this;
+    }
+
+    operator VkVideoDecodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265SessionParametersCreateInfoEXT *>( this );
+    }
+
+    operator VkVideoDecodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265SessionParametersCreateInfoEXT *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH265SessionParametersCreateInfoEXT const & ) const = default;
+# else
+    bool operator==( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) &&
+             ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
+    }
+
+    bool operator!=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT;
+    const void * pNext = {};
+    uint32_t maxSpsStdCount = {};
+    uint32_t maxPpsStdCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {};
+  };
+  static_assert( sizeof( VideoDecodeH265SessionParametersCreateInfoEXT ) ==
+                   sizeof( VkVideoDecodeH265SessionParametersCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoDecodeH265SessionParametersCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = VideoDecodeH265SessionParametersCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264CapabilitiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT( + VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minPictureSizeInMbs_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment_ = {}, + uint8_t maxNumL0ReferenceForP_ = {}, + uint8_t maxNumL0ReferenceForB_ = {}, + uint8_t maxNumL1Reference_ = {}, + uint8_t qualityLevelCount_ = {}, + VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , inputModeFlags( inputModeFlags_ ) + , outputModeFlags( outputModeFlags_ ) + , minPictureSizeInMbs( minPictureSizeInMbs_ ) + , maxPictureSizeInMbs( maxPictureSizeInMbs_ ) + , inputImageDataAlignment( inputImageDataAlignment_ ) + , maxNumL0ReferenceForP( maxNumL0ReferenceForP_ ) + , maxNumL0ReferenceForB( maxNumL0ReferenceForB_ ) + , maxNumL1Reference( maxNumL1Reference_ ) + , qualityLevelCount( qualityLevelCount_ ) + , stdExtensionVersion( stdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + VideoEncodeH264CapabilitiesEXT( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264CapabilitiesEXT( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264CapabilitiesEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & + operator=( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264CapabilitiesEXT & operator=( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeH264CapabilitiesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeH264CapabilitiesEXT & + setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoEncodeH264CapabilitiesEXT & + setInputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ ) VULKAN_HPP_NOEXCEPT + { + inputModeFlags = inputModeFlags_; + return *this; + } + + VideoEncodeH264CapabilitiesEXT & + setOutputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ ) VULKAN_HPP_NOEXCEPT + { + outputModeFlags = outputModeFlags_; + return *this; + } + + VideoEncodeH264CapabilitiesEXT & + setMinPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & minPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT + { + minPictureSizeInMbs = minPictureSizeInMbs_; + return *this; + } + + VideoEncodeH264CapabilitiesEXT & + setMaxPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & maxPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT + { + maxPictureSizeInMbs = maxPictureSizeInMbs_; + return *this; + } + + VideoEncodeH264CapabilitiesEXT & + 
setInputImageDataAlignment( VULKAN_HPP_NAMESPACE::Extent2D const & inputImageDataAlignment_ ) VULKAN_HPP_NOEXCEPT + { + inputImageDataAlignment = inputImageDataAlignment_; + return *this; + } + + VideoEncodeH264CapabilitiesEXT & setMaxNumL0ReferenceForP( uint8_t maxNumL0ReferenceForP_ ) VULKAN_HPP_NOEXCEPT + { + maxNumL0ReferenceForP = maxNumL0ReferenceForP_; + return *this; + } + + VideoEncodeH264CapabilitiesEXT & setMaxNumL0ReferenceForB( uint8_t maxNumL0ReferenceForB_ ) VULKAN_HPP_NOEXCEPT + { + maxNumL0ReferenceForB = maxNumL0ReferenceForB_; + return *this; + } + + VideoEncodeH264CapabilitiesEXT & setMaxNumL1Reference( uint8_t maxNumL1Reference_ ) VULKAN_HPP_NOEXCEPT + { + maxNumL1Reference = maxNumL1Reference_; + return *this; + } + + VideoEncodeH264CapabilitiesEXT & setQualityLevelCount( uint8_t qualityLevelCount_ ) VULKAN_HPP_NOEXCEPT + { + qualityLevelCount = qualityLevelCount_; + return *this; + } + + VideoEncodeH264CapabilitiesEXT & setStdExtensionVersion( + VULKAN_HPP_NAMESPACE::ExtensionProperties const & stdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT + { + stdExtensionVersion = stdExtensionVersion_; + return *this; + } + + operator VkVideoEncodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264CapabilitiesEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( inputModeFlags == rhs.inputModeFlags ) && ( outputModeFlags == rhs.outputModeFlags ) && + ( minPictureSizeInMbs == rhs.minPictureSizeInMbs ) && ( maxPictureSizeInMbs == rhs.maxPictureSizeInMbs ) && + ( inputImageDataAlignment == rhs.inputImageDataAlignment ) && + ( maxNumL0ReferenceForP == rhs.maxNumL0ReferenceForP ) && + ( maxNumL0ReferenceForB == rhs.maxNumL0ReferenceForB ) && ( maxNumL1Reference == rhs.maxNumL1Reference ) && + ( qualityLevelCount == rhs.qualityLevelCount ) && ( stdExtensionVersion == rhs.stdExtensionVersion ); + } + + bool operator!=( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags = {}; + VULKAN_HPP_NAMESPACE::Extent2D minPictureSizeInMbs = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs = {}; + VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment = {}; + uint8_t maxNumL0ReferenceForP = {}; + uint8_t maxNumL0ReferenceForB = {}; + uint8_t maxNumL1Reference = {}; + uint8_t qualityLevelCount = {}; + VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {}; + }; + static_assert( sizeof( VideoEncodeH264CapabilitiesEXT ) == sizeof( VkVideoEncodeH264CapabilitiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoEncodeH264CapabilitiesEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264DpbSlotInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH264DpbSlotInfoEXT( int8_t slotIndex_ = {}, + const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : slotIndex( slotIndex_ ) + , pStdPictureInfo( pStdPictureInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264DpbSlotInfoEXT( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264DpbSlotInfoEXT( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & + operator=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264DpbSlotInfoEXT & operator=( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeH264DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT + { + slotIndex = slotIndex_; + return *this; + } + + VideoEncodeH264DpbSlotInfoEXT & + setPStdPictureInfo( const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + operator VkVideoEncodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264DpbSlotInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && + ( pStdPictureInfo == rhs.pStdPictureInfo ); + } + + bool operator!=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264DpbSlotInfoEXT; + const void * pNext = {}; + int8_t slotIndex = {}; + const StdVideoEncodeH264PictureInfo * pStdPictureInfo = {}; + }; + static_assert( sizeof( VideoEncodeH264DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH264DpbSlotInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoEncodeH264DpbSlotInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264EmitPictureParametersEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH264EmitPictureParametersEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH264EmitPictureParametersEXT( uint8_t spsId_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {}, + uint32_t ppsIdEntryCount_ = {}, + const uint8_t * ppsIdEntries_ = {} ) VULKAN_HPP_NOEXCEPT + : spsId( spsId_ ) + , emitSpsEnable( emitSpsEnable_ ) + , ppsIdEntryCount( ppsIdEntryCount_ ) + , ppsIdEntries( ppsIdEntries_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersEXT( VideoEncodeH264EmitPictureParametersEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264EmitPictureParametersEXT( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264EmitPictureParametersEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264EmitPictureParametersEXT( + uint8_t spsId_, + VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & psIdEntries_ ) + : spsId( spsId_ ) + , emitSpsEnable( emitSpsEnable_ ) + , ppsIdEntryCount( static_cast( psIdEntries_.size() ) ) + , ppsIdEntries( psIdEntries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & + operator=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264EmitPictureParametersEXT & + operator=( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeH264EmitPictureParametersEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeH264EmitPictureParametersEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT + { + spsId = spsId_; + return *this; + } + + VideoEncodeH264EmitPictureParametersEXT & + setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT + { + emitSpsEnable = emitSpsEnable_; + return *this; + } + + VideoEncodeH264EmitPictureParametersEXT & setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + ppsIdEntryCount = ppsIdEntryCount_; + return *this; + } + + VideoEncodeH264EmitPictureParametersEXT & setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT + { + ppsIdEntries = ppsIdEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264EmitPictureParametersEXT & setPsIdEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT + { + ppsIdEntryCount = static_cast( psIdEntries_.size() ); + ppsIdEntries = psIdEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoEncodeH264EmitPictureParametersEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264EmitPictureParametersEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
VideoEncodeH264EmitPictureParametersEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsId == rhs.spsId ) && + ( emitSpsEnable == rhs.emitSpsEnable ) && ( ppsIdEntryCount == rhs.ppsIdEntryCount ) && + ( ppsIdEntries == rhs.ppsIdEntries ); + } + + bool operator!=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264EmitPictureParametersEXT; + const void * pNext = {}; + uint8_t spsId = {}; + VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {}; + uint32_t ppsIdEntryCount = {}; + const uint8_t * ppsIdEntries = {}; + }; + static_assert( sizeof( VideoEncodeH264EmitPictureParametersEXT ) == + sizeof( VkVideoEncodeH264EmitPictureParametersEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264EmitPictureParametersEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264NaluSliceEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceEXT( + const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ = {}, + uint32_t mbCount_ = {}, + uint8_t refFinalList0EntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries_ = {}, + uint8_t refFinalList1EntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries_ = {}, + uint32_t precedingNaluBytes_ = {}, + uint8_t minQp_ = {}, + uint8_t maxQp_ = {} ) VULKAN_HPP_NOEXCEPT + : pSliceHeaderStd( pSliceHeaderStd_ ) + , mbCount( mbCount_ ) + , refFinalList0EntryCount( refFinalList0EntryCount_ ) + , pRefFinalList0Entries( pRefFinalList0Entries_ ) + , refFinalList1EntryCount( refFinalList1EntryCount_ ) + , pRefFinalList1Entries( pRefFinalList1Entries_ ) + , precedingNaluBytes( precedingNaluBytes_ ) + , minQp( minQp_ ) + , maxQp( maxQp_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264NaluSliceEXT( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264NaluSliceEXT( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264NaluSliceEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264NaluSliceEXT( + const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_, + uint32_t mbCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refFinalList0Entries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refFinalList1Entries_ = {}, + uint32_t precedingNaluBytes_ = {}, + uint8_t minQp_ = {}, + uint8_t maxQp_ = {} ) + : pSliceHeaderStd( pSliceHeaderStd_ ) + , mbCount( mbCount_ ) + , refFinalList0EntryCount( static_cast( refFinalList0Entries_.size() ) ) + , pRefFinalList0Entries( refFinalList0Entries_.data() ) + , refFinalList1EntryCount( static_cast( refFinalList1Entries_.size() ) ) + , pRefFinalList1Entries( refFinalList1Entries_.data() ) + , precedingNaluBytes( precedingNaluBytes_ ) + , minQp( minQp_ ) + , maxQp( maxQp_ ) + {} +# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & + operator=( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264NaluSliceEXT & operator=( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeH264NaluSliceEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeH264NaluSliceEXT & + setPSliceHeaderStd( const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ ) VULKAN_HPP_NOEXCEPT + { + pSliceHeaderStd = pSliceHeaderStd_; + return *this; + } + + VideoEncodeH264NaluSliceEXT & setMbCount( uint32_t mbCount_ ) VULKAN_HPP_NOEXCEPT + { + mbCount = mbCount_; + return *this; + } + + VideoEncodeH264NaluSliceEXT & setRefFinalList0EntryCount( uint8_t refFinalList0EntryCount_ ) VULKAN_HPP_NOEXCEPT + { + refFinalList0EntryCount = refFinalList0EntryCount_; + return *this; + } + + VideoEncodeH264NaluSliceEXT & setPRefFinalList0Entries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT + { + pRefFinalList0Entries = pRefFinalList0Entries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264NaluSliceEXT & setRefFinalList0Entries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT + { + refFinalList0EntryCount = static_cast( refFinalList0Entries_.size() ); + pRefFinalList0Entries = refFinalList0Entries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH264NaluSliceEXT & setRefFinalList1EntryCount( uint8_t refFinalList1EntryCount_ ) VULKAN_HPP_NOEXCEPT + { + refFinalList1EntryCount = refFinalList1EntryCount_; + return *this; + } + + VideoEncodeH264NaluSliceEXT & setPRefFinalList1Entries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT + { + pRefFinalList1Entries = pRefFinalList1Entries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264NaluSliceEXT & setRefFinalList1Entries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT + { + refFinalList1EntryCount = static_cast( refFinalList1Entries_.size() ); + pRefFinalList1Entries = refFinalList1Entries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH264NaluSliceEXT & setPrecedingNaluBytes( uint32_t precedingNaluBytes_ ) VULKAN_HPP_NOEXCEPT + { + precedingNaluBytes = precedingNaluBytes_; + return *this; + } + + VideoEncodeH264NaluSliceEXT & setMinQp( uint8_t minQp_ ) VULKAN_HPP_NOEXCEPT + { + minQp = minQp_; + return *this; + } + + VideoEncodeH264NaluSliceEXT & setMaxQp( uint8_t maxQp_ ) VULKAN_HPP_NOEXCEPT + { + maxQp = maxQp_; + return *this; + } + + operator VkVideoEncodeH264NaluSliceEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264NaluSliceEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264NaluSliceEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSliceHeaderStd == rhs.pSliceHeaderStd ) && + ( mbCount == 
rhs.mbCount ) && ( refFinalList0EntryCount == rhs.refFinalList0EntryCount ) && + ( pRefFinalList0Entries == rhs.pRefFinalList0Entries ) && + ( refFinalList1EntryCount == rhs.refFinalList1EntryCount ) && + ( pRefFinalList1Entries == rhs.pRefFinalList1Entries ) && + ( precedingNaluBytes == rhs.precedingNaluBytes ) && ( minQp == rhs.minQp ) && ( maxQp == rhs.maxQp ); + } + + bool operator!=( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceEXT; + const void * pNext = {}; + const StdVideoEncodeH264SliceHeader * pSliceHeaderStd = {}; + uint32_t mbCount = {}; + uint8_t refFinalList0EntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries = {}; + uint8_t refFinalList1EntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries = {}; + uint32_t precedingNaluBytes = {}; + uint8_t minQp = {}; + uint8_t maxQp = {}; + }; + static_assert( sizeof( VideoEncodeH264NaluSliceEXT ) == sizeof( VkVideoEncodeH264NaluSliceEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264NaluSliceEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264ProfileEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileEXT( StdVideoH264ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT + : stdProfileIdc( stdProfileIdc_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264ProfileEXT( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264ProfileEXT( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264ProfileEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileEXT & + operator=( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264ProfileEXT & operator=( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeH264ProfileEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } + + operator VkVideoEncodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264ProfileEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ); + } + + bool operator!=( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif 
+ + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ProfileEXT; + const void * pNext = {}; + StdVideoH264ProfileIdc stdProfileIdc = {}; + }; + static_assert( sizeof( VideoEncodeH264ProfileEXT ) == sizeof( VkVideoEncodeH264ProfileEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264ProfileEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264SessionCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH264SessionCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT( + VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs_ = {}, + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , maxPictureSizeInMbs( maxPictureSizeInMbs_ ) + , pStdExtensionVersion( pStdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT( VideoEncodeH264SessionCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionCreateInfoEXT( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT & + operator=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionCreateInfoEXT & + operator=( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeH264SessionCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoEncodeH264SessionCreateInfoEXT & + setMaxPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & maxPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT + { + maxPictureSizeInMbs = maxPictureSizeInMbs_; + return *this; + } + + VideoEncodeH264SessionCreateInfoEXT & setPStdExtensionVersion( + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT + { + pStdExtensionVersion = pStdExtensionVersion_; + return *this; + } + + operator VkVideoEncodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( maxPictureSizeInMbs == rhs.maxPictureSizeInMbs ) && ( pStdExtensionVersion == rhs.pStdExtensionVersion ); + } + + bool operator!=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs = {}; + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {}; + }; + static_assert( sizeof( VideoEncodeH264SessionCreateInfoEXT ) == sizeof( VkVideoEncodeH264SessionCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264SessionParametersAddInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH264SessionParametersAddInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT( + uint32_t spsStdCount_ = {}, + const StdVideoH264SequenceParameterSet * pSpsStd_ = {}, + uint32_t ppsStdCount_ = {}, + const StdVideoH264PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT + : spsStdCount( spsStdCount_ ) + , pSpsStd( pSpsStd_ ) + , ppsStdCount( ppsStdCount_ ) + , pPpsStd( pPpsStd_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT( + VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersAddInfoEXT( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersAddInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {} ) + : spsStdCount( static_cast( spsStd_.size() ) ) + , pSpsStd( spsStd_.data() ) + , ppsStdCount( static_cast( ppsStd_.size() ) ) + , pPpsStd( ppsStd_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & + operator=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersAddInfoEXT & + operator=( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeH264SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + spsStdCount = spsStdCount_; + return *this; + } + + VideoEncodeH264SessionParametersAddInfoEXT & + setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pSpsStd = pSpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoEXT & + setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) + VULKAN_HPP_NOEXCEPT + { + spsStdCount = static_cast( spsStd_.size() ); + pSpsStd = spsStd_.data(); + return *this; + } +# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH264SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + ppsStdCount = ppsStdCount_; + return *this; + } + + VideoEncodeH264SessionParametersAddInfoEXT & + setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pPpsStd = pPpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoEXT & + setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) + VULKAN_HPP_NOEXCEPT + { + ppsStdCount = static_cast( ppsStd_.size() ); + pPpsStd = ppsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoEncodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersAddInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) && + ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); + } + + bool operator!=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT; + const void * pNext = {}; + uint32_t spsStdCount = {}; + const StdVideoH264SequenceParameterSet * pSpsStd = {}; + uint32_t ppsStdCount = {}; + const StdVideoH264PictureParameterSet * pPpsStd = {}; + }; + static_assert( sizeof( VideoEncodeH264SessionParametersAddInfoEXT ) == + sizeof( VkVideoEncodeH264SessionParametersAddInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionParametersAddInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264SessionParametersCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT( + uint32_t maxSpsStdCount_ = {}, + uint32_t maxPpsStdCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {} ) + VULKAN_HPP_NOEXCEPT + : maxSpsStdCount( maxSpsStdCount_ ) + , maxPpsStdCount( maxPpsStdCount_ ) + , pParametersAddInfo( pParametersAddInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT( + VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersCreateInfoEXT( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & + operator=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersCreateInfoEXT & + operator=( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeH264SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSpsStdCount = maxSpsStdCount_; + return *this; + } + + VideoEncodeH264SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPpsStdCount = maxPpsStdCount_; + return *this; + } + + VideoEncodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } + + operator VkVideoEncodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) && + ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); + } + + bool operator!=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT; + const void * pNext = {}; + uint32_t maxSpsStdCount = {}; + uint32_t maxPpsStdCount = {}; + 
const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {}; + }; + static_assert( sizeof( VideoEncodeH264SessionParametersCreateInfoEXT ) == + sizeof( VkVideoEncodeH264SessionParametersCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionParametersCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264VclFrameInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264VclFrameInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT( + uint8_t refDefaultFinalList0EntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries_ = {}, + uint8_t refDefaultFinalList1EntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries_ = {}, + uint32_t naluSliceEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : refDefaultFinalList0EntryCount( refDefaultFinalList0EntryCount_ ) + , pRefDefaultFinalList0Entries( pRefDefaultFinalList0Entries_ ) + , refDefaultFinalList1EntryCount( refDefaultFinalList1EntryCount_ ) + , pRefDefaultFinalList1Entries( pRefDefaultFinalList1Entries_ ) + , naluSliceEntryCount( naluSliceEntryCount_ ) + , pNaluSliceEntries( pNaluSliceEntries_ ) + , pCurrentPictureInfo( pCurrentPictureInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264VclFrameInfoEXT( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264VclFrameInfoEXT( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264VclFrameInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264VclFrameInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refDefaultFinalList0Entries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refDefaultFinalList1Entries_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + naluSliceEntries_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ = {} ) + : refDefaultFinalList0EntryCount( static_cast( refDefaultFinalList0Entries_.size() ) ) + , pRefDefaultFinalList0Entries( refDefaultFinalList0Entries_.data() ) + , refDefaultFinalList1EntryCount( static_cast( refDefaultFinalList1Entries_.size() ) ) + , pRefDefaultFinalList1Entries( refDefaultFinalList1Entries_.data() ) + , naluSliceEntryCount( static_cast( naluSliceEntries_.size() ) ) + , pNaluSliceEntries( naluSliceEntries_.data() ) + , pCurrentPictureInfo( pCurrentPictureInfo_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & + operator=( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264VclFrameInfoEXT & operator=( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeH264VclFrameInfoEXT & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeH264VclFrameInfoEXT & + setRefDefaultFinalList0EntryCount( uint8_t refDefaultFinalList0EntryCount_ ) VULKAN_HPP_NOEXCEPT + { + refDefaultFinalList0EntryCount = refDefaultFinalList0EntryCount_; + return *this; + } + + VideoEncodeH264VclFrameInfoEXT & setPRefDefaultFinalList0Entries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT + { + pRefDefaultFinalList0Entries = pRefDefaultFinalList0Entries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264VclFrameInfoEXT & setRefDefaultFinalList0Entries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refDefaultFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT + { + refDefaultFinalList0EntryCount = static_cast( refDefaultFinalList0Entries_.size() ); + pRefDefaultFinalList0Entries = refDefaultFinalList0Entries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH264VclFrameInfoEXT & + setRefDefaultFinalList1EntryCount( uint8_t refDefaultFinalList1EntryCount_ ) VULKAN_HPP_NOEXCEPT + { + refDefaultFinalList1EntryCount = refDefaultFinalList1EntryCount_; + return *this; + } + + VideoEncodeH264VclFrameInfoEXT & setPRefDefaultFinalList1Entries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT + { + pRefDefaultFinalList1Entries = pRefDefaultFinalList1Entries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264VclFrameInfoEXT & setRefDefaultFinalList1Entries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refDefaultFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT + { + refDefaultFinalList1EntryCount = static_cast( refDefaultFinalList1Entries_.size() ); + pRefDefaultFinalList1Entries = refDefaultFinalList1Entries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceEntryCount = naluSliceEntryCount_; + return *this; + } + + VideoEncodeH264VclFrameInfoEXT & setPNaluSliceEntries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT + { + pNaluSliceEntries = pNaluSliceEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceEntryCount = static_cast( naluSliceEntries_.size() ); + pNaluSliceEntries = naluSliceEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH264VclFrameInfoEXT & setPCurrentPictureInfo( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pCurrentPictureInfo = pCurrentPictureInfo_; + return *this; + } + + operator VkVideoEncodeH264VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264VclFrameInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( 
refDefaultFinalList0EntryCount == rhs.refDefaultFinalList0EntryCount ) && + ( pRefDefaultFinalList0Entries == rhs.pRefDefaultFinalList0Entries ) && + ( refDefaultFinalList1EntryCount == rhs.refDefaultFinalList1EntryCount ) && + ( pRefDefaultFinalList1Entries == rhs.pRefDefaultFinalList1Entries ) && + ( naluSliceEntryCount == rhs.naluSliceEntryCount ) && ( pNaluSliceEntries == rhs.pNaluSliceEntries ) && + ( pCurrentPictureInfo == rhs.pCurrentPictureInfo ); + } + + bool operator!=( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264VclFrameInfoEXT; + const void * pNext = {}; + uint8_t refDefaultFinalList0EntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries = {}; + uint8_t refDefaultFinalList1EntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries = {}; + uint32_t naluSliceEntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo = {}; + }; + static_assert( sizeof( VideoEncodeH264VclFrameInfoEXT ) == sizeof( VkVideoEncodeH264VclFrameInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264VclFrameInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeRateControlInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeRateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ = + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone, + uint32_t averageBitrate_ = {}, + uint16_t peakToAverageBitrateRatio_ = {}, + uint16_t frameRateNumerator_ = {}, + uint16_t frameRateDenominator_ = {}, + uint32_t virtualBufferSizeInMs_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , rateControlMode( rateControlMode_ ) + , averageBitrate( averageBitrate_ ) + , peakToAverageBitrateRatio( peakToAverageBitrateRatio_ ) + , frameRateNumerator( frameRateNumerator_ ) + , frameRateDenominator( frameRateDenominator_ ) + , virtualBufferSizeInMs( virtualBufferSizeInMs_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeRateControlInfoKHR( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeRateControlInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & + operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeRateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeRateControlInfoKHR & + 
setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoEncodeRateControlInfoKHR & setRateControlMode( + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT + { + rateControlMode = rateControlMode_; + return *this; + } + + VideoEncodeRateControlInfoKHR & setAverageBitrate( uint32_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT + { + averageBitrate = averageBitrate_; + return *this; + } + + VideoEncodeRateControlInfoKHR & + setPeakToAverageBitrateRatio( uint16_t peakToAverageBitrateRatio_ ) VULKAN_HPP_NOEXCEPT + { + peakToAverageBitrateRatio = peakToAverageBitrateRatio_; + return *this; + } + + VideoEncodeRateControlInfoKHR & setFrameRateNumerator( uint16_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT + { + frameRateNumerator = frameRateNumerator_; + return *this; + } + + VideoEncodeRateControlInfoKHR & setFrameRateDenominator( uint16_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT + { + frameRateDenominator = frameRateDenominator_; + return *this; + } + + VideoEncodeRateControlInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT + { + virtualBufferSizeInMs = virtualBufferSizeInMs_; + return *this; + } + + operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeRateControlInfoKHR const & ) const = default; +# else + bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( rateControlMode == rhs.rateControlMode ) && ( averageBitrate == rhs.averageBitrate ) && + ( peakToAverageBitrateRatio == rhs.peakToAverageBitrateRatio ) && + ( frameRateNumerator == rhs.frameRateNumerator ) && ( frameRateDenominator == rhs.frameRateDenominator ) && + ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs ); + } + + bool operator!=( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode = + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone; + uint32_t averageBitrate = {}; + uint16_t peakToAverageBitrateRatio = {}; + uint16_t frameRateNumerator = {}; + uint16_t frameRateDenominator = {}; + uint32_t virtualBufferSizeInMs = {}; + }; + static_assert( sizeof( VideoEncodeRateControlInfoKHR ) == sizeof( VkVideoEncodeRateControlInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeRateControlInfoKHR>
+  {
+    using Type = VideoEncodeRateControlInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoQueueFamilyProperties2KHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoQueueFamilyProperties2KHR;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoQueueFamilyProperties2KHR(
+      VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {} ) VULKAN_HPP_NOEXCEPT
+      : videoCodecOperations( videoCodecOperations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      VideoQueueFamilyProperties2KHR( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoQueueFamilyProperties2KHR( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoQueueFamilyProperties2KHR( *reinterpret_cast<VideoQueueFamilyProperties2KHR const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoQueueFamilyProperties2KHR &
+      operator=( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoQueueFamilyProperties2KHR & operator=( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+    VideoQueueFamilyProperties2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoQueueFamilyProperties2KHR & setVideoCodecOperations(
+      VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoCodecOperations = videoCodecOperations_;
+      return *this;
+    }
+
+    operator VkVideoQueueFamilyProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoQueueFamilyProperties2KHR *>( this );
+    }
+
+    operator VkVideoQueueFamilyProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoQueueFamilyProperties2KHR *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoQueueFamilyProperties2KHR const & ) const = default;
+#  else
+    bool operator==( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperations == rhs.videoCodecOperations );
+    }
+
+    bool operator!=( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType               sType = StructureType::eVideoQueueFamilyProperties2KHR;
+    void *                                            pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {};
+  };
+  static_assert( sizeof( VideoQueueFamilyProperties2KHR ) == sizeof( VkVideoQueueFamilyProperties2KHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoQueueFamilyProperties2KHR>::value,
+                 "struct wrapper is not a standard layout!"
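// Illustrative usage sketch, not part of the generated header: VideoQueueFamilyProperties2KHR is a
// read-only output structure that is chained behind QueueFamilyProperties2 when querying a physical
// device, e.g. to find a queue family that can encode H.264. The chain-returning
// getQueueFamilyProperties2 overload and the eEncodeH264EXT codec bit are assumptions based on the
// beta video extensions of this header revision.
inline uint32_t exampleFindH264EncodeQueueFamily( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
{
  using Chain = VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2,
                                                     VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR>;
  std::vector<Chain> families = physicalDevice.getQueueFamilyProperties2<Chain>();
  for ( uint32_t i = 0; i < static_cast<uint32_t>( families.size() ); ++i )
  {
    auto const & video = families[i].get<VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR>();
    if ( video.videoCodecOperations & VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eEncodeH264EXT )
      return i;  // first queue family advertising H.264 encode
  }
  return VK_QUEUE_FAMILY_IGNORED;  // no video-encode capable family found
}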
); + + template <> + struct CppType + { + using Type = VideoQueueFamilyProperties2KHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + struct WaylandSurfaceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, + struct wl_display * display_ = {}, + struct wl_surface * surface_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , display( display_ ) + , surface( surface_ ) + {} + + VULKAN_HPP_CONSTEXPR + WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : WaylandSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & + operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + WaylandSurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT + { + display = display_; + return *this; + } + + WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } + + operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WaylandSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( display == rhs.display ) && + ( surface == rhs.surface ); + } + + bool operator!=( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {}; + struct wl_display * display = {}; + struct wl_surface * surface = {}; + }; + static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
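// Illustrative usage sketch, not part of the generated header: creating a presentation surface from
// existing Wayland client objects. Assumes a vk::Instance created with the VK_KHR_wayland_surface
// extension enabled and wl_display / wl_surface handles obtained by the application elsewhere.
inline VULKAN_HPP_NAMESPACE::SurfaceKHR exampleCreateWaylandSurface( VULKAN_HPP_NAMESPACE::Instance instance,
                                                                     struct wl_display *            display,
                                                                     struct wl_surface *            surface )
{
  VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR createInfo{};
  createInfo.setDisplay( display ).setSurface( surface );  // flags stay default-initialized
  return instance.createWaylandSurfaceKHR( createInfo );   // throws vk::SystemError on failure
}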
); + + template <> + struct CppType + { + using Type = WaylandSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct Win32KeyedMutexAcquireReleaseInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, + const uint64_t * pAcquireKeys_ = {}, + const uint32_t * pAcquireTimeouts_ = {}, + uint32_t releaseCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, + const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT + : acquireCount( acquireCount_ ) + , pAcquireSyncs( pAcquireSyncs_ ) + , pAcquireKeys( pAcquireKeys_ ) + , pAcquireTimeouts( pAcquireTimeouts_ ) + , releaseCount( releaseCount_ ) + , pReleaseSyncs( pReleaseSyncs_ ) + , pReleaseKeys( pReleaseKeys_ ) + {} + + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeouts_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + releaseSyncs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ = {} ) + : acquireCount( static_cast( acquireSyncs_.size() ) ) + , pAcquireSyncs( acquireSyncs_.data() ) + , pAcquireKeys( acquireKeys_.data() ) + , pAcquireTimeouts( acquireTimeouts_.data() ) + , releaseCount( static_cast( releaseSyncs_.size() ) ) + , pReleaseSyncs( releaseSyncs_.data() ) + , pReleaseKeys( releaseKeys_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() ); + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() ); + VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() ); +# else + if ( acquireSyncs_.size() != acquireKeys_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" ); + } + if ( acquireSyncs_.size() != acquireTimeouts_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" ); + } + if ( acquireKeys_.size() != acquireTimeouts_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() ); +# else + if ( releaseSyncs_.size() != releaseKeys_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + 
"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & + operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Win32KeyedMutexAcquireReleaseInfoKHR & + operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = acquireCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR & + setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireSyncs = pAcquireSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_ ) + VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireSyncs_.size() ); + pAcquireSyncs = acquireSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireKeys = pAcquireKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireKeys_.size() ); + pAcquireKeys = acquireKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireTimeouts = pAcquireTimeouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireTimeouts_.size() ); + pAcquireTimeouts = acquireTimeouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = releaseCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR & + setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pReleaseSyncs = pReleaseSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ ) + VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseSyncs_.size() ); + pReleaseSyncs = releaseSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + pReleaseKeys = pReleaseKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseKeys_.size() ); + pReleaseKeys = releaseKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const & ) const = default; +# else + bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && + ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) && + ( pAcquireTimeouts == rhs.pAcquireTimeouts ) && ( releaseCount == rhs.releaseCount ) && + ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys ); + } + + bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; + const void * pNext = {}; + uint32_t acquireCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {}; + const uint64_t * pAcquireKeys = {}; + const uint32_t * pAcquireTimeouts = {}; + uint32_t releaseCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {}; + const uint64_t * pReleaseKeys = {}; + }; + static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
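// Illustrative usage sketch, not part of the generated header: Win32KeyedMutexAcquireReleaseInfoKHR
// is not passed to a command directly; it extends VkSubmitInfo through the pNext chain. The
// ArrayProxy constructor above derives acquireCount / releaseCount from the spans and asserts (or
// throws LogicError) when the parallel arrays differ in size; it also rejects temporaries, so the
// keys and timeout below are named lvalues. Handles and key values are placeholders.
inline void exampleSubmitWithKeyedMutex( VULKAN_HPP_NAMESPACE::Queue         queue,
                                         VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer,
                                         VULKAN_HPP_NAMESPACE::DeviceMemory  sharedMemory )
{
  uint64_t acquireKey = 0;
  uint64_t releaseKey = 1;
  uint32_t timeoutMs  = 1000;

  VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR keyedMutexInfo(
    sharedMemory, acquireKey, timeoutMs,   // acquire the keyed mutex before the submitted work runs
    sharedMemory, releaseKey );            // release it with a new key afterwards

  VULKAN_HPP_NAMESPACE::SubmitInfo submitInfo{};
  submitInfo.setCommandBuffers( commandBuffer ).setPNext( &keyedMutexInfo );
  queue.submit( submitInfo );  // throws vk::SystemError on failure
}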
); + + template <> + struct CppType + { + using Type = Win32KeyedMutexAcquireReleaseInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct Win32KeyedMutexAcquireReleaseInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, + const uint64_t * pAcquireKeys_ = {}, + const uint32_t * pAcquireTimeoutMilliseconds_ = {}, + uint32_t releaseCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, + const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT + : acquireCount( acquireCount_ ) + , pAcquireSyncs( pAcquireSyncs_ ) + , pAcquireKeys( pAcquireKeys_ ) + , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ) + , releaseCount( releaseCount_ ) + , pReleaseSyncs( pReleaseSyncs_ ) + , pReleaseKeys( pReleaseKeys_ ) + {} + + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeoutMilliseconds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + releaseSyncs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ = {} ) + : acquireCount( static_cast( acquireSyncs_.size() ) ) + , pAcquireSyncs( acquireSyncs_.data() ) + , pAcquireKeys( acquireKeys_.data() ) + , pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() ) + , releaseCount( static_cast( releaseSyncs_.size() ) ) + , pReleaseSyncs( releaseSyncs_.data() ) + , pReleaseKeys( releaseKeys_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() ); + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() ); + VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() ); +# else + if ( acquireSyncs_.size() != acquireKeys_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" ); + } + if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" ); + } + if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() ); +# else + if ( releaseSyncs_.size() != releaseKeys_.size() ) + { + 
throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & + operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Win32KeyedMutexAcquireReleaseInfoNV & + operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = acquireCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV & + setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireSyncs = pAcquireSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_ ) + VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireSyncs_.size() ); + pAcquireSyncs = acquireSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireKeys = pAcquireKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireKeys_.size() ); + pAcquireKeys = acquireKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + Win32KeyedMutexAcquireReleaseInfoNV & + setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeoutMilliseconds_ ) + VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireTimeoutMilliseconds_.size() ); + pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = releaseCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV & + setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pReleaseSyncs = pReleaseSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ ) + VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseSyncs_.size() ); + pReleaseSyncs = releaseSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) 
VULKAN_HPP_NOEXCEPT + { + pReleaseKeys = pReleaseKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseKeys_.size() ); + pReleaseKeys = releaseKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const & ) const = default; +# else + bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && + ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) && + ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) && + ( releaseCount == rhs.releaseCount ) && ( pReleaseSyncs == rhs.pReleaseSyncs ) && + ( pReleaseKeys == rhs.pReleaseKeys ); + } + + bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; + const void * pNext = {}; + uint32_t acquireCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {}; + const uint64_t * pAcquireKeys = {}; + const uint32_t * pAcquireTimeoutMilliseconds = {}; + uint32_t releaseCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {}; + const uint64_t * pReleaseKeys = {}; + }; + static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
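// Illustrative usage sketch, not part of the generated header: the NV variant comes from the older
// VK_NV_win32_keyed_mutex extension and mirrors the KHR struct, differing mainly in the
// pAcquireTimeoutMilliseconds member name. When the parallel arrays already live elsewhere, the raw
// pointer/count setters can be used instead of the ArrayProxy constructor; the arrays must remain
// valid until the submission carrying this struct has been consumed.
inline VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV
  exampleKeyedMutexInfoNV( const VULKAN_HPP_NAMESPACE::DeviceMemory * syncs,
                           const uint64_t *                           acquireKeys,
                           const uint32_t *                           timeoutsMs,
                           const uint64_t *                           releaseKeys,
                           uint32_t                                   count )
{
  return VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV{}
    .setAcquireCount( count )
    .setPAcquireSyncs( syncs )
    .setPAcquireKeys( acquireKeys )
    .setPAcquireTimeoutMilliseconds( timeoutsMs )
    .setReleaseCount( count )
    .setPReleaseSyncs( syncs )
    .setPReleaseKeys( releaseKeys );
}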
); + + template <> + struct CppType + { + using Type = Win32KeyedMutexAcquireReleaseInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct Win32SurfaceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, + HINSTANCE hinstance_ = {}, + HWND hwnd_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , hinstance( hinstance_ ) + , hwnd( hwnd_ ) + {} + + VULKAN_HPP_CONSTEXPR + Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : Win32SurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & + operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT + { + hinstance = hinstance_; + return *this; + } + + Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT + { + hwnd = hwnd_; + return *this; + } + + operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32SurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( hinstance == rhs.hinstance ) && ( hwnd == rhs.hwnd ); + } + + bool operator!=( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {}; + HINSTANCE hinstance = {}; + HWND hwnd = {}; + }; + static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
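// Illustrative usage sketch, not part of the generated header: creating a presentation surface for a
// Win32 window. Assumes a vk::Instance created with VK_KHR_win32_surface enabled and a valid
// HINSTANCE / HWND pair from the application's window setup (e.g. the WinMain arguments and the
// result of CreateWindowEx).
inline VULKAN_HPP_NAMESPACE::SurfaceKHR exampleCreateWin32Surface( VULKAN_HPP_NAMESPACE::Instance instance,
                                                                   HINSTANCE                      hinstance,
                                                                   HWND                           hwnd )
{
  VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR createInfo{};
  createInfo.setHinstance( hinstance ).setHwnd( hwnd );
  return instance.createWin32SurfaceKHR( createInfo );  // throws vk::SystemError on failure
}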
); + + template <> + struct CppType + { + using Type = Win32SurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct WriteDescriptorSetAccelerationStructureKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eWriteDescriptorSetAccelerationStructureKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( + uint32_t accelerationStructureCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructureCount( accelerationStructureCount_ ) + , pAccelerationStructures( pAccelerationStructures_ ) + {} + + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( + WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : WriteDescriptorSetAccelerationStructureKHR( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + accelerationStructures_ ) + : accelerationStructureCount( static_cast( accelerationStructures_.size() ) ) + , pAccelerationStructures( accelerationStructures_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & + operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetAccelerationStructureKHR & + operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + WriteDescriptorSetAccelerationStructureKHR & + setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = accelerationStructureCount_; + return *this; + } + + WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + pAccelerationStructures = pAccelerationStructures_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + accelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = static_cast( accelerationStructures_.size() ); + pAccelerationStructures = accelerationStructures_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const & ) const = default; +#else + bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && + ( accelerationStructureCount == rhs.accelerationStructureCount ) && + ( pAccelerationStructures == rhs.pAccelerationStructures ); + } + + bool operator!=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR; + const void * pNext = {}; + uint32_t accelerationStructureCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {}; + }; + static_assert( sizeof( WriteDescriptorSetAccelerationStructureKHR ) == + sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = WriteDescriptorSetAccelerationStructureKHR; + }; + + struct WriteDescriptorSetAccelerationStructureNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eWriteDescriptorSetAccelerationStructureNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( + uint32_t accelerationStructureCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructureCount( accelerationStructureCount_ ) + , pAccelerationStructures( pAccelerationStructures_ ) + {} + + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( + WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : WriteDescriptorSetAccelerationStructureNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + accelerationStructures_ ) + : accelerationStructureCount( static_cast( accelerationStructures_.size() ) ) + , pAccelerationStructures( accelerationStructures_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & + operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetAccelerationStructureNV & + operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + WriteDescriptorSetAccelerationStructureNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + WriteDescriptorSetAccelerationStructureNV & + setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = accelerationStructureCount_; + return *this; + } + + WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + pAccelerationStructures = pAccelerationStructures_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureNV & setAccelerationStructures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + 
accelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = static_cast( accelerationStructures_.size() ); + pAccelerationStructures = accelerationStructures_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetAccelerationStructureNV const & ) const = default; +#else + bool operator==( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( accelerationStructureCount == rhs.accelerationStructureCount ) && + ( pAccelerationStructures == rhs.pAccelerationStructures ); + } + + bool operator!=( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV; + const void * pNext = {}; + uint32_t accelerationStructureCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures = {}; + }; + static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == + sizeof( VkWriteDescriptorSetAccelerationStructureNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = WriteDescriptorSetAccelerationStructureNV; + }; + + struct WriteDescriptorSetInlineUniformBlockEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eWriteDescriptorSetInlineUniformBlockEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = {}, + const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : dataSize( dataSize_ ) + , pData( pData_ ) + {} + + VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteDescriptorSetInlineUniformBlockEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + WriteDescriptorSetInlineUniformBlockEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) + : dataSize( static_cast( data_.size() * sizeof( T ) ) ), pData( data_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlockEXT & + operator=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetInlineUniformBlockEXT & + operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + WriteDescriptorSetInlineUniformBlockEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + WriteDescriptorSetInlineUniformBlockEXT & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize 
= dataSize_; + return *this; + } + + WriteDescriptorSetInlineUniformBlockEXT & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + WriteDescriptorSetInlineUniformBlockEXT & + setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = static_cast( data_.size() * sizeof( T ) ); + pData = data_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkWriteDescriptorSetInlineUniformBlockEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetInlineUniformBlockEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetInlineUniformBlockEXT const & ) const = default; +#else + bool operator==( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); + } + + bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT; + const void * pNext = {}; + uint32_t dataSize = {}; + const void * pData = {}; + }; + static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == + sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = WriteDescriptorSetInlineUniformBlockEXT; + }; + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + struct XcbSurfaceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, + xcb_connection_t * connection_ = {}, + xcb_window_t window_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , connection( connection_ ) + , window( window_ ) + {} + + VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : XcbSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & + operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT + { + connection = connection_; + return *this; + } + + XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT 
+ { + window = window_; + return *this; + } + + operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( XcbSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( connection == rhs.connection ) && ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 ); + } + + bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {}; + xcb_connection_t * connection = {}; + xcb_window_t window = {}; + }; + static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = XcbSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + struct XlibSurfaceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, + Display * dpy_ = {}, + Window window_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , dpy( dpy_ ) + , window( window_ ) + {} + + VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : XlibSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & + operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + XlibSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + XlibSurfaceCreateInfoKHR & setDpy( Display * dpy_ ) VULKAN_HPP_NOEXCEPT + { + dpy = dpy_; + return *this; + } + + XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } + + operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( XlibSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dpy == rhs.dpy ) && + ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 ); + } + + bool operator!=( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {}; + Display * dpy = {}; + Window window = {}; + }; + static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = XlibSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + + class DebugReportCallbackEXT + { + public: + using CType = VkDebugReportCallbackEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + + public: + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() = default; + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT + : m_debugReportCallbackEXT( debugReportCallbackEXT ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DebugReportCallbackEXT & operator=( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT + { + m_debugReportCallbackEXT = debugReportCallbackEXT; + return *this; + } +#endif + + DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_debugReportCallbackEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugReportCallbackEXT const & ) const = default; +#else + bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; + } + + bool operator!=( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; + } + + bool operator<( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT == VK_NULL_HANDLE; + } + + private: + VkDebugReportCallbackEXT m_debugReportCallbackEXT = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DebugUtilsMessengerEXT + { + public: + using CType = VkDebugUtilsMessengerEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() = default; + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT + : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DebugUtilsMessengerEXT & operator=( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; + return *this; + } +#endif + + DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsMessengerEXT const & ) const = default; +#else + bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; + } + + bool operator!=( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; + } + + bool operator<( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; + } + + private: + VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + class Instance; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDebugReportCallbackEXT = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDebugUtilsMessengerEXT = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSurfaceKHR = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Instance + { + public: + using CType = VkInstance; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eInstance; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; + + public: + VULKAN_HPP_CONSTEXPR Instance() = default; + VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT : m_instance( instance ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Instance & operator=( VkInstance instance ) VULKAN_HPP_NOEXCEPT + { + m_instance = instance; + return *this; + } +#endif + + Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_instance = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Instance const & ) const = default; +#else + bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance == rhs.m_instance; + } + + bool operator!=( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance != rhs.m_instance; + } + + bool operator<( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance < rhs.m_instance; + } +#endif + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template + VULKAN_HPP_NODISCARD Result + createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT( + const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createDebugReportCallbackEXT( + const DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE + typename ResultValueType>::type + createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createDebugUtilsMessengerEXT( + const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE + typename ResultValueType>::type + createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + template + VULKAN_HPP_NODISCARD Result + createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + template + VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR( + const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + template + VULKAN_HPP_NODISCARD Result + createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + template + VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + template + VULKAN_HPP_NODISCARD Result + createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + template + VULKAN_HPP_NODISCARD Result + createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template + VULKAN_HPP_NODISCARD Result + createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createScreenSurfaceQNXUnique( const ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +#if defined( VK_USE_PLATFORM_GGP ) + template + VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_GGP*/ + +#if defined( VK_USE_PLATFORM_VI_NN ) + template + VULKAN_HPP_NODISCARD Result + createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + template + VULKAN_HPP_NODISCARD Result + 
createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD Result + createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + template + VULKAN_HPP_NODISCARD Result + createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( 
VK_USE_PLATFORM_XLIB_KHR ) + template + VULKAN_HPP_NODISCARD Result + createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugReportCallbackEXT( + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void 
destroyDebugUtilsMessengerEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceGroupPropertiesAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + 
VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceGroupPropertiesAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, + VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + PFN_vkVoidFunction + getProcAddr( const char * pName, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction + getProcAddr( const std::string & name, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT + { + return m_instance; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_instance != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_instance == VK_NULL_HANDLE; + } + + private: + VkInstance m_instance = {}; + }; + static_assert( sizeof( 
VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueInstance = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + template + VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createInstance( const InstanceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createInstanceUnique( const InstanceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( + const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = ExtensionPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + enumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = LayerPropertiesAllocator, + typename 
std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateInstanceVersion( + uint32_t * pApiVersion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pInstance ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + createInstance( const InstanceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) + { + VULKAN_HPP_NAMESPACE::Instance instance; + Result result = static_cast( + d.vkCreateInstance( reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &instance ) ) ); + return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createInstanceUnique( const InstanceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) + { + VULKAN_HPP_NAMESPACE::Instance instance; + Result result = static_cast( + d.vkCreateInstance( reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &instance ) ) ); + ObjectDestroy deleter( allocator, d ); + return createResultValue( + result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + enumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumerateInstanceExtensionProperties( + pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, Dispatch const & d ) + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? 
layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d ) + { + std::vector properties( extensionPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + enumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + enumerateInstanceLayerProperties( Dispatch const & d ) + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceLayerProperties( + &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) + { + std::vector properties( layerPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceLayerProperties( + &propertyCount, reinterpret_cast( 
properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumerateInstanceVersion( pApiVersion ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type enumerateInstanceVersion( Dispatch const & d ) + { + uint32_t apiVersion; + Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); + return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( + const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const + { + Result result = static_cast( + d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginConditionalRenderingEXT( + m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginConditionalRenderingEXT( + m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginQuery( + m_commandBuffer, static_cast( queryPool ), query, 
static_cast( flags ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginQueryIndexedEXT( + m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + static_cast( contents ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + static_cast( contents ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass2( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass2( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBufferCount, + reinterpret_cast( pCounterBuffers ), + reinterpret_cast( pCounterBufferOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( + uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy 
const & counterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_INLINE void + CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast( pBeginInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast( &beginInfo ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSetCount, + reinterpret_cast( pDescriptorSets ), + dynamicOffsetCount, + pDynamicOffsets ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + ArrayProxy const & descriptorSets, + ArrayProxy const & dynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ), + dynamicOffsets.size(), + dynamicOffsets.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindIndexBuffer( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( indexType ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindPipeline( + m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + template + VULKAN_HPP_INLINE void + 
CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( pipeline ), + groupIndex ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindShadingRateImageNV( + m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ), + reinterpret_cast( pSizes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindVertexBuffers( m_commandBuffer, + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ) 
); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ), + reinterpret_cast( pSizes ), + reinterpret_cast( pStrides ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes, + ArrayProxy const & strides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBlitImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ), + static_cast( filter ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBlitImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ), + static_cast( filter ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const 
VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR * pBlitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( pBlitImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( pInfo ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( &info ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructuresIndirectKHR( + m_commandBuffer, + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( pIndirectDeviceAddresses ), + pIndirectStrides, + ppMaxPrimitiveCounts ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( + ArrayProxy const & infos, + ArrayProxy const & indirectDeviceAddresses, + ArrayProxy const & indirectStrides, + ArrayProxy const & pMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); + VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); + VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); +# else + if ( infos.size() != indirectDeviceAddresses.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + 
"::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); + } + if ( infos.size() != indirectStrides.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); + } + if ( infos.size() != pMaxPrimitiveCounts.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresIndirectKHR( + m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( indirectDeviceAddresses.data() ), + indirectStrides.data(), + pMaxPrimitiveCounts.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructuresKHR( + m_commandBuffer, + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppBuildRangeInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresKHR( + m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, + uint32_t rectCount, + const VULKAN_HPP_NAMESPACE::ClearRect * pRects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearAttachments( m_commandBuffer, + attachmentCount, + reinterpret_cast( pAttachments ), + rectCount, + reinterpret_cast( pRects ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::clearAttachments( ArrayProxy const & attachments, + ArrayProxy const & rects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearAttachments( m_commandBuffer, + attachments.size(), + reinterpret_cast( attachments.data() ), + rects.size(), + reinterpret_cast( rects.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearColorImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( pColor ), + rangeCount, + reinterpret_cast( 
pRanges ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearColorValue & color, + ArrayProxy const & ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearColorImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &color ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearDepthStencilImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( pDepthStencil ), + rangeCount, + reinterpret_cast( pRanges ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearDepthStencilValue & depthStencil, + ArrayProxy const & ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearDepthStencilImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &depthStencil ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_INLINE void + CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdControlVideoCodingKHR( m_commandBuffer, + reinterpret_cast( pCodingControlInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdControlVideoCodingKHR( m_commandBuffer, + reinterpret_cast( &codingControlInfo ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, + reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, + static_cast( dst ), + static_cast( src ), + static_cast( mode ) ); + } + + 
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR(
+    const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyAccelerationStructureToMemoryKHR(
+      m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyAccelerationStructureToMemoryKHR(
+      m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+                                                    VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+                                                    uint32_t regionCount,
+                                                    const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBuffer( m_commandBuffer,
+                       static_cast<VkBuffer>( srcBuffer ),
+                       static_cast<VkBuffer>( dstBuffer ),
+                       regionCount,
+                       reinterpret_cast<const VkBufferCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+                                                    VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBuffer( m_commandBuffer,
+                       static_cast<VkBuffer>( srcBuffer ),
+                       static_cast<VkBuffer>( dstBuffer ),
+                       regions.size(),
+                       reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR * pCopyBufferInfo,
+                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( pCopyBufferInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( &copyBufferInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+                                                           VULKAN_HPP_NAMESPACE::Image dstImage,
+                                                           VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+                                                           uint32_t regionCount,
+                                                           const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBufferToImage( m_commandBuffer,
+                              static_cast<VkBuffer>( srcBuffer ),
+                              static_cast<VkImage>( dstImage ),
+                              static_cast<VkImageLayout>( dstImageLayout ),
+                              regionCount,
+                              reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+                                      VULKAN_HPP_NAMESPACE::Image dstImage,
+                                      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
+                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBufferToImage( m_commandBuffer,
+                              static_cast<VkBuffer>( srcBuffer ),
+                              static_cast<VkImage>( dstImage ),
+                              static_cast<VkImageLayout>( dstImageLayout ),
+                              regions.size(),
+                              reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR(
+    const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR * pCopyBufferToImageInfo,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBufferToImage2KHR(
+      m_commandBuffer,
+      reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( pCopyBufferToImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer,
+                                  reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( &copyBufferToImageInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+                                                   VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+                                                   VULKAN_HPP_NAMESPACE::Image dstImage,
+                                                   VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+                                                   uint32_t regionCount,
+                                                   const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
+                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImage( m_commandBuffer,
+                      static_cast<VkImage>( srcImage ),
+                      static_cast<VkImageLayout>( srcImageLayout ),
+                      static_cast<VkImage>( dstImage ),
+                      static_cast<VkImageLayout>( dstImageLayout ),
+                      regionCount,
+                      reinterpret_cast<const VkImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+                                                   VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+                                                   VULKAN_HPP_NAMESPACE::Image dstImage,
+                                                   VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+                                                   ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
+                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImage( m_commandBuffer,
+                      static_cast<VkImage>( srcImage ),
+                      static_cast<VkImageLayout>( srcImageLayout ),
+                      static_cast<VkImage>( dstImage ),
+                      static_cast<VkImageLayout>( dstImageLayout ),
+                      regions.size(),
+                      reinterpret_cast<const VkImageCopy *>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR * pCopyImageInfo,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( pCopyImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( &copyImageInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
+                                                           VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+                                                           VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+                                                           uint32_t regionCount,
+                                                           const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImageToBuffer( m_commandBuffer,
+                              static_cast<VkImage>( srcImage ),
+                              static_cast<VkImageLayout>( srcImageLayout ),
+                              static_cast<VkBuffer>( dstBuffer ),
+                              regionCount,
+                              reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
+                                      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+                                      VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
+                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImageToBuffer( m_commandBuffer,
+                              static_cast<VkImage>( srcImage ),
+                              static_cast<VkImageLayout>( srcImageLayout ),
+                              static_cast<VkBuffer>( dstBuffer ),
+                              regions.size(),
+                              reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR(
+    const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR * pCopyImageToBufferInfo,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
+                                  reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( pCopyImageToBufferInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
+                                  reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( &copyImageToBufferInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR(
+    const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyMemoryToAccelerationStructureKHR(
+      m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyMemoryToAccelerationStructureKHR(
+      m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                              uint32_t firstQuery,
+                                                              uint32_t queryCount,
+                                                              VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+                                                              VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+                                                              VULKAN_HPP_NAMESPACE::DeviceSize stride,
+                                                              VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyQueryPoolResults( m_commandBuffer,
+                                 static_cast<VkQueryPool>( queryPool ),
+                                 firstQuery,
+                                 queryCount,
+                                 static_cast<VkBuffer>( dstBuffer ),
+                                 static_cast<VkDeviceSize>( dstOffset ),
+                                 static_cast<VkDeviceSize>( stride ),
+                                 static_cast<VkQueryResultFlags>( flags ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
+                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
+                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
+                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast( pFrameInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VideoDecodeInfoKHR & frameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast( &frameInfo ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatchBaseKHR( + m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndexedIndirect( + m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + 
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndirect( + m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, + instanceCount, + firstInstance, + static_cast( counterBuffer ), + static_cast( counterBufferOffset ), + counterOffset, + vertexStride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndirectCount( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndirectCountAMD( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + 
VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndirectCountKHR( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawMeshTasksIndirectNV( + m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, + uint32_t firstTask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( pEncodeInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( &encodeInfo ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, index ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndRenderPass( m_commandBuffer ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + 
{ + d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBufferCount, + reinterpret_cast( pCounterBuffers ), + reinterpret_cast( pCounterBufferOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( + uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_INLINE void + CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast( pEndCodingInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast( &endCodingInfo ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdExecuteCommands( + m_commandBuffer, commandBufferCount, reinterpret_cast( pCommandBuffers ) ); 
+ } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::executeCommands( ArrayProxy const & commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdExecuteCommands( + m_commandBuffer, commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( + VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, + static_cast( isPreprocessed ), + reinterpret_cast( pGeneratedCommandsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, + static_cast( isPreprocessed ), + reinterpret_cast( &generatedCommandsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdFillBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdNextSubpass2( m_commandBuffer, + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdNextSubpass2( m_commandBuffer, + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
d.vkCmdNextSubpass2KHR( m_commandBuffer, + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdNextSubpass2KHR( m_commandBuffer, + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPipelineBarrier( m_commandBuffer, + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarrierCount, + reinterpret_cast( pMemoryBarriers ), + bufferMemoryBarrierCount, + reinterpret_cast( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast( pImageMemoryBarriers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPipelineBarrier( m_commandBuffer, + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast( &dependencyInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPreprocessGeneratedCommandsNV( + m_commandBuffer, reinterpret_cast( pGeneratedCommandsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPreprocessGeneratedCommandsNV( + m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPushConstants( m_commandBuffer, + static_cast( layout ), + static_cast( stageFlags ), + offset, + size, + pValues ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + ArrayProxy const & values, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPushConstants( m_commandBuffer, + static_cast( layout ), + static_cast( stageFlags ), + offset, + values.size() * sizeof( T ), + reinterpret_cast( values.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + ArrayProxy const & descriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + pData ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResetEvent2KHR( + m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResetQueryPool( 
m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResolveImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResolveImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR * pResolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( pResolveImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrderCount, + reinterpret_cast( pCustomSampleOrders ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + ArrayProxy const & customSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrders.size(), + reinterpret_cast( customSampleOrders.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetColorWriteEnableEXT( + m_commandBuffer, attachmentCount, reinterpret_cast( pColorWriteEnables ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setColorWriteEnableEXT( ArrayProxy const & colorWriteEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetColorWriteEnableEXT( + m_commandBuffer, colorWriteEnables.size(), reinterpret_cast( colorWriteEnables.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast( cullMode ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast( depthBiasEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, + float maxDepthBounds, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast( depthBoundsTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast( depthCompareOp ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast( depthTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast( depthWriteEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, + firstDiscardRectangle, + discardRectangleCount, + reinterpret_cast( pDiscardRectangles ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE 
void + CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + ArrayProxy const & discardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, + firstDiscardRectangle, + discardRectangles.size(), + reinterpret_cast( discardRectangles.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetEvent2KHR( m_commandBuffer, + static_cast( event ), + reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const DependencyInfoKHR & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetEvent2KHR( m_commandBuffer, + static_cast( event ), + reinterpret_cast( &dependencyInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetExclusiveScissorNV( m_commandBuffer, + firstExclusiveScissor, + exclusiveScissorCount, + reinterpret_cast( pExclusiveScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, + ArrayProxy const & exclusiveScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetExclusiveScissorNV( m_commandBuffer, + firstExclusiveScissor, + exclusiveScissors.size(), + reinterpret_cast( exclusiveScissors.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetFragmentShadingRateEnumNV( m_commandBuffer, + static_cast( shadingRate ), + reinterpret_cast( combinerOps ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( + const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer, + reinterpret_cast( pFragmentSize ), + reinterpret_cast( combinerOps ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( + const Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer, + reinterpret_cast( &fragmentSize ), + reinterpret_cast( combinerOps ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void 
CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast( frontFace ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast( logicOp ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCmdSetPerformanceMarkerINTEL( + m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const + { + Result result = static_cast( d.vkCmdSetPerformanceMarkerINTEL( + m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCmdSetPerformanceOverrideINTEL( + m_commandBuffer, reinterpret_cast( pOverrideInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, + Dispatch const & d ) const + { + Result result = static_cast( d.vkCmdSetPerformanceOverrideINTEL( + m_commandBuffer, reinterpret_cast( &overrideInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( + m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & 
markerInfo, + Dispatch const & d ) const + { + Result result = static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( + m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast( primitiveRestartEnable ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast( primitiveTopology ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast( rasterizerDiscardEnable ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, + reinterpret_cast( pSampleLocationsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, + reinterpret_cast( &sampleLocationsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, + ArrayProxy const & scissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetScissor( + m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast( scissors.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast( pScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setScissorWithCountEXT( ArrayProxy const & scissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetScissorWithCountEXT( + m_commandBuffer, scissors.size(), reinterpret_cast( scissors.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void 
CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilOpEXT( m_commandBuffer, + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast( stencilTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + uint32_t vertexBindingDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetVertexInputEXT( + m_commandBuffer, + vertexBindingDescriptionCount, + reinterpret_cast( pVertexBindingDescriptions ), + vertexAttributeDescriptionCount, + reinterpret_cast( pVertexAttributeDescriptions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + ArrayProxy const & vertexBindingDescriptions, + ArrayProxy const & vertexAttributeDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetVertexInputEXT( + m_commandBuffer, + vertexBindingDescriptions.size(), + reinterpret_cast( vertexBindingDescriptions.data() ), + vertexAttributeDescriptions.size(), + reinterpret_cast( vertexAttributeDescriptions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewport( + m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, + ArrayProxy const & viewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewport( + m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast( viewports.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE 
void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, + firstViewport, + viewportCount, + reinterpret_cast( pShadingRatePalettes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + ArrayProxy const & shadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportShadingRatePaletteNV( + m_commandBuffer, + firstViewport, + shadingRatePalettes.size(), + reinterpret_cast( shadingRatePalettes.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportWScalingNV( m_commandBuffer, + firstViewport, + viewportCount, + reinterpret_cast( pViewportWScalings ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( + uint32_t firstViewport, + ArrayProxy const & viewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportWScalingNV( m_commandBuffer, + firstViewport, + viewportWScalings.size(), + reinterpret_cast( viewportWScalings.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportWithCountEXT( + m_commandBuffer, viewportCount, reinterpret_cast( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWithCountEXT( ArrayProxy const & viewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportWithCountEXT( + m_commandBuffer, viewports.size(), reinterpret_cast( viewports.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysIndirectKHR( + m_commandBuffer, + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( pCallableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const StridedDeviceAddressRegionKHR & missShaderBindingTable, + const StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const StridedDeviceAddressRegionKHR & callableShaderBindingTable, + 
VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysIndirectKHR( + m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( pCallableShaderBindingTable ), + width, + height, + depth ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const StridedDeviceAddressRegionKHR & missShaderBindingTable, + const StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + width, + height, + depth ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysNV( m_commandBuffer, + static_cast( raygenShaderBindingTableBuffer ), + static_cast( raygenShaderBindingOffset ), + static_cast( missShaderBindingTableBuffer ), + static_cast( missShaderBindingOffset ), + static_cast( missShaderBindingStride ), + static_cast( hitShaderBindingTableBuffer ), + static_cast( hitShaderBindingOffset ), + static_cast( hitShaderBindingStride ), + static_cast( callableShaderBindingTableBuffer ), + static_cast( callableShaderBindingOffset ), + static_cast( callableShaderBindingStride ), + width, + height, + depth ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + 
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize dataSize, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdUpdateBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( dataSize ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + ArrayProxy const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdUpdateBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWaitEvents( m_commandBuffer, + eventCount, + reinterpret_cast( pEvents ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarrierCount, + reinterpret_cast( pMemoryBarriers ), + bufferMemoryBarrierCount, + reinterpret_cast( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast( pImageMemoryBarriers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents( ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWaitEvents( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2KHR( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWaitEvents2KHR( m_commandBuffer, + eventCount, + reinterpret_cast( pEvents ), + reinterpret_cast( pDependencyInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2KHR( ArrayProxy const & events, + ArrayProxy const & dependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); + 
} +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdWaitEvents2KHR( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesKHR( + m_commandBuffer, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesKHR( + m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesNV( + m_commandBuffer, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesNV( + m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteBufferMarker2AMD( m_commandBuffer, + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, + static_cast( pipelineStage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + 
marker ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteTimestamp( m_commandBuffer, + static_cast( pipelineStage ), + static_cast( queryPool ), + query ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteTimestamp2KHR( + m_commandBuffer, static_cast( stage ), static_cast( queryPool ), query ); + } + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::end( Dispatch const & d ) const + { + Result result = static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( + VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const + { + Result result = + static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + } +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + Result result = static_cast( + d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkAcquireNextImage2KHR( + m_device, reinterpret_cast( pAcquireInfo ), pImageIndex ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & 
acquireInfo, Dispatch const & d ) const + { + uint32_t imageIndex; + Result result = static_cast( d.vkAcquireNextImage2KHR( + m_device, reinterpret_cast( &acquireInfo ), &imageIndex ) ); + return createResultValue( result, + imageIndex, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkAcquireNextImageKHR( m_device, + static_cast( swapchain ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + pImageIndex ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + uint32_t imageIndex; + Result result = static_cast( d.vkAcquireNextImageKHR( m_device, + static_cast( swapchain ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + &imageIndex ) ); + return createResultValue( result, + imageIndex, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkAcquirePerformanceConfigurationINTEL( + m_device, + reinterpret_cast( pAcquireInfo ), + reinterpret_cast( pConfiguration ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + Result result = static_cast( d.vkAcquirePerformanceConfigurationINTEL( + m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + return createResultValue( + result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + Result result = static_cast( d.vkAcquirePerformanceConfigurationINTEL( + m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + 
ObjectRelease deleter( *this, d ); + return createResultValue( + result, + configuration, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( + const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const + { + Result result = static_cast( + d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pCommandBuffers ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const + { + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d ) const + { + std::vector commandBuffers( allocateInfo.commandBufferCount, + commandBufferAllocator ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, CommandBufferAllocator>>::type + Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const + { + std::vector, CommandBufferAllocator> uniqueCommandBuffers; + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); + PoolFree deleter( *this, allocateInfo.commandPool, d ); + for ( 
size_t i = 0; i < allocateInfo.commandBufferCount; i++ ) + { + uniqueCommandBuffers.push_back( UniqueHandle( commandBuffers[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + } + + template >::value, + int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, CommandBufferAllocator>>::type + Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d ) const + { + std::vector, CommandBufferAllocator> uniqueCommandBuffers( + commandBufferAllocator ); + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); + PoolFree deleter( *this, allocateInfo.commandPool, d ); + for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ ) + { + uniqueCommandBuffers.push_back( UniqueHandle( commandBuffers[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const + { + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d ) const + { + std::vector descriptorSets( allocateInfo.descriptorSetCount, + descriptorSetAllocator ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, DescriptorSetAllocator>>::type + Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const + { + std::vector, DescriptorSetAllocator> 
uniqueDescriptorSets; + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); + PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ ) + { + uniqueDescriptorSets.push_back( UniqueHandle( descriptorSets[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + } + + template >::value, + int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, DescriptorSetAllocator>>::type + Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d ) const + { + std::vector, DescriptorSetAllocator> uniqueDescriptorSets( + descriptorSetAllocator ); + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); + PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ ) + { + uniqueDescriptorSets.push_back( UniqueHandle( descriptorSets[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkAllocateMemory( m_device, + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMemory ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DeviceMemory memory; + Result result = static_cast( + d.vkAllocateMemory( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &memory ) ) ); + return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DeviceMemory memory; + Result result = static_cast( + d.vkAllocateMemory( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &memory ) ) ); + 
ObjectFree deleter( *this, allocator, d ); + return createResultValue( + result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( + uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindAccelerationStructureMemoryNV( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindAccelerationStructureMemoryNV( + ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindAccelerationStructureMemoryNV( + m_device, + bindInfos.size(), + reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindBufferMemory( m_device, + static_cast( buffer ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindBufferMemory( m_device, + static_cast( buffer ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindBufferMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindBufferMemory2( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory2( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindBufferMemory2( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindBufferMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindBufferMemory2KHR( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory2KHR( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindBufferMemory2KHR( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindImageMemory( m_device, + static_cast( image ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindImageMemory( m_device, + static_cast( image ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindImageMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory2( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindImageMemory2( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindImageMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindImageMemory2KHR( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory2KHR( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindImageMemory2KHR( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t videoSessionBindMemoryCount, + const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkBindVideoSessionMemoryKHR( m_device, + static_cast( videoSession ), + videoSessionBindMemoryCount, + reinterpret_cast( pVideoSessionBindMemories ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindVideoSessionMemoryKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + ArrayProxy const & videoSessionBindMemories, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindVideoSessionMemoryKHR( + m_device, + static_cast( videoSession ), + videoSessionBindMemories.size(), + reinterpret_cast( videoSessionBindMemories.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBuildAccelerationStructuresKHR( + m_device, + static_cast( deferredOperation ), + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppBuildRangeInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + Result result = static_cast( d.vkBuildAccelerationStructuresKHR( + m_device, + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const + { + Result result = + static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
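
Illustrative note (not part of the vendored header): the enhanced-mode Device wrappers defined above, such as createBufferUnique, allocateMemoryUnique, and bindBufferMemory, are typically combined as sketched below. The sketch assumes a valid vk::Device named device, a memoryTypeIndex already chosen from the device's memory properties, and the default exception-enabled configuration, in which each wrapper returns its value directly and throws vk::SystemError on failure.

    vk::UniqueBuffer buffer = device.createBufferUnique(
        vk::BufferCreateInfo( {}, 256, vk::BufferUsageFlagBits::eUniformBuffer ) );
    vk::MemoryRequirements requirements = device.getBufferMemoryRequirements( *buffer );
    vk::UniqueDeviceMemory memory = device.allocateMemoryUnique(
        vk::MemoryAllocateInfo( requirements.size, memoryTypeIndex ) );
    device.bindBufferMemory( *buffer, *memory, 0 );  // wraps vkBindBufferMemory; throws on error
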
template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCopyAccelerationStructureKHR( m_device, + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkCopyAccelerationStructureKHR( m_device, + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCopyAccelerationStructureToMemoryKHR( + m_device, + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const + { + Result result = static_cast( d.vkCopyAccelerationStructureToMemoryKHR( + m_device, + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCopyMemoryToAccelerationStructureKHR( + m_device, + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + Result result = static_cast( d.vkCopyMemoryToAccelerationStructureKHR( + m_device, + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + 
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateAccelerationStructureKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + return createResultValue( + result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + accelerationStructure, + VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateAccelerationStructureNV( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + return createResultValue( + result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" ); + } + +# ifndef 
VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + accelerationStructure, + VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Buffer * pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateBuffer( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pBuffer ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::createBuffer( const BufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Buffer buffer; + Result result = static_cast( + d.vkCreateBuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &buffer ) ) ); + return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createBufferUnique( const BufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Buffer buffer; + Result result = static_cast( + d.vkCreateBuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &buffer ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferView * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateBufferView( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pView ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createBufferView( const BufferViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::BufferView view; + Result result = static_cast( + d.vkCreateBufferView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + 
reinterpret_cast( &view ) ) ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::BufferView view; + Result result = static_cast( + d.vkCreateBufferView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateCommandPool( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCommandPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createCommandPool( const CommandPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::CommandPool commandPool; + Result result = static_cast( + d.vkCreateCommandPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &commandPool ) ) ); + return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::CommandPool commandPool; + Result result = static_cast( + d.vkCreateCommandPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &commandPool ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + 
Device::createComputePipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createComputePipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFunction ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createCuFunctionNVX( const CuFunctionCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + Result result = static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + return createResultValue( result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); + } + +# ifndef 
VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + Result result = static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateCuModuleNVX( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pModule ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createCuModuleNVX( const CuModuleCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::CuModuleNVX module; + Result result = static_cast( + d.vkCreateCuModuleNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + return createResultValue( result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createCuModuleNVXUnique( const CuModuleCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::CuModuleNVX module; + Result result = static_cast( + d.vkCreateCuModuleNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDeferredOperationKHR( m_device, + reinterpret_cast( pAllocator ), + reinterpret_cast( pDeferredOperation ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createDeferredOperationKHR( Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + Result result = static_cast( + d.vkCreateDeferredOperationKHR( m_device, + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &deferredOperation ) ) ); + return createResultValue( + result, deferredOperation, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createDeferredOperationKHRUnique( Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + Result result = static_cast( + d.vkCreateDeferredOperationKHR( m_device, + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &deferredOperation ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; + Result result = static_cast( + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorPool ) ) ); + return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; + Result result = static_cast( + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorPool ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDescriptorSetLayout( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSetLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) 
const + { + VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; + Result result = static_cast( + d.vkCreateDescriptorSetLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &setLayout ) ) ); + return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; + Result result = static_cast( + d.vkCreateDescriptorSetLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &setLayout ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + return createResultValue( + result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( + const 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + return createResultValue( + result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Event * pEvent, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateEvent( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pEvent ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::createEvent( const EventCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Event event; + Result result = static_cast( + d.vkCreateEvent( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &event ) ) ); + return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createEventUnique( const EventCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Event event; + Result result = static_cast( + d.vkCreateEvent( m_device, + reinterpret_cast( &createInfo ), + 
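// Note (explanatory comment, not part of the generated header): the Optional<const
+  // AllocationCallbacks> argument below is converted back to the plain VkAllocationCallbacks
+  // pointer expected by the C entry point; when no allocator is supplied this pointer is
+  // null and the implementation's default host allocation behavior is used. +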
reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &event ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateFence( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::createFence( const FenceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkCreateFence( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createFenceUnique( const FenceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkCreateFence( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateFramebuffer( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFramebuffer ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createFramebuffer( const FramebufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; + Result result = static_cast( + d.vkCreateFramebuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); + return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; + Result 
result = static_cast( + d.vkCreateFramebuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef 
VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateImage( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pImage ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::createImage( const ImageCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Image image; + Result result = static_cast( + d.vkCreateImage( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &image ) ) ); + return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createImageUnique( const ImageCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Image image; + Result result = static_cast( + d.vkCreateImage( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &image ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ImageView * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateImageView( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pView ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createImageView( const ImageViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageView view; + Result result = static_cast( + d.vkCreateImageView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageView view; + Result result = static_cast( + d.vkCreateImageView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, view, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createImageViewUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateIndirectCommandsLayoutNV( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pIndirectCommandsLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + return createResultValue( + result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + indirectCommandsLayout, + VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreatePipelineCache( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelineCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; + Result result = static_cast( + d.vkCreatePipelineCache( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipelineCache ) ) ); + return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; + Result result = static_cast( + d.vkCreatePipelineCache( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipelineCache ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreatePipelineLayout( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelineLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; + Result result = static_cast( + d.vkCreatePipelineLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); + return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; + Result result = static_cast( + d.vkCreatePipelineLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT * pPrivateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreatePrivateDataSlotEXT( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPrivateDataSlot ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot; + 
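// Note (explanatory comment, not part of the generated header): as in the other
+  // enhanced-mode wrappers, the createResultValue call at the end of this function returns
+  // the created handle directly and throws a vk::SystemError on failure when exceptions are
+  // enabled; with VULKAN_HPP_NO_EXCEPTIONS defined it instead returns the result code
+  // together with the handle for manual checking. +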
Result result = static_cast( + d.vkCreatePrivateDataSlotEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + return createResultValue( + result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot; + Result result = static_cast( + d.vkCreatePrivateDataSlotEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateQueryPool( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pQueryPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createQueryPool( const QueryPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::QueryPool queryPool; + Result result = static_cast( + d.vkCreateQueryPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &queryPool ) ) ); + return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::QueryPool queryPool; + Result result = static_cast( + d.vkCreateQueryPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &queryPool ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast( deferredOperation 
), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + 
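// Note (explanatory comment, not part of the generated header): the deferredOperation
+  // parameter above comes from VK_KHR_deferred_host_operations. Passing a valid
+  // DeferredOperationKHR lets the driver build the ray-tracing pipelines asynchronously and
+  // report eOperationDeferredKHR or eOperationNotDeferredKHR, both of which this wrapper
+  // treats as success codes; passing a null handle requests ordinary synchronous creation. +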
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional 
allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING + "::Device::createRayTracingPipelineKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + 
d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + 
d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateRenderPass( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createRenderPass( const RenderPassCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateRenderPass2( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createRenderPass2" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Sampler * pSampler, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateSampler( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSampler ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSampler( const SamplerCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Sampler sampler; + Result result = static_cast( + d.vkCreateSampler( 
m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &sampler ) ) ); + return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSamplerUnique( const SamplerCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Sampler sampler; + Result result = static_cast( + d.vkCreateSampler( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &sampler ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pYcbcrConversion ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( + d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( + d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateSamplerYcbcrConversionKHR( m_device, + reinterpret_cast( pCreateInfo ), + 
reinterpret_cast( pAllocator ), + reinterpret_cast( pYcbcrConversion ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateSemaphore( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSemaphore ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSemaphore( const SemaphoreCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Semaphore semaphore; + Result result = static_cast( + d.vkCreateSemaphore( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); + return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Semaphore semaphore; + Result result = static_cast( + d.vkCreateSemaphore( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result + Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pShaderModule ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; + Result result = static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); + return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; + Result result = static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSharedSwapchainsKHR( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + swapchainCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSharedSwapchainsKHR( + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector swapchains( createInfos.size() ); + Result result = static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSharedSwapchainsKHR( + ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const + { + std::vector swapchains( createInfos.size(), swapchainKHRAllocator ); + Result result = static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + 
createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + SwapchainKHR swapchain; + Result result = static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector, SwapchainKHRAllocator> uniqueSwapchains; + std::vector swapchains( createInfos.size() ); + Result result = static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueSwapchains.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniqueSwapchains.push_back( UniqueHandle( swapchains[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + } + + template >::value, + int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( + ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const + { + std::vector, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); + std::vector swapchains( createInfos.size() ); + Result result = static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueSwapchains.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniqueSwapchains.push_back( UniqueHandle( swapchains[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + SwapchainKHR swapchain; + Result result = static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( 
allocator ) ), + reinterpret_cast( &swapchain ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchain ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + Result result = static_cast( + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + Result result = static_cast( + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pValidationCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + Result result = static_cast( + d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + return createResultValue( + result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createValidationCacheEXTUnique( const 
ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + Result result = static_cast( + d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pVideoSession ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createVideoSessionKHR( const VideoSessionCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + Result result = static_cast( + d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &videoSession ) ) ); + return createResultValue( result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createVideoSessionKHRUnique( const VideoSessionCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + Result result = static_cast( + d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &videoSession ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionParametersKHR( + const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pVideoSessionParameters ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createVideoSessionParametersKHR( const VideoSessionParametersCreateInfoKHR & createInfo, + 
Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + Result result = static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + return createResultValue( + result, videoSessionParameters, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createVideoSessionParametersKHRUnique( const VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + Result result = static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + videoSessionParameters, + VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkDebugMarkerSetObjectNameEXT( + m_device, reinterpret_cast( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + { + Result result = static_cast( d.vkDebugMarkerSetObjectNameEXT( + m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const + { + Result result = static_cast( + d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + 
Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const + { + Result result = + static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, + VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureNV( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureNV( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV 
accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBuffer( + m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBuffer( m_device, + static_cast( buffer ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBuffer( + m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBuffer( m_device, + static_cast( buffer ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool 
commandPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuModuleNVX( + m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuModuleNVX( m_device, + static_cast( module ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuModuleNVX( + m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuModuleNVX( m_device, + static_cast( module ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, + static_cast( operation ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, + static_cast( operation ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, + static_cast( operation ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, + static_cast( operation ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
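+    // Non-throwing C-style overload: translates the C++ handle and allocator pointer to their Vk
+    // equivalents and forwards to vkDestroyDescriptorSetLayout through the dispatcher 'd'.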
d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorUpdateTemplate( m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorUpdateTemplate( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorUpdateTemplate( m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorUpdateTemplate( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorUpdateTemplateKHR( m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplateKHR( 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                              Optional<const AllocationCallbacks>            allocator,
+                                              Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplateKHR(
+      m_device,
+      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDevice( m_device,
+                       reinterpret_cast<const VkAllocationCallbacks *>(
+                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event                       event,
+                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                               Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent(
+      m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event         event,
+                                               Optional<const AllocationCallbacks> allocator,
+                                               Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device,
+                      static_cast<VkEvent>( event ),
+                      reinterpret_cast<const VkAllocationCallbacks *>(
+                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event                       event,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent(
+      m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event         event,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device,
+                      static_cast<VkEvent>( event ),
+                      reinterpret_cast<const VkAllocationCallbacks *>(
+                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence                       fence,
+                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                               Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence(
+      m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence         fence,
+                                               Optional<const AllocationCallbacks> allocator,
+                                               Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device,
+                      static_cast<VkFence>( fence ),
+                      reinterpret_cast<const VkAllocationCallbacks *>(
+                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence                       fence,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence(
+      m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence         fence,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device,
+                      static_cast<VkFence>( fence ),
+                      reinterpret_cast<const VkAllocationCallbacks *>(
+                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
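The wrappers above all follow one pattern: each handle type gets a type-named destroy function plus a generic destroy() overload, and the enhanced-mode variants take an Optional<const AllocationCallbacks> that collapses to a possibly-null VkAllocationCallbacks pointer. A minimal usage sketch follows, assuming exceptions are enabled and that `device` is a valid vk::Device created elsewhere; the function name and the reliance on the default dispatcher are illustrative assumptions, not part of the generated header.

#include <vulkan/vulkan.hpp>

// Illustrative only: 'device' is assumed to be a valid, already-created vk::Device.
void fenceAndEventLifetime( vk::Device device )
{
  // Unique variant: an ObjectDestroy<Device, Dispatch> deleter destroys the fence automatically.
  vk::UniqueFence fence = device.createFenceUnique( vk::FenceCreateInfo{} );

  // Plain variant: the caller owns the handle and must destroy it explicitly.
  vk::Event event = device.createEvent( vk::EventCreateInfo{} );

  // The type-specific and generic overloads are equivalent; with no allocator argument both
  // forward a null VkAllocationCallbacks pointer to vkDestroyEvent.
  device.destroyEvent( event );
  // device.destroy( event );  // same effect via the generic overload
}  // 'fence' is released here by its UniqueHandle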
template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImage( + m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImage( m_device, + static_cast( image ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImage( + m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImage( m_device, + static_cast( image ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImageView( + m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImageView( m_device, + static_cast( imageView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImageView( + m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImageView( m_device, + static_cast( imageView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyIndirectCommandsLayoutNV( m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyIndirectCommandsLayoutNV( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyIndirectCommandsLayoutNV( m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyIndirectCommandsLayoutNV( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipeline( + m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipeline( m_device, + static_cast( pipeline ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipeline( + m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
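+    // Generic enhanced-mode destroy(): the Optional<const AllocationCallbacks> argument becomes a
+    // possibly-null VkAllocationCallbacks pointer before the call reaches vkDestroyPipeline.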
d.vkDestroyPipeline( m_device, + static_cast( pipeline ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPrivateDataSlotEXT( m_device, + static_cast( privateDataSlot ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + 
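+  // Enhanced-mode variant of destroyPrivateDataSlotEXT: takes Optional<const AllocationCallbacks>
+  // (no allocator by default) instead of a raw VkAllocationCallbacks pointer.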
template + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPrivateDataSlotEXT( m_device, + static_cast( privateDataSlot ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPrivateDataSlotEXT( m_device, + static_cast( privateDataSlot ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPrivateDataSlotEXT( m_device, + static_cast( privateDataSlot ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyQueryPool( + m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyQueryPool( m_device, + static_cast( queryPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyQueryPool( + m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyQueryPool( m_device, + static_cast( queryPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + 
reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySampler( + m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySampler( m_device, + static_cast( sampler ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySampler( + m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySampler( m_device, + static_cast( sampler ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversionKHR( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversionKHR( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( + m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( m_device, + static_cast( semaphore ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( + m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( m_device, + static_cast( semaphore ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
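+  // The destroy*/free* members above all follow the same two-flavour pattern: a C-style overload that
+  // forwards a `const AllocationCallbacks *` unchanged, and (unless VULKAN_HPP_DISABLE_ENHANCED_MODE is
+  // defined) an enhanced overload taking `Optional<const AllocationCallbacks>` so the allocator argument
+  // can be omitted. A minimal usage sketch, assuming `device` is a valid vk::Device, `imageView` was
+  // created from it, and the default dispatcher is in use:
+  //
+  //   device.destroy( imageView );                     // enhanced mode, default (null) allocator
+  //   device.destroyImageView( imageView, nullptr );   // C-style overload, explicit null callbacks
+  //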
template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyVideoSessionKHR( m_device, + static_cast( videoSession ), + reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional 
allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyVideoSessionKHR( m_device, + static_cast( videoSession ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyVideoSessionKHR( m_device, + static_cast( videoSession ), + reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyVideoSessionKHR( m_device, + static_cast( videoSession ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_INLINE void + Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyVideoSessionParametersKHR( m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyVideoSessionParametersKHR( m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkDeviceWaitIdle( m_device ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::waitIdle( Dispatch const & d ) const + { + Result result = static_cast( d.vkDeviceWaitIdle( m_device ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkDisplayPowerControlEXT( m_device, + static_cast( display ), + reinterpret_cast( pDisplayPowerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::displayPowerControlEXT( + VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const + { + Result result = static_cast( + d.vkDisplayPowerControlEXT( m_device, + static_cast( display ), + reinterpret_cast( &displayPowerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkFlushMappedMemoryRanges( + m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::flushMappedMemoryRanges( ArrayProxy const & memoryRanges, + Dispatch const & d ) const + { + Result result = static_cast( d.vkFlushMappedMemoryRanges( + m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy const & commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBuffers.size(), + reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy const & commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBuffers.size(), + reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::freeDescriptorSets( 
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::free" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeMemory( + m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeMemory( m_device, + static_cast( memory ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeMemory( + m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeMemory( m_device, + static_cast( memory ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + 
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetAccelerationStructureBuildSizesKHR( + m_device, + static_cast( buildType ), + reinterpret_cast( pBuildInfo ), + pMaxPrimitiveCounts, + reinterpret_cast( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const AccelerationStructureBuildGeometryInfoKHR & buildInfo, + ArrayProxy const & maxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); +# else + if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetAccelerationStructureBuildSizesKHR( + m_device, + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + maxPrimitiveCounts.data(), + reinterpret_cast( &sizeInfo ) ); + return sizeInfo; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetAccelerationStructureDeviceAddressKHR( + m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( + const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetAccelerationStructureDeviceAddressKHR( + m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetAccelerationStructureHandleNV( + m_device, static_cast( accelerationStructure ), dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getAccelerationStructureHandleNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + ArrayProxy const & data, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkGetAccelerationStructureHandleNV( m_device, + static_cast( accelerationStructure ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( + d.vkGetAccelerationStructureHandleNV( m_device, + static_cast( accelerationStructure ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Dispatch const & d ) const + { + T data; + Result result = static_cast( + d.vkGetAccelerationStructureHandleNV( m_device, + static_cast( accelerationStructure ), + sizeof( T ), + reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR + Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = + structureChain.template get(); + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::getAndroidHardwareBufferPropertiesANDROID( + const struct AHardwareBuffer * buffer, + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( + m_device, buffer, reinterpret_cast( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; + Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( + m_device, &buffer, reinterpret_cast( &properties ) ) ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = + structureChain.template get(); + Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( + m_device, &buffer, reinterpret_cast( &properties ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetBufferDeviceAddress( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetBufferMemoryRequirements( + m_device, static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements + Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + d.vkGetBufferMemoryRequirements( + m_device, static_cast( buffer ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template 
get(); + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, + reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getCalibratedTimestampsEXT( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampCount, + reinterpret_cast( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + ArrayProxy const & timestamps, + Dispatch const & d ) const + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() ); +# else + if ( timestampInfos.size() != timestamps.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::VkDevice::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + uint64_t maxDeviation; + Result result = static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + return createResultValue( + result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + Dispatch const & d ) const + { + std::pair, uint64_t> data( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data.first; + uint64_t & maxDeviation = data.second; + Result result = static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d ) const + { + std::pair, uint64_t> data( + std::piecewise_construct, + std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), + std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data.first; + uint64_t & maxDeviation = data.second; + Result result = static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast( operation ) ); + } + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const + { + Result result = static_cast( + d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING 
"::Device::getDeferredOperationResultKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pSupport ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return support; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = + structureChain.template get(); + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDescriptorSetLayoutSupportKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pSupport ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupportKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return support; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = + structureChain.template get(); + d.vkGetDescriptorSetLayoutSupportKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceAccelerationStructureCompatibilityKHR( + m_device, + reinterpret_cast( pVersionInfo ), + reinterpret_cast( pCompatibility ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + d.vkGetDeviceAccelerationStructureCompatibilityKHR( + m_device, + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); + return compatibility; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( + m_device, reinterpret_cast( pDeviceGroupPresentCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; + Result result = static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( + m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); + return createResultValue( + result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getGroupPresentCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( + m_device, + reinterpret_cast( pSurfaceInfo ), + reinterpret_cast( pModes ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( + m_device, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &modes ) ) ); + return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, + static_cast( surface ), + reinterpret_cast( pModes ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + Result result = static_cast( + d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, + static_cast( surface ), + reinterpret_cast( &modes ) ) ); + return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceMemoryCommitment( + m_device, static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; + d.vkGetDeviceMemoryCommitment( + m_device, static_cast( memory ), reinterpret_cast( &committedMemoryInBytes ) ); + return committedMemoryInBytes; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeviceMemoryOpaqueCaptureAddress( + m_device, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeviceMemoryOpaqueCaptureAddress( + m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( + m_device, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( + const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( + m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeviceProcAddr( m_device, pName ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeviceProcAddr( m_device, name.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( pQueue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue + Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Queue queue; + d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( &queue ) ); + return queue; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceQueue2( + m_device, reinterpret_cast( pQueueInfo ), reinterpret_cast( pQueue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue + Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Queue queue; + d.vkGetDeviceQueue2( + m_device, reinterpret_cast( &queueInfo ), reinterpret_cast( &queue ) ); + return queue; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const + { + Result result = static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + return createResultValue( result, + 
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
+                              { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
+                           int *                                           pFd,
+                           Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
+    Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+  {
+    int    fd;
+    Result result = static_cast<Result>(
+      d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
+                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
+                                                                        Dispatch const &            d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+    return createResultValue( result,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus",
+                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+                                    HANDLE *                                                 pHandle,
+                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetFenceWin32HandleKHR(
+      m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
+    Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR(
+      m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV(
+    const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                             pMemoryRequirements,
+    Dispatch const &                                                        d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetGeneratedCommandsMemoryRequirementsNV(
+      m_device,
+      reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
+      reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+    Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info,
+                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetGeneratedCommandsMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetGeneratedCommandsMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, + static_cast( image ), + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; + Result result = static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, + static_cast( image ), + reinterpret_cast( &properties ) ) ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageMemoryRequirements( + m_device, static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements + Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + d.vkGetImageMemoryRequirements( + m_device, static_cast( image ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return 
memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageMemoryRequirements2KHR( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetImageMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( + m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + template < + typename SparseImageMemoryRequirementsAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + SparseImageMemoryRequirementsAllocator & 
sparseImageMemoryRequirementsAllocator, + Dispatch const & d ) const + { + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirementsAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( + m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( + const ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( pInfo ), + 
pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( + const ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSubresourceLayout( m_device, + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout + Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const ImageSubresource & subresource, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SubresourceLayout layout; + d.vkGetImageSubresourceLayout( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + return layout; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetImageViewAddressNVX( m_device, + static_cast( imageView ), + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getImageViewAddressNVX( 
VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; + Result result = static_cast( + d.vkGetImageViewAddressNVX( m_device, + static_cast( imageView ), + reinterpret_cast( &properties ) ) ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( + const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( + m_device, reinterpret_cast( pInfo ), pBuffer ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, + Dispatch const & d ) const + { + struct AHardwareBuffer * buffer; + Result result = static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( + m_device, reinterpret_cast( &info ), &buffer ) ); + return createResultValue( + result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const + { + int fd; + Result result = static_cast( + d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetMemoryFdPropertiesKHR( m_device, + static_cast( handleType ), + fd, + reinterpret_cast( pMemoryFdProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + 
Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; + Result result = static_cast( + d.vkGetMemoryFdPropertiesKHR( m_device, + static_cast( handleType ), + fd, + reinterpret_cast( &memoryFdProperties ) ) ); + return createResultValue( + result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( + m_device, + static_cast( handleType ), + pHostPointer, + reinterpret_cast( pMemoryHostPointerProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + Result result = static_cast( d.vkGetMemoryHostPointerPropertiesEXT( + m_device, + static_cast( handleType ), + pHostPointer, + reinterpret_cast( &memoryHostPointerProperties ) ) ); + return createResultValue( + result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryWin32HandleKHR( + m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetMemoryWin32HandleKHR( + m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetMemoryWin32HandleNV( m_device, + static_cast( memory ), + static_cast( handleType ), + pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + Dispatch const & d ) const + { + HANDLE handle; + Result result = + static_cast( d.vkGetMemoryWin32HandleNV( m_device, + static_cast( memory ), + static_cast( handleType ), + &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( + m_device, + static_cast( handleType ), + handle, + reinterpret_cast( pMemoryWin32HandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; + Result result = static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( + m_device, + static_cast( handleType ), + handle, + reinterpret_cast( &memoryWin32HandleProperties ) ) ); + return createResultValue( + result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryZirconHandleFUCHSIA( + m_device, reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d ) const + { + zx_handle_t zirconHandle; + Result result = static_cast( d.vkGetMemoryZirconHandleFUCHSIA( + m_device, reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); + return createResultValue( + result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandlePropertiesFUCHSIA( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryZirconHandlePropertiesFUCHSIA( + m_device, + static_cast( handleType ), + zirconHandle, + reinterpret_cast( pMemoryZirconHandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE 
typename ResultValueType::type + Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; + Result result = static_cast( d.vkGetMemoryZirconHandlePropertiesFUCHSIA( + m_device, + static_cast( handleType ), + zirconHandle, + reinterpret_cast( &memoryZirconHandleProperties ) ) ); + return createResultValue( result, + memoryZirconHandleProperties, + VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + pPresentationTimingCount, + reinterpret_cast( pPresentationTimings ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + std::vector presentationTimings; + uint32_t presentationTimingCount; + Result result; + do + { + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) ) + { + presentationTimings.resize( presentationTimingCount ); + } + return createResultValue( + result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + } + + template < + typename PastPresentationTimingGOOGLEAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, + Dispatch const & d ) const + { + std::vector presentationTimings( + pastPresentationTimingGOOGLEAllocator ); + uint32_t presentationTimingCount; + Result result; + do + { + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + } + } 
while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) ) + { + presentationTimings.resize( presentationTimingCount ); + } + return createResultValue( + result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPerformanceParameterINTEL( m_device, + static_cast( parameter ), + reinterpret_cast( pValue ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; + Result result = + static_cast( d.vkGetPerformanceParameterINTEL( m_device, + static_cast( parameter ), + reinterpret_cast( &value ) ) ); + return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const + { + std::vector data; + size_t dataSize; + Result result; + do + { + result = static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetPipelineCacheData( m_device, + static_cast( pipelineCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) ) + { + data.resize( dataSize ); + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + std::vector data( uint8_tAllocator ); + size_t dataSize; + Result result; + do + { + result = static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetPipelineCacheData( m_device, + static_cast( pipelineCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == 
Result::eSuccess ) && ( dataSize < data.size() ) ) + { + data.resize( dataSize ); + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( pExecutableInfo ), + pInternalRepresentationCount, + reinterpret_cast( pInternalRepresentations ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d ) const + { + std::vector + internalRepresentations; + uint32_t internalRepresentationCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + return createResultValue( result, + internalRepresentations, + VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + } + + template < + typename PipelineExecutableInternalRepresentationKHRAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutableInternalRepresentationsKHR( + const PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, + Dispatch const & d ) const + { + std::vector + internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator ); + uint32_t internalRepresentationCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + } + } while ( result == Result::eIncomplete ); + 
if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + return createResultValue( result, + internalRepresentations, + VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPipelineExecutablePropertiesKHR( m_device, + reinterpret_cast( pPipelineInfo ), + pExecutableCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const + { + std::vector properties; + uint32_t executableCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) ) + { + properties.resize( executableCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + } + + template < + typename PipelineExecutablePropertiesKHRAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + Device::getPipelineExecutablePropertiesKHR( + const PipelineInfoKHR & pipelineInfo, + PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, + Dispatch const & d ) const + { + std::vector properties( + pipelineExecutablePropertiesKHRAllocator ); + uint32_t executableCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) ) + { + properties.resize( executableCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPipelineExecutableStatisticsKHR( const 
VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPipelineExecutableStatisticsKHR( m_device, + reinterpret_cast( pExecutableInfo ), + pStatisticCount, + reinterpret_cast( pStatistics ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d ) const + { + std::vector statistics; + uint32_t statisticCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) ) + { + statistics.resize( statisticCount ); + } + return createResultValue( + result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + } + + template < + typename PipelineExecutableStatisticKHRAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutableStatisticsKHR( + const PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, + Dispatch const & d ) const + { + std::vector statistics( + pipelineExecutableStatisticKHRAllocator ); + uint32_t statisticCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) ) + { + statistics.resize( statisticCount ); + } + return createResultValue( + result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + uint64_t * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPrivateDataEXT( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + 
VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + uint64_t data; + d.vkGetPrivateDataEXT( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + &data ); + return data; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + dataSize, + pData, + static_cast( stride ), + static_cast( flags ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + ArrayProxy const & data, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const + { + Result result = static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ), + static_cast( stride ), + static_cast( flags ) ) ); + return createResultValue( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ), + static_cast( stride ), + static_cast( flags ) ) ); + return createResultValue( result, + data, + VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const + { + T data; + Result result = static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + sizeof( T ), + reinterpret_cast( &data ), + static_cast( stride ), + static_cast( flags ) ) ); + return createResultValue( result, + data, + VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, 
+ uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( + d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d ) const + { + T data; + Result result = + static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + sizeof( T ), + reinterpret_cast( &data ) ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getRayTracingShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d ) const + { + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d ) const + { + T data; + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + sizeof( T ), + reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( + m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getRayTracingShaderGroupHandlesNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d ) const + { + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d ) const + { + T data; + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + sizeof( T ), + reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE DeviceSize + Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t group, + VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingShaderGroupStackSizeKHR( + m_device, static_cast( pipeline ), group, static_cast( groupShader ) ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRefreshCycleDurationGOOGLE( + m_device, + static_cast( swapchain ), + reinterpret_cast( pDisplayTimingProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; + Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( + m_device, + static_cast( swapchain ), + reinterpret_cast( &displayTimingProperties ) ) ); + return createResultValue( + result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( 
VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+                                                           VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
+                                                           Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetRenderAreaGranularity(
+      m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
+    Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::Extent2D granularity;
+    d.vkGetRenderAreaGranularity(
+      m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+    return granularity;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue(
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
+    Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    uint64_t value;
+    Result   result =
+      static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR(
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
+    Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    uint64_t value;
+    Result   result =
+      static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
+                               int *                                               pFd,
+                               Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
+    Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+  {
+    int    fd;
+    Result result = static_cast<Result>(
+      d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR(
+    const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+    HANDLE *
pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( pGetZirconHandleInfo ), + pZirconHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d ) const + { + zx_handle_t zirconHandle; + Result result = static_cast( d.vkGetSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( &getZirconHandleInfo ), + &zirconHandle ) ); + return createResultValue( + result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + pInfoSize, + pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Dispatch const & d ) const + { + std::vector info; + size_t infoSize; + Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) ) + { + info.resize( infoSize ); + } + return createResultValue( result, info, 
VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + std::vector info( uint8_tAllocator ); + size_t infoSize; + Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) ) + { + info.resize( infoSize ); + } + return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSwapchainCounterEXT( m_device, + static_cast( swapchain ), + static_cast( counter ), + pCounterValue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + Dispatch const & d ) const + { + uint64_t counterValue; + Result result = + static_cast( d.vkGetSwapchainCounterEXT( m_device, + static_cast( swapchain ), + static_cast( counter ), + &counterValue ) ); + return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSwapchainImagesKHR( m_device, + static_cast( swapchain ), + pSwapchainImageCount, + reinterpret_cast( pSwapchainImages ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + std::vector swapchainImages; + uint32_t swapchainImageCount; + Result result; + do + { + result = static_cast( d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = + static_cast( d.vkGetSwapchainImagesKHR( m_device, + static_cast( swapchain ), + &swapchainImageCount, + reinterpret_cast( swapchainImages.data() ) ) ); + 
VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) ) + { + swapchainImages.resize( swapchainImageCount ); + } + return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + ImageAllocator & imageAllocator, + Dispatch const & d ) const + { + std::vector swapchainImages( imageAllocator ); + uint32_t swapchainImageCount; + Result result; + do + { + result = static_cast( d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = + static_cast( d.vkGetSwapchainImagesKHR( m_device, + static_cast( swapchain ), + &swapchainImageCount, + reinterpret_cast( swapchainImages.data() ) ) ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) ) + { + swapchainImages.resize( swapchainImageCount ); + } + return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + Result result = + static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + return createResultValue( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Dispatch const & d ) const + { + std::vector data; + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = + static_cast( d.vkGetValidationCacheDataEXT( m_device, + static_cast( validationCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + } + } while ( 
result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) ) + { + data.resize( dataSize ); + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + std::vector data( uint8_tAllocator ); + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = + static_cast( d.vkGetValidationCacheDataEXT( m_device, + static_cast( validationCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) ) + { + data.resize( dataSize ); + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getVideoSessionMemoryRequirementsKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t * pVideoSessionMemoryRequirementsCount, + VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( + m_device, + static_cast( videoSession ), + pVideoSessionMemoryRequirementsCount, + reinterpret_cast( pVideoSessionMemoryRequirements ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Dispatch const & d ) const + { + std::vector videoSessionMemoryRequirements; + uint32_t videoSessionMemoryRequirementsCount; + Result result; + do + { + result = static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( + m_device, static_cast( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount ) + { + videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount ); + result = static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( + m_device, + static_cast( videoSession ), + &videoSessionMemoryRequirementsCount, + reinterpret_cast( videoSessionMemoryRequirements.data() ) ) ); + VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && + ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) ) + { + videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount ); + } + return createResultValue( result, + videoSessionMemoryRequirements, + VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" ); + } + + template < + typename VideoGetMemoryPropertiesKHRAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getVideoSessionMemoryRequirementsKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator, + Dispatch const & d ) const + { + std::vector videoSessionMemoryRequirements( + videoGetMemoryPropertiesKHRAllocator ); + uint32_t videoSessionMemoryRequirementsCount; + Result result; + do + { + result = static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( + m_device, static_cast( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount ) + { + videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount ); + result = static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( + m_device, + static_cast( videoSession ), + &videoSessionMemoryRequirementsCount, + reinterpret_cast( videoSessionMemoryRequirements.data() ) ) ); + VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && + ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) ) + { + videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount ); + } + return createResultValue( result, + videoSessionMemoryRequirements, + VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const + { + Result result = static_cast( + d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkImportFenceWin32HandleKHR( + m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, + Dispatch const & d ) const + { + Result result = static_cast( d.vkImportFenceWin32HandleKHR( + m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * 
pImportSemaphoreFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkImportSemaphoreFdKHR( + m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const + { + Result result = static_cast( d.vkImportSemaphoreFdKHR( + m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkImportSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, + Dispatch const & d ) const + { + Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( pImportSemaphoreZirconHandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreZirconHandleFUCHSIA( + const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, Dispatch const & d ) const + { + Result result = static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( &importSemaphoreZirconHandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( + const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkInitializePerformanceApiINTEL( + m_device, reinterpret_cast( pInitializeInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, + Dispatch const & d ) const + { + Result result = static_cast( d.vkInitializePerformanceApiINTEL( 
+ m_device, reinterpret_cast( &initializeInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkInvalidateMappedMemoryRanges( + m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::invalidateMappedMemoryRanges( + ArrayProxy const & memoryRanges, Dispatch const & d ) const + { + Result result = static_cast( d.vkInvalidateMappedMemoryRanges( + m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkMapMemory( m_device, + static_cast( memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + ppData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + Dispatch const & d ) const + { + void * pData; + Result result = static_cast( d.vkMapMemory( m_device, + static_cast( memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + &pData ) ); + return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkMergePipelineCaches( m_device, + static_cast( dstCache ), + srcCacheCount, + reinterpret_cast( pSrcCaches ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + ArrayProxy const & srcCaches, + Dispatch const & d ) const + { + Result result = + static_cast( d.vkMergePipelineCaches( m_device, + static_cast( dstCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const 
VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkMergeValidationCachesEXT( m_device, + static_cast( dstCache ), + srcCacheCount, + reinterpret_cast( pSrcCaches ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + ArrayProxy const & srcCaches, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkMergeValidationCachesEXT( m_device, + static_cast( dstCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast( pDeviceEventInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkRegisterDisplayEventEXT( m_device, + static_cast( display ), + reinterpret_cast( pDisplayEventInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + 
Result result = static_cast( + d.vkRegisterDisplayEventEXT( m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkRegisterDisplayEventEXT( m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + } +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + Result result = static_cast( + d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const + { + Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL 
configuration, Dispatch const & d ) const + { + Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkReleaseProfilingLockKHR( m_device ); + } + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkResetCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d ) const + { + Result result = static_cast( d.vkResetCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkResetDescriptorPool( + m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d ) const + { + Result result = static_cast( d.vkResetDescriptorPool( + m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const + { + Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkResetFences( m_device, fenceCount, reinterpret_cast( pFences ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetFences( ArrayProxy const & fences, Dispatch const & d ) const + { + Result result = static_cast( + d.vkResetFences( m_device, fences.size(), reinterpret_cast( fences.data() ) ) ); + return 
createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkResetQueryPool( m_device, static_cast( queryPool ), firstQuery, queryCount ); + } + + template + VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkSetDebugUtilsObjectNameEXT( + m_device, reinterpret_cast( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + { + Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( + m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const + { + Result result = static_cast( + d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const + { + Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkSetHdrMetadataEXT( m_device, + swapchainCount, + reinterpret_cast( pSwapchains ), + reinterpret_cast( pMetadata ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_INLINE void + Device::setHdrMetadataEXT( ArrayProxy const & swapchains, + ArrayProxy const & metadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); +# else + if ( swapchains.size() != metadata.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkSetHdrMetadataEXT( m_device, + swapchains.size(), + reinterpret_cast( swapchains.data() ), + reinterpret_cast( metadata.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkSetLocalDimmingAMD( + m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); + } + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + uint64_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkSetPrivateDataEXT( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + data ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + uint64_t data, + Dispatch const & d ) const + { + Result result = static_cast( d.vkSetPrivateDataEXT( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + data ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( + const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkSignalSemaphore( m_device, reinterpret_cast( pSignalInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const + { + Result result = static_cast( + d.vkSignalSemaphore( m_device, reinterpret_cast( &signalInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( pSignalInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const + { + Result result = static_cast( + d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ) ); + return createResultValue( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool          commandPool,
+                                                  VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
+                                                  Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkTrimCommandPool(
+      m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool          commandPool,
+                                                     VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
+                                                     Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkTrimCommandPoolKHR(
+      m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUninitializePerformanceApiINTEL( m_device );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+                                              Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet            descriptorSet,
+                                             VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                             const void *                                   pData,
+                                             Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSetWithTemplate( m_device,
+                                         static_cast<VkDescriptorSet>( descriptorSet ),
+                                         static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+                                         pData );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet            descriptorSet,
+                                                VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                                const void *                                   pData,
+                                                Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
+                                            static_cast<VkDescriptorSet>( descriptorSet ),
+                                            static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+                                            pData );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::updateDescriptorSets( uint32_t                                         descriptorWriteCount,
+                                  const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
+                                  uint32_t                                         descriptorCopyCount,
+                                  const VULKAN_HPP_NAMESPACE::CopyDescriptorSet *  pDescriptorCopies,
+                                  Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSets( m_device,
+                              descriptorWriteCount,
+                              reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
+                              descriptorCopyCount,
+                              reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+                                  ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const &  descriptorCopies,
+                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSets( m_device,
+                              descriptorWrites.size(),
+                              reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
+                              descriptorCopies.size(),
+                              reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::updateVideoSessionParametersKHR(
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR                   videoSessionParameters,
+    const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
+    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR(
+      m_device,
+      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+      reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VideoSessionParametersUpdateInfoKHR & updateInfo, + Dispatch const & d ) const + { + Result result = static_cast( d.vkUpdateVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( &updateInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkWaitForFences( + m_device, fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitForFences( ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d ) const + { + Result result = static_cast( d.vkWaitForFences( m_device, + fences.size(), + reinterpret_cast( fences.data() ), + static_cast( waitAll ), + timeout ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkWaitSemaphores( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkWaitSemaphores( m_device, reinterpret_cast( &waitInfo ), timeout ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( &waitInfo ), timeout ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + dataSize, + pData, + stride ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + ArrayProxy const & data, + size_t stride, + Dispatch const & d ) const + { + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ), + stride ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ), + stride ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::writeAccelerationStructuresPropertyKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d ) const + { + T data; + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + sizeof( T ), + reinterpret_cast( &data ), + stride ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateAndroidSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateAndroidSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateAndroidSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDebugReportCallbackEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCallback ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + Result result = static_cast( + d.vkCreateDebugReportCallbackEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); + return createResultValue( + result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + Result result = static_cast( + d.vkCreateDebugReportCallbackEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDebugUtilsMessengerEXT( const 
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMessenger ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + Result result = static_cast( + d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + return createResultValue( + result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + Result result = static_cast( + d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDirectFBSurfaceEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateDirectFBSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateDirectFBSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( 
allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDisplayPlaneSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateDisplayPlaneSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateDisplayPlaneSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateHeadlessSurfaceEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateHeadlessSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + 
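+  // ----------------------------------------------------------------------------------------------
+  // Editorial aside (illustrative sketch, not part of the generated Vulkan-Hpp code): each
+  // createXxx wrapper in this section also has a createXxxUnique flavour, guarded by
+  // VULKAN_HPP_NO_SMART_HANDLE, that attaches an ObjectDestroy deleter so the created handle is
+  // released automatically. A minimal, hypothetical helper, assuming the default `vk` namespace,
+  // enhanced mode with exceptions, and an instance created with VK_EXT_headless_surface enabled:
+  //
+  //   vk::UniqueSurfaceKHR makeOffscreenSurface( vk::Instance instance )
+  //   {
+  //     vk::HeadlessSurfaceCreateInfoEXT createInfo{};
+  //     // The Unique variant wraps vkCreateHeadlessSurfaceEXT; the attached deleter calls
+  //     // vkDestroySurfaceKHR when the returned handle goes out of scope.
+  //     return instance.createHeadlessSurfaceEXTUnique( createInfo );
+  //   }
+  // ----------------------------------------------------------------------------------------------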
template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateHeadlessSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, 
+ Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateScreenSurfaceQNX( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateScreenSurfaceQNX( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createScreenSurfaceQNXUnique( const ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateScreenSurfaceQNX( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, 
surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +#if defined( VK_USE_PLATFORM_GGP ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_GGP*/ + +#if defined( VK_USE_PLATFORM_VI_NN ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateWaylandSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateWaylandSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateWaylandSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & 
createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDebugReportMessageEXT( m_instance, + static_cast( flags ), + static_cast( objectType ), + object, + location, + messageCode, + pLayerPrefix, + pMessage ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDebugReportMessageEXT( m_instance, + static_cast( flags ), + static_cast( objectType ), + object, + location, + messageCode, + layerPrefix.c_str(), + message.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugReportCallbackEXT( m_instance, + static_cast( callback ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( + static_cast( 
allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugReportCallbackEXT( m_instance, + static_cast( callback ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, + static_cast( messenger ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, + static_cast( messenger ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyInstance( m_instance, + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( + m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + 
reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( + m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const + { + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValue( + result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + } + + template < + typename PhysicalDeviceGroupPropertiesAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroups( + PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const + { + std::vector physicalDeviceGroupProperties( + physicalDeviceGroupPropertiesAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == 
Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValue( + result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const + { + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValue( result, + physicalDeviceGroupProperties, + VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + } + + template < + typename PhysicalDeviceGroupPropertiesAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( + PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const + { + std::vector physicalDeviceGroupProperties( + physicalDeviceGroupPropertiesAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValue( result, + physicalDeviceGroupProperties, + VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result + Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, + VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumeratePhysicalDevices( + m_instance, pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDevices( Dispatch const & d ) const + { + std::vector physicalDevices; + uint32_t physicalDeviceCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( d.vkEnumeratePhysicalDevices( + m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) ) + { + physicalDevices.resize( physicalDeviceCount ); + } + return createResultValue( + result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const + { + std::vector physicalDevices( physicalDeviceAllocator ); + uint32_t physicalDeviceCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( d.vkEnumeratePhysicalDevices( + m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) ) + { + physicalDevices.resize( physicalDeviceCount ); + } + return createResultValue( + result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetInstanceProcAddr( m_instance, pName ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetInstanceProcAddr( m_instance, name.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkSubmitDebugUtilsMessageEXT( m_instance, + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( pCallbackData ) ); + } + 
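+  // ----------------------------------------------------------------------------------------------
+  // Editorial aside (illustrative sketch, not part of the generated Vulkan-Hpp code): in enhanced
+  // mode the wrappers above return their results directly and throw a vk::SystemError subclass on
+  // failure (unless VULKAN_HPP_NO_EXCEPTIONS is defined); enumeratePhysicalDevices() also hides
+  // the usual query-count / VK_INCOMPLETE retry loop. A hypothetical helper, assuming the default
+  // `vk` namespace, at least one physical device, and queue family 0 being good enough:
+  //
+  //   vk::UniqueDevice makeDefaultDevice( vk::Instance instance )
+  //   {
+  //     std::vector<vk::PhysicalDevice> gpus = instance.enumeratePhysicalDevices();
+  //     float priority = 1.0f;
+  //     vk::DeviceQueueCreateInfo queueInfo( {}, /*queueFamilyIndex=*/0, /*queueCount=*/1, &priority );
+  //     vk::DeviceCreateInfo createInfo( {}, /*queueCreateInfoCount=*/1, &queueInfo );
+  //     // createDeviceUnique() wraps vkCreateDevice and attaches an ObjectDestroy deleter, so
+  //     // vkDestroyDevice runs when the returned handle goes out of scope.
+  //     return gpus.front().createDeviceUnique( createInfo );
+  //   }
+  // ----------------------------------------------------------------------------------------------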
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+                                          VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
+                                          const DebugUtilsMessengerCallbackDataEXT & callbackData,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSubmitDebugUtilsMessageEXT( m_instance,
+                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
+                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
+                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV(
+    VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#  else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+  {
+    Result result =
+      static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
+  }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT(
+    Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+  }
+
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    PhysicalDevice::acquireXlibDisplayEXT( Display & dpy,
+                                           VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                                           Dispatch const & d ) const
+  {
+    Result result =
+      static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
+  }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,
+                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                  VULKAN_HPP_NAMESPACE::Device * pDevice,
+                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
+                                                  reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
+                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                  reinterpret_cast<VkDevice *>( pDevice ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type
+    PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo,
+                                  Optional<const AllocationCallbacks> allocator,
+                                  Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Device device;
+    Result result = static_cast<Result>(
+      d.vkCreateDevice( m_physicalDevice,
+                        reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
+                        reinterpret_cast<const VkAllocationCallbacks *>(
+                          static_cast<const AllocationCallbacks *>( allocator ) ),
+                        reinterpret_cast<VkDevice *>( &device ) ) );
+    return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
+  }
+
+#  ifndef
VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Device device; + Result result = static_cast( + d.vkCreateDevice( m_physicalDevice, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &device ) ) ); + ObjectDestroy deleter( allocator, d ); + return createResultValue( + result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMode ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayModeCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + Result result = static_cast( + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &mode ) ) ); + return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayModeCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + Result result = static_cast( + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &mode ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, + Dispatch 
const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, + layerName ? layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d ) const + { + std::vector properties( extensionPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, + layerName ? layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumerateDeviceLayerProperties( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceLayerProperties( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" 
); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, + Dispatch const & d ) const + { + std::vector properties( layerPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceLayerProperties( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + pCounterCount, + reinterpret_cast( pCounters ), + reinterpret_cast( pCounterDescriptions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + ArrayProxy const & counters, + Dispatch const & d ) const + { + std::vector counterDescriptions; + uint32_t counterCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() ); + counterDescriptions.resize( counterCount ); + } + return createResultValue( result, + counterDescriptions, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } + + template < + typename Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + ArrayProxy const & counters, + Allocator const & vectorAllocator, + Dispatch const & d ) const + { + std::vector counterDescriptions( vectorAllocator ); + uint32_t counterCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() ); + counterDescriptions.resize( counterCount ); + } + return createResultValue( result, + counterDescriptions, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::pair, + std::vector>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + Dispatch const & d ) const + { + std::pair, + std::vector> + data; + std::vector & counters = data.first; + std::vector & counterDescriptions = + data.second; + uint32_t counterCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } + + template ::value && + std::is_same::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::pair, + std::vector>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, + PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, + Dispatch const & d ) const + { + std::pair, + std::vector> + data( std::piecewise_construct, + std::forward_as_tuple( performanceCounterKHRAllocator ), + std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); + std::vector & counters = data.first; + std::vector & counterDescriptions = + data.second; + uint32_t counterCount; + Result result; + do + { + result = static_cast( 
d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( + VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d ) const + { + std::vector properties( + displayModeProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) 
) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, + static_cast( display ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, + Dispatch const & d ) const + { + std::vector properties( + displayModePropertiesKHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast( pDisplayPlaneInfo ), + reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; + Result result = static_cast( + d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast( &displayPlaneInfo ), + reinterpret_cast( &capabilities ) ) ); + return createResultValue( + result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, + static_cast( mode ), + planeIndex, + reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; + Result result = static_cast( + d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, + static_cast( mode ), + planeIndex, + reinterpret_cast( &capabilities ) ) ); + return createResultValue( + result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( + m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const + { + std::vector displays; + uint32_t displayCount; + Result result; + do + { + result = static_cast( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( + m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) ) + { + displays.resize( displayCount ); + } + return createResultValue( + result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + DisplayKHRAllocator & displayKHRAllocator, + Dispatch const & d ) const + { + std::vector 
displays( displayKHRAllocator ); + uint32_t displayCount; + Result result; + do + { + result = static_cast( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( + m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) ) + { + displays.resize( displayCount ); + } + return createResultValue( + result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const + { + std::vector timeDomains; + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) ) + { + timeDomains.resize( timeDomainCount ); + } + return createResultValue( + result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, + Dispatch const & d ) const + { + std::vector timeDomains( timeDomainEXTAllocator ); + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) ) + { + timeDomains.resize( timeDomainCount ); + } + return createResultValue( + result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + 
PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + } + + template < + typename CooperativeMatrixPropertiesNVAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( + CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d ) const + { + std::vector properties( + cooperativeMatrixPropertiesNVAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( + uint32_t queueFamilyIndex, IDirectFB * dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( + uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result + PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( + DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const + { + std::vector properties( + displayPlaneProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, 
&propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( + DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const + { + std::vector properties( + displayPlanePropertiesKHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, + Dispatch const & d ) const + { + std::vector properties( displayProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + 
properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, + Dispatch const & d ) const + { + std::vector properties( displayPropertiesKHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalBufferProperties( + m_physicalDevice, + reinterpret_cast( pExternalBufferInfo ), + reinterpret_cast( 
pExternalBufferProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferProperties( + m_physicalDevice, + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalBufferInfo ), + reinterpret_cast( pExternalBufferProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalFenceProperties( + m_physicalDevice, + reinterpret_cast( pExternalFenceInfo ), + reinterpret_cast( pExternalFenceProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFenceProperties( + m_physicalDevice, + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalFenceInfo ), + reinterpret_cast( pExternalFenceProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & 
externalFenceInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( pExternalImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( &externalImageFormatProperties ) ) ); + return createResultValue( result, + externalImageFormatProperties, + VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalSemaphoreProperties( + m_physicalDevice, + reinterpret_cast( pExternalSemaphoreInfo ), + reinterpret_cast( pExternalSemaphoreProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphoreProperties( + m_physicalDevice, + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
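  // -------------------------------------------------------------------------
  // Editorial note (not part of the upstream vulkan.hpp sources): the
  // enhanced-mode wrappers defined throughout this section hide the usual
  // Vulkan two-call idiom -- query the element count with a null pointer,
  // resize the vector, fetch the data, and retry while the driver reports
  // VK_INCOMPLETE -- and return their results by value (throwing
  // vk::SystemError on failure when exceptions are enabled). The sketch
  // below is kept inside `#if 0` so it never affects compilation of this
  // vendored header; it only illustrates how caller code typically consumes
  // these wrappers. It assumes the default configuration (enhanced mode and
  // exceptions enabled, default dispatcher), a Vulkan 1.2 capable header,
  // and an already created instance; the function name
  // `inspectPhysicalDevices` is purely illustrative.
#if 0
  inline void inspectPhysicalDevices( VULKAN_HPP_NAMESPACE::Instance instance )
  {
    for ( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice : instance.enumeratePhysicalDevices() )
    {
      // Simple struct-returning getter: no output parameter, no error code.
      VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = physicalDevice.getProperties();

      // Vector-returning enumeration: the count/resize/VK_INCOMPLETE loop seen
      // in the implementations above runs inside the wrapper.
      std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> extensions =
        physicalDevice.enumerateDeviceExtensionProperties();

      // StructureChain variant: query the core features together with a
      // chained feature struct in a single call.
      auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
                                               VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>();
      bool hasTimelineSemaphores =
        chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>().timelineSemaphore;

      (void)properties;
      (void)extensions;
      (void)hasTimelineSemaphores;
    }
  }
#endif
  // -------------------------------------------------------------------------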
template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalSemaphoreInfo ), + reinterpret_cast( pExternalSemaphoreProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + PhysicalDevice::getExternalSemaphorePropertiesKHR( + const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures + PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = + structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2KHR( 
m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = + structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFormatProperties( + m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties + PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; + d.vkGetPhysicalDeviceFormatProperties( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format 
), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR( + uint32_t * pFragmentShadingRateCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, + pFragmentShadingRateCount, + reinterpret_cast( pFragmentShadingRates ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const + { + std::vector + fragmentShadingRates; + uint32_t fragmentShadingRateCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, + &fragmentShadingRateCount, + reinterpret_cast( fragmentShadingRates.data() ) ) ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return createResultValue( + result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + } + + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getFragmentShadingRatesKHR( + PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, + Dispatch const & d ) const + { + std::vector + fragmentShadingRates( physicalDeviceFragmentShadingRateKHRAllocator ); + uint32_t fragmentShadingRateCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, + &fragmentShadingRateCount, + reinterpret_cast( fragmentShadingRates.data() ) ) ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return createResultValue( + result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getFragmentShadingRatesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( pImageFormatInfo ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = + structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, 
structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( pImageFormatInfo ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = + structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, + reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, + reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceMemoryProperties2( + m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2( + 
m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceMemoryProperties2( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( + m_physicalDevice, + static_cast( samples ), + reinterpret_cast( pMultisampleProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( + m_physicalDevice, + static_cast( samples ), + reinterpret_cast( &multisampleProperties ) ); + return multisampleProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + std::vector rects; + uint32_t rectCount; + Result 
result; + do + { + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( + d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, + static_cast( surface ), + &rectCount, + reinterpret_cast( rects.data() ) ) ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) ) + { + rects.resize( rectCount ); + } + return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Rect2DAllocator & rect2DAllocator, + Dispatch const & d ) const + { + std::vector rects( rect2DAllocator ); + uint32_t rectCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( + d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, + static_cast( surface ), + &rectCount, + reinterpret_cast( rects.data() ) ) ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) ) + { + rects.resize( rectCount ); + } + return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( &properties ) ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( &properties ) ); + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = + structureChain.template get(); + 
d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( &properties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( &properties ) ); + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = + structureChain.template get(); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( &properties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + m_physicalDevice, + reinterpret_cast( pPerformanceQueryCreateInfo ), + pNumPasses ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + uint32_t numPasses; + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + m_physicalDevice, + reinterpret_cast( &performanceQueryCreateInfo ), + &numPasses ); + return numPasses; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const + { + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, + Dispatch const & 
d ) const + { + std::vector queueFamilyProperties( + queueFamilyPropertiesAllocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const + { + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d ) const + { + std::vector queueFamilyProperties( + queueFamilyProperties2Allocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const + { + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & 
structureChainAllocator, + Dispatch const & d ) const + { + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount, + structureChainAllocator ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d ) const + { + std::vector queueFamilyProperties( + queueFamilyProperties2Allocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= 
queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount, + structureChainAllocator ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( + uint32_t queueFamilyIndex, struct _screen_window * window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( + uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + 
d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } + + template < + typename SparseImageFormatPropertiesAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, + Dispatch const & d ) const + { + std::vector properties( + sparseImageFormatPropertiesAllocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( pFormatInfo ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } + + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( + const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d ) const + { + std::vector properties( + sparseImageFormatProperties2Allocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + 
properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( pFormatInfo ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } + + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( + const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d ) const + { + std::vector properties( + sparseImageFormatProperties2Allocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( + uint32_t * pCombinationCount, + VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, + pCombinationCount, + reinterpret_cast( pCombinations ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const + { + std::vector combinations; + uint32_t combinationCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && 
combinationCount ) + { + combinations.resize( combinationCount ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, + &combinationCount, + reinterpret_cast( combinations.data() ) ) ); + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) ) + { + combinations.resize( combinationCount ); + } + return createResultValue( result, + combinations, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + } + + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( + FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, + Dispatch const & d ) const + { + std::vector combinations( + framebufferMixedSamplesCombinationNVAllocator ); + uint32_t combinationCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && combinationCount ) + { + combinations.resize( combinationCount ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, + &combinationCount, + reinterpret_cast( combinations.data() ) ) ); + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) ) + { + combinations.resize( combinationCount ); + } + return createResultValue( result, + combinations, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( 
pSurfaceInfo ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = + structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( pSurfaceInfo ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + 
reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, + Dispatch const & d ) const + { + std::vector surfaceFormats( surfaceFormat2KHRAllocator ); + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, + static_cast( surface ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, + static_cast( surface ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) ) + { + surfaceFormats.resize( 
surfaceFormatCount ); + } + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, + Dispatch const & d ) const + { + std::vector surfaceFormats( surfaceFormatKHRAllocator ); + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, + static_cast( surface ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( pSurfaceInfo ), + pPresentModeCount, + reinterpret_cast( pPresentModes ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const + { + std::vector presentModes; + uint32_t presentModeCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) ) + { + presentModes.resize( presentModeCount ); + } + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d ) const + { + std::vector presentModes( presentModeKHRAllocator ); + uint32_t presentModeCount; + 
Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) ) + { + presentModes.resize( presentModeCount ); + } + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, + static_cast( surface ), + pPresentModeCount, + reinterpret_cast( pPresentModes ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + std::vector presentModes; + uint32_t presentModeCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, + static_cast( surface ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) ) + { + presentModes.resize( presentModeCount ); + } + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d ) const + { + std::vector presentModes( presentModeKHRAllocator ); + uint32_t presentModeCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, + static_cast( surface ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( 
presentModeCount < presentModes.size() ) ) + { + presentModes.resize( presentModeCount ); + } + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, + queueFamilyIndex, + static_cast( surface ), + reinterpret_cast( pSupported ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Bool32 supported; + Result result = + static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, + queueFamilyIndex, + static_cast( surface ), + reinterpret_cast( &supported ) ) ); + return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT * pToolProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( + m_physicalDevice, pToolCount, reinterpret_cast( pToolProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const + { + std::vector toolProperties; + uint32_t toolCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( + m_physicalDevice, + &toolCount, + reinterpret_cast( toolProperties.data() ) ) ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) ) + { + toolProperties.resize( toolCount ); + } + return createResultValue( + result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + } + + template < + typename PhysicalDeviceToolPropertiesEXTAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getToolPropertiesEXT( + PhysicalDeviceToolPropertiesEXTAllocator & physicalDeviceToolPropertiesEXTAllocator, Dispatch const & d ) const + { + std::vector toolProperties( + physicalDeviceToolPropertiesEXTAllocator ); + uint32_t toolCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( 
d.vkGetPhysicalDeviceToolPropertiesEXT( + m_physicalDevice, + &toolCount, + reinterpret_cast( toolProperties.data() ) ) ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) ) + { + toolProperties.resize( toolCount ); + } + return createResultValue( + result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile, + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, + reinterpret_cast( pVideoProfile ), + reinterpret_cast( pCapabilities ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; + Result result = static_cast( + d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, + reinterpret_cast( &videoProfile ), + reinterpret_cast( &capabilities ) ) ); + return createResultValue( + result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = + structureChain.template get(); + Result result = static_cast( + d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, + reinterpret_cast( &videoProfile ), + reinterpret_cast( &capabilities ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoFormatPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, + reinterpret_cast( pVideoFormatInfo ), + pVideoFormatPropertyCount, + reinterpret_cast( pVideoFormatProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + Dispatch const & d ) const + { + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + nullptr ) ); + if ( ( result == 
Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return createResultValue( + result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, + Dispatch const & d ) const + { + std::vector videoFormatProperties( + videoFormatPropertiesKHRAllocator ); + uint32_t videoFormatPropertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return createResultValue( + result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( + uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( + uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( + uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + template + VULKAN_HPP_INLINE Bool32 
PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display & dpy, + VisualID visualID, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, + RROutput rrOutput, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); + return createResultValue( + result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); + ObjectRelease deleter( *this, d ); + return createResultValue( + result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + 
d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( pDisplay ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( &display ) ) ); + return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( &display ) ) ); + ObjectRelease deleter( *this, d ); + return createResultValue( + result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + Result result = + static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetQueueCheckpointData2NV( + m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( Dispatch const & d ) const + { + std::vector checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( + m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + return checkpointData; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const + { + std::vector checkpointData( checkpointData2NVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( + m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( checkpointDataCount <= 
checkpointData.size() ); + return checkpointData; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetQueueCheckpointDataNV( + m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointDataNV( Dispatch const & d ) const + { + std::vector checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointDataNV( + m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + return checkpointData; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const + { + std::vector checkpointData( checkpointDataNVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointDataNV( + m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + return checkpointData; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Queue::bindSparse( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkQueueBindSparse( m_queue, + bindInfoCount, + reinterpret_cast( pBindInfo ), + static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::bindSparse( ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + Result result = + static_cast( d.vkQueueBindSparse( m_queue, + bindInfo.size(), + reinterpret_cast( bindInfo.data() ), + static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkQueueEndDebugUtilsLabelEXT( m_queue ); + } + + template + VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( + const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkQueuePresentKHR( m_queue, reinterpret_cast( pPresentInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, + Dispatch const & d ) const + { + Result result = + static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ) ); + return createResultValue( + result, + VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkQueueSetPerformanceConfigurationINTEL( + m_queue, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const + { + Result result = static_cast( d.vkQueueSetPerformanceConfigurationINTEL( + m_queue, static_cast( configuration ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkQueueSubmit( + m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::submit( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + Result result = static_cast( d.vkQueueSubmit( m_queue, + submits.size(), + reinterpret_cast( submits.data() ), + static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Queue::submit2KHR( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkQueueSubmit2KHR( + m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename 
ResultValueType::type + Queue::submit2KHR( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + Result result = + static_cast( d.vkQueueSubmit2KHR( m_queue, + submits.size(), + reinterpret_cast( submits.data() ), + static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkQueueWaitIdle( m_queue ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::waitIdle( Dispatch const & d ) const + { + Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value 
= true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif 
/*VK_USE_PLATFORM_ANDROID_KHR*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { 
+ value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = 
true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + 
}; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + 
template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> 
+ struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_GGP ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + template <> + struct 
StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( 
VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + 
}; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + +#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + class DynamicLoader + { + public: +# ifdef VULKAN_HPP_NO_EXCEPTIONS + DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT +# else + DynamicLoader( std::string const & vulkanLibraryName = {} ) +# endif + { + if ( !vulkanLibraryName.empty() ) + { +# if defined( __linux__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) + m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); +# elif defined( _WIN32 ) + m_library = ::LoadLibraryA( vulkanLibraryName.c_str() ); +# else +# error unsupported platform +# endif + } + else + { +# if defined( __linux__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) + m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); + if ( m_library == nullptr ) + { + m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); + } +# elif defined( __APPLE__ ) + m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); +# elif defined( _WIN32 ) + m_library = ::LoadLibraryA( "vulkan-1.dll" ); +# else +# error unsupported platform +# endif + } + +# ifndef VULKAN_HPP_NO_EXCEPTIONS + if ( m_library == nullptr ) + { + // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the + // scope of this function. + throw std::runtime_error( "Failed to load vulkan library!" 
); + } +# endif + } + + DynamicLoader( DynamicLoader const & ) = delete; + + DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library ) + { + other.m_library = nullptr; + } + + DynamicLoader & operator=( DynamicLoader const & ) = delete; + + DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_library, other.m_library ); + return *this; + } + + ~DynamicLoader() VULKAN_HPP_NOEXCEPT + { + if ( m_library ) + { +# if defined( __linux__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) + dlclose( m_library ); +# elif defined( _WIN32 ) + ::FreeLibrary( m_library ); +# else +# error unsupported platform +# endif + } + } + + template + T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT + { +# if defined( __linux__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) + return (T)dlsym( m_library, function ); +# elif defined( _WIN32 ) + return ( T )::GetProcAddress( m_library, function ); +# else +# error unsupported platform +# endif + } + + bool success() const VULKAN_HPP_NOEXCEPT + { + return m_library != nullptr; + } + + private: +# if defined( __linux__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) + void * m_library; +# elif defined( _WIN32 ) + ::HINSTANCE m_library; +# else +# error unsupported platform +# endif + }; +#endif + + class DispatchLoaderDynamic + { + public: + using PFN_dummy = void ( * )(); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkAcquireFullScreenExclusiveModeEXT = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; +#else + PFN_dummy placeholder_dont_call_vkAcquireWinrtDisplayNV = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkAcquireXlibDisplayEXT = 0; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkBindVideoSessionMemoryKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + 
PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCmdBeginVideoCodingKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCmdControlVideoCodingKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCmdDecodeVideoKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR 
vkCmdDrawIndexedIndirectCountKHR = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCmdEncodeVideoKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCmdEndVideoCodingKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; + PFN_vkCmdSetFragmentShadingRateEnumNV 
vkCmdSetFragmentShadingRateEnumNV = 0; + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateAndroidSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + 
PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkCreateDevice vkCreateDevice = 0; +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateDirectFBSurfaceEXT = 0; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; +#if defined( VK_USE_PLATFORM_IOS_MVK ) + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateIOSSurfaceMVK = 0; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + PFN_vkCreateImage vkCreateImage = 0; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateImagePipeSurfaceFUCHSIA = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkCreateInstance vkCreateInstance = 0; +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateMacOSSurfaceMVK = 0; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateMetalSurfaceEXT = 0; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateScreenSurfaceQNX = 0; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; +#if defined( VK_USE_PLATFORM_GGP ) + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateStreamDescriptorSurfaceGGP = 0; +#endif /*VK_USE_PLATFORM_GGP*/ + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkCreateValidationCacheEXT 
vkCreateValidationCacheEXT = 0; +#if defined( VK_USE_PLATFORM_VI_NN ) + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateViSurfaceNN = 0; +#endif /*VK_USE_PLATFORM_VI_NN*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateVideoSessionKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateVideoSessionParametersKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateWaylandSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateWin32SurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateXcbSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateXlibSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + 
PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkDestroyVideoSessionKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkDestroyVideoSessionParametersKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkFreeMemory vkFreeMemory = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; +#else + PFN_dummy placeholder_dont_call_vkGetAndroidHardwareBufferPropertiesANDROID = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; + 
PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; +#else + PFN_dummy placeholder_dont_call_vkGetDeviceGroupSurfacePresentModes2EXT = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetFenceWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +#else + PFN_dummy placeholder_dont_call_vkGetMemoryAndroidHardwareBufferANDROID = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetMemoryWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +#else + PFN_dummy 
placeholder_dont_call_vkGetMemoryWin32HandleNV = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetMemoryWin32HandlePropertiesKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; +#else + PFN_dummy placeholder_dont_call_vkGetMemoryZirconHandleFUCHSIA = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +#else + PFN_dummy placeholder_dont_call_vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + 
PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +#endif 
/*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkGetRandROutputDisplayEXT = 0; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetSemaphoreWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +#else + PFN_dummy placeholder_dont_call_vkGetSemaphoreZirconHandleFUCHSIA = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetVideoSessionMemoryRequirementsKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +#else + PFN_dummy placeholder_dont_call_vkGetWinrtDisplayNV = 0; +#endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkImportFenceWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkImportSemaphoreWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; +#else + PFN_dummy placeholder_dont_call_vkImportSemaphoreZirconHandleFUCHSIA = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkMapMemory vkMapMemory = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; + PFN_vkQueueSubmit vkQueueSubmit = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; + PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkReleaseFullScreenExclusiveModeEXT = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + 
PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkUpdateVideoSessionParametersKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + + public: + DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default; + DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default; + +#if !defined( VK_NO_PROTOTYPES ) + // This interface is designed to be used for per-device function pointers in combination with a linked vulkan + // library. + template <typename DynamicLoader> + void init( VULKAN_HPP_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::Device const & device, + DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT + { + PFN_vkGetInstanceProcAddr getInstanceProcAddr = + dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" ); + PFN_vkGetDeviceProcAddr getDeviceProcAddr = + dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" ); + init( static_cast<VkInstance>( instance ), + getInstanceProcAddr, + static_cast<VkDevice>( device ), + device ? getDeviceProcAddr : nullptr ); + } + + // This interface is designed to be used for per-device function pointers in combination with a linked vulkan + // library. + template <typename DynamicLoader = VULKAN_HPP_NAMESPACE::DynamicLoader> + void init( VULKAN_HPP_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT + { + static DynamicLoader dl; + init( instance, device, dl ); + } +#endif // !defined( VK_NO_PROTOTYPES ) + + DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT + { + init( getInstanceProcAddr ); + } + + void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getInstanceProcAddr ); + + vkGetInstanceProcAddr = getInstanceProcAddr; + vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) ); + vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( + vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ); + vkEnumerateInstanceLayerProperties = + PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ); + vkEnumerateInstanceVersion = + PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) ); + } + + // This interface does not require a linked vulkan library. + DispatchLoaderDynamic( VkInstance instance, + PFN_vkGetInstanceProcAddr getInstanceProcAddr, + VkDevice device = {}, + PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT + { + init( instance, getInstanceProcAddr, device, getDeviceProcAddr ); + } + + // This interface does not require a linked vulkan library.
+ void init( VkInstance instance, + PFN_vkGetInstanceProcAddr getInstanceProcAddr, + VkDevice device = {}, + PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( instance && getInstanceProcAddr ); + vkGetInstanceProcAddr = getInstanceProcAddr; + init( VULKAN_HPP_NAMESPACE::Instance( instance ) ); + if ( device ) + { + init( VULKAN_HPP_NAMESPACE::Device( device ) ); + } + } + + void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT + { + VkInstance instance = static_cast<VkInstance>( instanceCpp ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkAcquireWinrtDisplayNV = + PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + vkAcquireXlibDisplayEXT = + PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + vkCreateAndroidSurfaceKHR = + PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + vkCreateDebugReportCallbackEXT = + PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkCreateDebugUtilsMessengerEXT = + PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + vkCreateDirectFBSurfaceEXT = + PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + vkCreateDisplayModeKHR = + PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = + PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + vkCreateHeadlessSurfaceEXT = + PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); +#if defined( VK_USE_PLATFORM_IOS_MVK ) + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + vkCreateImagePipeSurfaceFUCHSIA = + PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + vkCreateMacOSSurfaceMVK = + PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + vkCreateMetalSurfaceEXT = + PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + vkCreateScreenSurfaceQNX = + PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +#if defined( VK_USE_PLATFORM_GGP ) + vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( + vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +#endif /*VK_USE_PLATFORM_GGP*/ +#if defined( VK_USE_PLATFORM_VI_NN ) + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr(
instance, "vkCreateViSurfaceNN" ) ); +#endif /*VK_USE_PLATFORM_VI_NN*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + vkCreateWaylandSurfaceKHR = + PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkCreateWin32SurfaceKHR = + PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + vkCreateXlibSurfaceKHR = + PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + vkDebugReportMessageEXT = + PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + vkDestroyDebugReportCallbackEXT = + PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = + PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( + vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = + PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkEnumeratePhysicalDeviceGroups = + PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkEnumeratePhysicalDevices = + PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkGetDisplayModeProperties2KHR = + PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayModePropertiesKHR = + PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = + PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = + PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( + vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetInstanceProcAddr = PFN_vkGetInstanceProcAddr( vkGetInstanceProcAddr( instance, "vkGetInstanceProcAddr" ) ); + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 
PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + vkGetPhysicalDeviceFeatures = + PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFeatures2 = + PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceFeatures2KHR = + PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + vkGetPhysicalDeviceProperties = + PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceProperties2 = + PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceProperties2KHR = + PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); + 
vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + vkGetPhysicalDeviceScreenPresentationSupportQNX = PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkGetPhysicalDeviceVideoCapabilitiesKHR = PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( + 
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkGetPhysicalDeviceVideoFormatPropertiesKHR = PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + vkGetRandROutputDisplayEXT = + PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + vkSubmitDebugUtilsMessageEXT = + PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( + vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkAcquireNextImage2KHR = + PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); + vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkAcquireProfilingLockKHR = + PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); + vkAllocateCommandBuffers = + PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); + vkAllocateDescriptorSets = + PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( + vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); + vkBindBufferMemory = 
PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); + vkBindBufferMemory2KHR = + PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkBindVideoSessionMemoryKHR = + PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkBuildAccelerationStructuresKHR = + PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); + vkCmdBeginConditionalRenderingEXT = + PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = + PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); + vkCmdBeginQueryIndexedEXT = + PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); + vkCmdBeginRenderPass2KHR = + PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdBeginTransformFeedbackEXT = + PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdBeginVideoCodingKHR = + PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdBindDescriptorSets = + PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); + vkCmdBindPipelineShaderGroupNV = + PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCmdBindShadingRateImageNV = + PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( + vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBindVertexBuffers = + PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); + vkCmdBindVertexBuffers2EXT = + PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( 
instance, "vkCmdBlitImage" ) ); + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) ); + vkCmdBuildAccelerationStructureNV = + PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( + vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( + vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = + PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdControlVideoCodingKHR = + PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdCopyAccelerationStructureKHR = + PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureNV = + PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( + vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) ); + vkCmdCopyBufferToImage = + PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyBufferToImage2KHR = + PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) ); + vkCmdCopyImageToBuffer = + PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); + vkCmdCopyImageToBuffer2KHR = + PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( + vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkCmdCopyQueryPoolResults = + PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); + vkCmdCuLaunchKernelNVX = + PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); + vkCmdDebugMarkerBeginEXT = + PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = + PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = + PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); 
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndexedIndirect = + PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDrawIndexedIndirectCount = + PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); + vkCmdDrawIndexedIndirectCountAMD = + PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + vkCmdDrawIndexedIndirectCountKHR = + PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndirectByteCountEXT = + PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + vkCmdDrawIndirectCount = + PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndirectCountAMD = + PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndirectCountKHR = + PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawMeshTasksIndirectCountNV = + PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + vkCmdDrawMeshTasksIndirectNV = + PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdEndConditionalRenderingEXT = + PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = + PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); + vkCmdEndQueryIndexedEXT = + PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( 
instance, "vkCmdEndRenderPass2" ) ); + vkCmdEndRenderPass2KHR = + PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + vkCmdEndTransformFeedbackEXT = + PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdEndVideoCodingKHR = + PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); + vkCmdExecuteGeneratedCommandsNV = + PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); + vkCmdInsertDebugUtilsLabelEXT = + PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); + vkCmdPipelineBarrier2KHR = + PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( + vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); + vkCmdPushDescriptorSetKHR = + PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( + vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) ); + vkCmdSetBlendConstants = + PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); + vkCmdSetCoarseSampleOrderNV = + PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); + vkCmdSetColorWriteEnableEXT = + PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) ); + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); + vkCmdSetDepthBiasEnableEXT = + 
PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); + vkCmdSetDepthBoundsTestEnableEXT = + PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + vkCmdSetDepthCompareOpEXT = + PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); + vkCmdSetDepthTestEnableEXT = + PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); + vkCmdSetDepthWriteEnableEXT = + PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdSetDiscardRectangleEXT = + PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) ); + vkCmdSetExclusiveScissorNV = + PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); + vkCmdSetFragmentShadingRateEnumNV = + PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) ); + vkCmdSetFragmentShadingRateKHR = + PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) ); + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); + vkCmdSetLineStippleEXT = + PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPatchControlPointsEXT = + PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetPerformanceMarkerINTEL = + PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = + PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( + vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPrimitiveRestartEnableEXT = + PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + vkCmdSetPrimitiveTopologyEXT = + PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( + vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( + vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + vkCmdSetSampleLocationsEXT = + PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( 
instance, "vkCmdSetSampleLocationsEXT" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); + vkCmdSetScissorWithCountEXT = + PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); + vkCmdSetStencilCompareMask = + PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); + vkCmdSetStencilReference = + PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); + vkCmdSetStencilTestEnableEXT = + PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); + vkCmdSetStencilWriteMask = + PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetVertexInputEXT = + PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( + vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetViewportWScalingNV = + PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); + vkCmdSetViewportWithCountEXT = + PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); + vkCmdTraceRaysIndirectKHR = + PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( + vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCmdWriteBufferMarker2AMD = + PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); + vkCmdWriteBufferMarkerAMD = + PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); + vkCmdWriteTimestamp2KHR = + PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); + vkCopyAccelerationStructureKHR = + PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( + vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = 
PFN_vkCopyMemoryToAccelerationStructureKHR( + vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkCreateAccelerationStructureKHR = + PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); + vkCreateAccelerationStructureNV = + PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); + vkCreateComputePipelines = + PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); + vkCreateDeferredOperationKHR = + PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); + vkCreateDescriptorPool = + PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); + vkCreateDescriptorSetLayout = + PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); + vkCreateDescriptorUpdateTemplate = + PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( + vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); + vkCreateGraphicsPipelines = + PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); + vkCreateIndirectCommandsLayoutNV = + PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); + vkCreatePipelineLayout = + PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); + vkCreatePrivateDataSlotEXT = + PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); + vkCreateRayTracingPipelinesKHR = + PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); + vkCreateRayTracingPipelinesNV = + PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); + vkCreateRenderPass2 = 
PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); + vkCreateRenderPass2KHR = + PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); + vkCreateSamplerYcbcrConversion = + PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); + vkCreateSamplerYcbcrConversionKHR = + PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); + vkCreateSharedSwapchainsKHR = + PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) ); + vkCreateValidationCacheEXT = + PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCreateVideoSessionKHR = + PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCreateVideoSessionParametersKHR = + PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkDebugMarkerSetObjectNameEXT = + PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); + vkDebugMarkerSetObjectTagEXT = + PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDeferredOperationJoinKHR = + PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); + vkDestroyAccelerationStructureKHR = + PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureNV = + PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); + vkDestroyCuFunctionNVX = + PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) ); + vkDestroyDeferredOperationKHR = + PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); + vkDestroyDescriptorPool = + PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); + vkDestroyDescriptorSetLayout = + PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); + vkDestroyDescriptorUpdateTemplate = + 
PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( + vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); + vkDestroyIndirectCommandsLayoutNV = + PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); + vkDestroyPipelineCache = + PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); + vkDestroyPipelineLayout = + PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); + vkDestroyPrivateDataSlotEXT = + PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); + vkDestroySamplerYcbcrConversion = + PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( + vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) ); + vkDestroyValidationCacheEXT = + PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkDestroyVideoSessionKHR = + PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( + vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) ); + vkDisplayPowerControlEXT = + PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( 
vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); + vkFlushMappedMemoryRanges = + PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); + vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( + vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + vkGetBufferDeviceAddress = + PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); + vkGetBufferDeviceAddressEXT = + PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + vkGetBufferDeviceAddressKHR = + PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferMemoryRequirements = + PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); + vkGetBufferMemoryRequirements2 = + PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2KHR = + PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetBufferOpaqueCaptureAddress = + PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( + vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetCalibratedTimestampsEXT = + PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( + vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = + PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); + vkGetDescriptorSetLayoutSupport = + PFN_vkGetDescriptorSetLayoutSupport( 
vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkGetDeviceMemoryCommitment = + PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( + vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetFenceWin32HandleKHR = + PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( + vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + vkGetImageMemoryRequirements = + PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); + vkGetImageMemoryRequirements2 = + PFN_vkGetImageMemoryRequirements2( 
vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); + vkGetImageMemoryRequirements2KHR = + PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( + vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( + vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( + vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + vkGetImageSubresourceLayout = + PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); + vkGetImageViewAddressNVX = + PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + vkGetImageViewHandleNVX = + PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( + vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = + PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetMemoryWin32HandleKHR = + PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetMemoryWin32HandleNV = + PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + vkGetMemoryZirconHandleFUCHSIA = + PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( + vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + vkGetPastPresentationTimingGOOGLE = + PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); + vkGetPerformanceParameterINTEL = + PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); + vkGetPipelineCacheData = + PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); + 
vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( + vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( + vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); + vkGetQueueCheckpointData2NV = + PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); + vkGetQueueCheckpointDataNV = + PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( + vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( + vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( + vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkGetRefreshCycleDurationGOOGLE = + PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetRenderAreaGranularity = + PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); + vkGetSemaphoreCounterValue = + PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); + vkGetSemaphoreCounterValueKHR = + PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetSemaphoreWin32HandleKHR = + PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + vkGetSemaphoreZirconHandleFUCHSIA = + PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); + vkGetSwapchainCounterEXT = + PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); + vkGetSwapchainImagesKHR = + PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) ); + vkGetSwapchainStatusKHR = + PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); + vkGetValidationCacheDataEXT = + 
PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR( + vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkImportFenceWin32HandleKHR = + PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkImportSemaphoreFdKHR = + PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkImportSemaphoreWin32HandleKHR = + PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( + vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + vkInitializePerformanceApiINTEL = + PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); + vkInvalidateMappedMemoryRanges = + PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); + vkMergeValidationCachesEXT = + PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = + PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); + vkQueueEndDebugUtilsLabelEXT = + PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = + PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); + vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); + vkRegisterDeviceEventEXT = + PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = + PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( + vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkReleasePerformanceConfigurationINTEL = 
PFN_vkReleasePerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); + vkReleaseProfilingLockKHR = + PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); + vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + vkSetDebugUtilsObjectNameEXT = + PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = + PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + vkUninitializePerformanceApiINTEL = + PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); + vkUpdateDescriptorSetWithTemplate = + PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( + vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + vkUpdateDescriptorSets = + PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkUpdateVideoSessionParametersKHR = + PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); + vkWaitSemaphoresKHR 
= PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + } + + void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT + { + VkDevice device = static_cast<VkDevice>( deviceCpp ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkAcquireFullScreenExclusiveModeEXT = + PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkAcquireProfilingLockKHR = + PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkAllocateCommandBuffers = + PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkAllocateDescriptorSets = + PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkBindAccelerationStructureMemoryNV = + PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkBindVideoSessionMemoryKHR = + PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkBuildAccelerationStructuresKHR = + PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCmdBeginConditionalRenderingEXT = + PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = + PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdBeginQueryIndexedEXT = + PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device,
"vkCmdBeginRenderPass" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdBeginRenderPass2KHR = + PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdBeginTransformFeedbackEXT = + PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdBeginVideoCodingKHR = + PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdBindPipelineShaderGroupNV = + PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCmdBindShadingRateImageNV = + PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( + vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdBindVertexBuffers2EXT = + PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + vkCmdBuildAccelerationStructureNV = + PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( + vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = + PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = + PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdControlVideoCodingKHR = + PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdCopyAccelerationStructureKHR = + PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureNV = + PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( + vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, 
"vkCmdCopyBuffer2KHR" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyBufferToImage2KHR = + PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdCopyImageToBuffer2KHR = + PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( + vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkCmdCopyQueryPoolResults = + PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + vkCmdDebugMarkerBeginEXT = + PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = + PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndexedIndirect = + PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDrawIndexedIndirectCount = + PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCmdDrawIndexedIndirectCountAMD = + PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + vkCmdDrawIndexedIndirectCountKHR = + PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndirectByteCountEXT = + PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndirectCountAMD = + PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + 
if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndirectCountKHR = + PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawMeshTasksIndirectCountNV = + PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + vkCmdDrawMeshTasksIndirectNV = + PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdEndConditionalRenderingEXT = + PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = + PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + vkCmdEndTransformFeedbackEXT = + PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + vkCmdExecuteGeneratedCommandsNV = + PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdInsertDebugUtilsLabelEXT = + PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdPipelineBarrier2KHR = + PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + vkCmdPreprocessGeneratedCommandsNV = + PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdPushDescriptorSetKHR = + PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + 
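// As in the instance-level init above, core Vulkan 1.1/1.2 entry points resolved here + // fall back to their KHR/AMD-suffixed equivalents (e.g. vkCmdDrawIndirectCount falls back + // to vkCmdDrawIndirectCountKHR) when the core symbol is not exported by the implementation. +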
vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( + vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkCmdSetCoarseSampleOrderNV = + PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + vkCmdSetColorWriteEnableEXT = + PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetDepthBiasEnableEXT = + PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetDepthBoundsTestEnableEXT = + PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + vkCmdSetDepthCompareOpEXT = + PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + vkCmdSetDepthTestEnableEXT = + PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + vkCmdSetDepthWriteEnableEXT = + PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdSetDiscardRectangleEXT = + PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + vkCmdSetExclusiveScissorNV = + PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + vkCmdSetFragmentShadingRateEnumNV = + PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + vkCmdSetFragmentShadingRateKHR = + PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, 
"vkCmdSetLogicOpEXT" ) ); + vkCmdSetPatchControlPointsEXT = + PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetPerformanceMarkerINTEL = + PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = + PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( + vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPrimitiveRestartEnableEXT = + PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + vkCmdSetPrimitiveTopologyEXT = + PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = + PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( + vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + vkCmdSetSampleLocationsEXT = + PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetScissorWithCountEXT = + PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + vkCmdSetStencilCompareMask = + PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + vkCmdSetStencilReference = + PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdSetStencilTestEnableEXT = + PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + vkCmdSetStencilWriteMask = + PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( + vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetViewportWScalingNV = + PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + vkCmdSetViewportWithCountEXT = + PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + vkCmdTraceRaysIndirectKHR = + PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( + vkGetDeviceProcAddr( device, 
"vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( + vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCmdWriteBufferMarker2AMD = + PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + vkCmdWriteBufferMarkerAMD = + PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + vkCopyAccelerationStructureKHR = + PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( + vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( + vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkCreateAccelerationStructureKHR = + PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkCreateAccelerationStructureNV = + PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkCreateComputePipelines = + PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateDeferredOperationKHR = + PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkCreateDescriptorSetLayout = + PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkCreateDescriptorUpdateTemplate = + PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkCreateDescriptorUpdateTemplateKHR = + PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkCreateGraphicsPipelines = + PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkCreateImageView = PFN_vkCreateImageView( 
vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkCreateIndirectCommandsLayoutNV = + PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkCreatePrivateDataSlotEXT = + PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkCreateRayTracingPipelinesKHR = + PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkCreateRayTracingPipelinesNV = + PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkCreateSamplerYcbcrConversion = + PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkCreateSamplerYcbcrConversionKHR = + PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkCreateSharedSwapchainsKHR = + PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkCreateValidationCacheEXT = + PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCreateVideoSessionParametersKHR = + PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkDebugMarkerSetObjectNameEXT = + PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkDebugMarkerSetObjectTagEXT = + PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDeferredOperationJoinKHR = + PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + vkDestroyAccelerationStructureKHR = + PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureNV = + PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkDestroyBuffer = 
PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyDeferredOperationKHR = + PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkDestroyDescriptorSetLayout = + PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkDestroyDescriptorUpdateTemplate = + PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( + vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkDestroyIndirectCommandsLayoutNV = + PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkDestroyPrivateDataSlotEXT = + PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkDestroySamplerYcbcrConversion = + PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversionKHR = + PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + 
vkDestroyValidationCacheEXT = + PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkDestroyVideoSessionKHR = + PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkDestroyVideoSessionParametersKHR = + PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + vkDisplayPowerControlEXT = + PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkFlushMappedMemoryRanges = + PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkGetAccelerationStructureHandleNV = + PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( + vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + vkGetBufferDeviceAddress = + PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferDeviceAddressEXT = + PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + vkGetBufferDeviceAddressKHR = + PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferMemoryRequirements = + PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetBufferMemoryRequirements2 = + PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2KHR = + PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetBufferOpaqueCaptureAddress = + PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetBufferOpaqueCaptureAddressKHR = + 
PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetCalibratedTimestampsEXT = + PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( + vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = + PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkGetDescriptorSetLayoutSupport = + PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + vkGetDescriptorSetLayoutSupportKHR = + PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetDeviceGroupPeerMemoryFeatures = + PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkGetDeviceMemoryCommitment = + PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( + vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetFenceWin32HandleKHR = + 
PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( + vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( + vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + vkGetImageMemoryRequirements = + PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageMemoryRequirements2 = + PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetImageMemoryRequirements2KHR = + PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements = + PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements2 = + PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( + vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + vkGetImageSubresourceLayout = + PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkGetImageViewAddressNVX = + PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( + vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = + PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + vkGetMemoryHostPointerPropertiesEXT = + PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetMemoryWin32HandleKHR = + PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetMemoryWin32HandleNV = + PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetMemoryWin32HandlePropertiesKHR = + PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + vkGetMemoryZirconHandleFUCHSIA = + PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + vkGetMemoryZirconHandlePropertiesFUCHSIA = 
PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( + vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + vkGetPastPresentationTimingGOOGLE = + PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + vkGetPerformanceParameterINTEL = + PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( + vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( + vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( + vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkGetQueueCheckpointData2NV = + PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + vkGetQueueCheckpointDataNV = + PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( + vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingShaderGroupHandlesNV = + PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( + vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkGetRefreshCycleDurationGOOGLE = + PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetRenderAreaGranularity = + PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkGetSemaphoreCounterValue = + PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkGetSemaphoreCounterValueKHR = + PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetSemaphoreWin32HandleKHR = + PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + vkGetSemaphoreZirconHandleFUCHSIA = + PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + vkGetShaderInfoAMD = 
PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + vkGetSwapchainCounterEXT = + PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + vkGetValidationCacheDataEXT = + PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR( + vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkImportFenceWin32HandleKHR = + PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkImportSemaphoreWin32HandleKHR = + PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( + vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + vkInitializePerformanceApiINTEL = + PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkInvalidateMappedMemoryRanges = + PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkMergeValidationCachesEXT = + PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = + PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkQueueEndDebugUtilsLabelEXT = + PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = + PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkRegisterDeviceEventEXT = + PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = + PFN_vkRegisterDisplayEventEXT( 
vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkReleaseFullScreenExclusiveModeEXT = + PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkReleaseProfilingLockKHR = + PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + vkSetDebugUtilsObjectNameEXT = + PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = + PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + vkUninitializePerformanceApiINTEL = + PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkUpdateDescriptorSetWithTemplate = + PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( + vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkUpdateVideoSessionParametersKHR = + PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); +#endif 
/*VK_ENABLE_BETA_EXTENSIONS*/
+      vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+      vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
+      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
+      if ( !vkWaitSemaphores )
+        vkWaitSemaphores = vkWaitSemaphoresKHR;
+      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR(
+        vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+    }
+  };
+
+}  // namespace VULKAN_HPP_NAMESPACE
+
+namespace std
+{
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & accelerationStructureKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkAccelerationStructureKHR>{}( static_cast<VkAccelerationStructureKHR>( accelerationStructureKHR ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & accelerationStructureNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkAccelerationStructureNV>{}( static_cast<VkAccelerationStructureNV>( accelerationStructureNV ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Buffer>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Buffer const & buffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkBuffer>{}( static_cast<VkBuffer>( buffer ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferView>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferView const & bufferView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkBufferView>{}( static_cast<VkBufferView>( bufferView ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CommandBuffer>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBuffer const & commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkCommandBuffer>{}( static_cast<VkCommandBuffer>( commandBuffer ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CommandPool>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandPool const & commandPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkCommandPool>{}( static_cast<VkCommandPool>( commandPool ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CuFunctionNVX>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CuFunctionNVX const & cuFunctionNVX ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkCuFunctionNVX>{}( static_cast<VkCuFunctionNVX>( cuFunctionNVX ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CuModuleNVX>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleNVX const & cuModuleNVX ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkCuModuleNVX>{}( static_cast<VkCuModuleNVX>( cuModuleNVX ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & debugReportCallbackEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDebugReportCallbackEXT>{}( static_cast<VkDebugReportCallbackEXT>( debugReportCallbackEXT ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & debugUtilsMessengerEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDebugUtilsMessengerEXT>{}( static_cast<VkDebugUtilsMessengerEXT>( debugUtilsMessengerEXT ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & deferredOperationKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDeferredOperationKHR>{}( static_cast<VkDeferredOperationKHR>( deferredOperationKHR ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DescriptorPool>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPool const & descriptorPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorPool>{}( static_cast<VkDescriptorPool>( descriptorPool ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DescriptorSet>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSet const & descriptorSet ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorSet>{}( static_cast<VkDescriptorSet>( descriptorSet ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & descriptorSetLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorSetLayout>{}( static_cast<VkDescriptorSetLayout>( descriptorSetLayout ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & descriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorUpdateTemplate>{}( static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Device>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Device const & device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDevice>{}( static_cast<VkDevice>( device ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DeviceMemory>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemory const & deviceMemory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDeviceMemory>{}( static_cast<VkDeviceMemory>( deviceMemory ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DisplayKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayKHR const & displayKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDisplayKHR>{}( static_cast<VkDisplayKHR>( displayKHR ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeKHR const & displayModeKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDisplayModeKHR>{}( static_cast<VkDisplayModeKHR>( displayModeKHR ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Event>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Event const & event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkEvent>{}( static_cast<VkEvent>( event ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Fence>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Fence const & fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkFence>{}( static_cast<VkFence>( fence ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Framebuffer>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Framebuffer const & framebuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkFramebuffer>{}( static_cast<VkFramebuffer>( framebuffer ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Image>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Image const & image ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkImage>{}( static_cast<VkImage>( image ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageView>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageView const & imageView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkImageView>{}( static_cast<VkImageView>( imageView ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & indirectCommandsLayoutNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkIndirectCommandsLayoutNV>{}( static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayoutNV ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Instance>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Instance const & instance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkInstance>{}( static_cast<VkInstance>( instance ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & performanceConfigurationINTEL ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPerformanceConfigurationINTEL>{}( static_cast<VkPerformanceConfigurationINTEL>( performanceConfigurationINTEL ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice const & physicalDevice ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPhysicalDevice>{}( static_cast<VkPhysicalDevice>( physicalDevice ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Pipeline>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Pipeline const & pipeline ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipeline>{}( static_cast<VkPipeline>( pipeline ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCache>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCache const & pipelineCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipelineCache>{}( static_cast<VkPipelineCache>( pipelineCache ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineLayout>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLayout const & pipelineLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipelineLayout>{}( static_cast<VkPipelineLayout>( pipelineLayout ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT const & privateDataSlotEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPrivateDataSlotEXT>{}( static_cast<VkPrivateDataSlotEXT>( privateDataSlotEXT ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueryPool>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPool const & queryPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkQueryPool>{}( static_cast<VkQueryPool>( queryPool ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Queue>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Queue const & queue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkQueue>{}( static_cast<VkQueue>( queue ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::RenderPass>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPass const & renderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkRenderPass>{}( static_cast<VkRenderPass>( renderPass ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Sampler>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Sampler const & sampler ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSampler>{}( static_cast<VkSampler>( sampler ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & samplerYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSamplerYcbcrConversion>{}( static_cast<VkSamplerYcbcrConversion>( samplerYcbcrConversion ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Semaphore>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Semaphore const & semaphore ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSemaphore>{}( static_cast<VkSemaphore>( semaphore ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ShaderModule>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModule const & shaderModule ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkShaderModule>{}( static_cast<VkShaderModule>( shaderModule ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SurfaceKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceKHR const & surfaceKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSurfaceKHR>{}( static_cast<VkSurfaceKHR>( surfaceKHR ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SwapchainKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainKHR const & swapchainKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSwapchainKHR>{}( static_cast<VkSwapchainKHR>( swapchainKHR ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & validationCacheEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkValidationCacheEXT>{}( static_cast<VkValidationCacheEXT>( validationCacheEXT ) );
+    }
+  };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::VideoSessionKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionKHR const & videoSessionKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkVideoSessionKHR>{}( static_cast<VkVideoSessionKHR>( videoSessionKHR ) );
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & videoSessionParametersKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkVideoSessionParametersKHR>{}( static_cast<VkVideoSessionParametersKHR>( videoSessionParametersKHR ) );
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+}  // namespace std
+#endif
diff --git a/vendor/swiftshader/include/vulkan/vulkan_android.h b/vendor/swiftshader/include/vulkan/vulkan_android.h
new file mode 100644
index 00000000..2160e3e7
--- /dev/null
+++ b/vendor/swiftshader/include/vulkan/vulkan_android.h
@@ -0,0 +1,112 @@
+#ifndef VULKAN_ANDROID_H_
+#define VULKAN_ANDROID_H_ 1
+
+/*
+**
Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_KHR_android_surface 1 +struct ANativeWindow; +#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6 +#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface" +typedef VkFlags VkAndroidSurfaceCreateFlagsKHR; +typedef struct VkAndroidSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAndroidSurfaceCreateFlagsKHR flags; + struct ANativeWindow* window; +} VkAndroidSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR( + VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +#define VK_ANDROID_external_memory_android_hardware_buffer 1 +struct AHardwareBuffer; +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 3 +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer" +typedef struct VkAndroidHardwareBufferUsageANDROID { + VkStructureType sType; + void* pNext; + uint64_t androidHardwareBufferUsage; +} VkAndroidHardwareBufferUsageANDROID; + +typedef struct VkAndroidHardwareBufferPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeBits; +} VkAndroidHardwareBufferPropertiesANDROID; + +typedef struct VkAndroidHardwareBufferFormatPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + VkFormatFeatureFlags formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkAndroidHardwareBufferFormatPropertiesANDROID; + +typedef struct VkImportAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + struct AHardwareBuffer* buffer; +} VkImportAndroidHardwareBufferInfoANDROID; + +typedef struct VkMemoryGetAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkMemoryGetAndroidHardwareBufferInfoANDROID; + +typedef struct VkExternalFormatANDROID { + VkStructureType sType; + void* pNext; + uint64_t externalFormat; +} VkExternalFormatANDROID; + +typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID( + VkDevice device, + const struct AHardwareBuffer* buffer, + VkAndroidHardwareBufferPropertiesANDROID* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID( + VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, + struct AHardwareBuffer** pBuffer); +#endif + 
+#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_beta.h b/vendor/swiftshader/include/vulkan/vulkan_beta.h new file mode 100644 index 00000000..9aebb1ab --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_beta.h @@ -0,0 +1,704 @@ +#ifndef VULKAN_BETA_H_ +#define VULKAN_BETA_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_KHR_video_queue 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR) +#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION 1 +#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue" + +typedef enum VkQueryResultStatusKHR { + VK_QUERY_RESULT_STATUS_ERROR_KHR = -1, + VK_QUERY_RESULT_STATUS_NOT_READY_KHR = 0, + VK_QUERY_RESULT_STATUS_COMPLETE_KHR = 1, + VK_QUERY_RESULT_STATUS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkQueryResultStatusKHR; + +typedef enum VkVideoCodecOperationFlagBitsKHR { + VK_VIDEO_CODEC_OPERATION_INVALID_BIT_KHR = 0, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT = 0x00010000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_EXT = 0x00000001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_EXT = 0x00000002, +#endif + VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCodecOperationFlagBitsKHR; +typedef VkFlags VkVideoCodecOperationFlagsKHR; + +typedef enum VkVideoChromaSubsamplingFlagBitsKHR { + VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_BIT_KHR = 0, + VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR = 0x00000001, + VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR = 0x00000002, + VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR = 0x00000004, + VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR = 0x00000008, + VK_VIDEO_CHROMA_SUBSAMPLING_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoChromaSubsamplingFlagBitsKHR; +typedef VkFlags VkVideoChromaSubsamplingFlagsKHR; + +typedef enum VkVideoComponentBitDepthFlagBitsKHR { + VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR = 0, + VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR = 0x00000001, + VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR = 0x00000004, + VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR = 0x00000010, + VK_VIDEO_COMPONENT_BIT_DEPTH_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoComponentBitDepthFlagBitsKHR; +typedef VkFlags VkVideoComponentBitDepthFlagsKHR; + +typedef enum VkVideoCapabilitiesFlagBitsKHR { + VK_VIDEO_CAPABILITIES_PROTECTED_CONTENT_BIT_KHR = 0x00000001, + VK_VIDEO_CAPABILITIES_SEPARATE_REFERENCE_IMAGES_BIT_KHR = 0x00000002, + VK_VIDEO_CAPABILITIES_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCapabilitiesFlagBitsKHR; +typedef VkFlags VkVideoCapabilitiesFlagsKHR; + +typedef enum VkVideoSessionCreateFlagBitsKHR { + VK_VIDEO_SESSION_CREATE_DEFAULT_KHR = 0, + VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001, + VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoSessionCreateFlagBitsKHR; +typedef VkFlags VkVideoSessionCreateFlagsKHR; +typedef VkFlags VkVideoBeginCodingFlagsKHR; +typedef VkFlags VkVideoEndCodingFlagsKHR; + +typedef enum VkVideoCodingControlFlagBitsKHR { + VK_VIDEO_CODING_CONTROL_DEFAULT_KHR = 0, + VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR = 0x00000001, + VK_VIDEO_CODING_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCodingControlFlagBitsKHR; +typedef VkFlags 
VkVideoCodingControlFlagsKHR; + +typedef enum VkVideoCodingQualityPresetFlagBitsKHR { + VK_VIDEO_CODING_QUALITY_PRESET_DEFAULT_BIT_KHR = 0, + VK_VIDEO_CODING_QUALITY_PRESET_NORMAL_BIT_KHR = 0x00000001, + VK_VIDEO_CODING_QUALITY_PRESET_POWER_BIT_KHR = 0x00000002, + VK_VIDEO_CODING_QUALITY_PRESET_QUALITY_BIT_KHR = 0x00000004, + VK_VIDEO_CODING_QUALITY_PRESET_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCodingQualityPresetFlagBitsKHR; +typedef VkFlags VkVideoCodingQualityPresetFlagsKHR; +typedef struct VkVideoQueueFamilyProperties2KHR { + VkStructureType sType; + void* pNext; + VkVideoCodecOperationFlagsKHR videoCodecOperations; +} VkVideoQueueFamilyProperties2KHR; + +typedef struct VkVideoProfileKHR { + VkStructureType sType; + void* pNext; + VkVideoCodecOperationFlagBitsKHR videoCodecOperation; + VkVideoChromaSubsamplingFlagsKHR chromaSubsampling; + VkVideoComponentBitDepthFlagsKHR lumaBitDepth; + VkVideoComponentBitDepthFlagsKHR chromaBitDepth; +} VkVideoProfileKHR; + +typedef struct VkVideoProfilesKHR { + VkStructureType sType; + void* pNext; + uint32_t profileCount; + const VkVideoProfileKHR* pProfiles; +} VkVideoProfilesKHR; + +typedef struct VkVideoCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoCapabilitiesFlagsKHR capabilityFlags; + VkDeviceSize minBitstreamBufferOffsetAlignment; + VkDeviceSize minBitstreamBufferSizeAlignment; + VkExtent2D videoPictureExtentGranularity; + VkExtent2D minExtent; + VkExtent2D maxExtent; + uint32_t maxReferencePicturesSlotsCount; + uint32_t maxReferencePicturesActiveCount; +} VkVideoCapabilitiesKHR; + +typedef struct VkPhysicalDeviceVideoFormatInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags imageUsage; + const VkVideoProfilesKHR* pVideoProfiles; +} VkPhysicalDeviceVideoFormatInfoKHR; + +typedef struct VkVideoFormatPropertiesKHR { + VkStructureType sType; + void* pNext; + VkFormat format; +} VkVideoFormatPropertiesKHR; + +typedef struct VkVideoPictureResourceKHR { + VkStructureType sType; + const void* pNext; + VkOffset2D codedOffset; + VkExtent2D codedExtent; + uint32_t baseArrayLayer; + VkImageView imageViewBinding; +} VkVideoPictureResourceKHR; + +typedef struct VkVideoReferenceSlotKHR { + VkStructureType sType; + const void* pNext; + int8_t slotIndex; + const VkVideoPictureResourceKHR* pPictureResource; +} VkVideoReferenceSlotKHR; + +typedef struct VkVideoGetMemoryPropertiesKHR { + VkStructureType sType; + const void* pNext; + uint32_t memoryBindIndex; + VkMemoryRequirements2* pMemoryRequirements; +} VkVideoGetMemoryPropertiesKHR; + +typedef struct VkVideoBindMemoryKHR { + VkStructureType sType; + const void* pNext; + uint32_t memoryBindIndex; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkDeviceSize memorySize; +} VkVideoBindMemoryKHR; + +typedef struct VkVideoSessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t queueFamilyIndex; + VkVideoSessionCreateFlagsKHR flags; + const VkVideoProfileKHR* pVideoProfile; + VkFormat pictureFormat; + VkExtent2D maxCodedExtent; + VkFormat referencePicturesFormat; + uint32_t maxReferencePicturesSlotsCount; + uint32_t maxReferencePicturesActiveCount; +} VkVideoSessionCreateInfoKHR; + +typedef struct VkVideoSessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoSessionParametersKHR videoSessionParametersTemplate; + VkVideoSessionKHR videoSession; +} VkVideoSessionParametersCreateInfoKHR; + +typedef struct VkVideoSessionParametersUpdateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t 
updateSequenceCount; +} VkVideoSessionParametersUpdateInfoKHR; + +typedef struct VkVideoBeginCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoBeginCodingFlagsKHR flags; + VkVideoCodingQualityPresetFlagsKHR codecQualityPreset; + VkVideoSessionKHR videoSession; + VkVideoSessionParametersKHR videoSessionParameters; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotKHR* pReferenceSlots; +} VkVideoBeginCodingInfoKHR; + +typedef struct VkVideoEndCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEndCodingFlagsKHR flags; +} VkVideoEndCodingInfoKHR; + +typedef struct VkVideoCodingControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoCodingControlFlagsKHR flags; +} VkVideoCodingControlInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionKHR)(VkDevice device, const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionKHR)(VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pVideoSessionMemoryRequirementsCount, VkVideoGetMemoryPropertiesKHR* pVideoSessionMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t videoSessionBindMemoryCount, const VkVideoBindMemoryKHR* pVideoSessionBindMemories); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionParametersKHR)(VkDevice device, const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters); +typedef VkResult (VKAPI_PTR *PFN_vkUpdateVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBeginVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdControlVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + const VkVideoProfileKHR* pVideoProfile, + VkVideoCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoFormatPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, + uint32_t* pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR* pVideoFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL 
vkCreateVideoSessionKHR( + VkDevice device, + const VkVideoSessionCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionKHR* pVideoSession); + +VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetVideoSessionMemoryRequirementsKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t* pVideoSessionMemoryRequirementsCount, + VkVideoGetMemoryPropertiesKHR* pVideoSessionMemoryRequirements); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindVideoSessionMemoryKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t videoSessionBindMemoryCount, + const VkVideoBindMemoryKHR* pVideoSessionBindMemories); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionParametersKHR( + VkDevice device, + const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionParametersKHR* pVideoSessionParameters); + +VKAPI_ATTR VkResult VKAPI_CALL vkUpdateVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); + +VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoBeginCodingInfoKHR* pBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoEndCodingInfoKHR* pEndCodingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoCodingControlInfoKHR* pCodingControlInfo); +#endif + + +#define VK_KHR_video_decode_queue 1 +#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 1 +#define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue" + +typedef enum VkVideoDecodeFlagBitsKHR { + VK_VIDEO_DECODE_DEFAULT_KHR = 0, + VK_VIDEO_DECODE_RESERVED_0_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeFlagBitsKHR; +typedef VkFlags VkVideoDecodeFlagsKHR; +typedef struct VkVideoDecodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoDecodeFlagsKHR flags; + VkOffset2D codedOffset; + VkExtent2D codedExtent; + VkBuffer srcBuffer; + VkDeviceSize srcBufferOffset; + VkDeviceSize srcBufferRange; + VkVideoPictureResourceKHR dstPictureResource; + const VkVideoReferenceSlotKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotKHR* pReferenceSlots; +} VkVideoDecodeInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pFrameInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR( + VkCommandBuffer commandBuffer, + const VkVideoDecodeInfoKHR* pFrameInfo); +#endif + + +#define VK_KHR_portability_subset 1 +#define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1 +#define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset" +typedef struct VkPhysicalDevicePortabilitySubsetFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 constantAlphaColorBlendFactors; + VkBool32 events; + VkBool32 imageViewFormatReinterpretation; + VkBool32 imageViewFormatSwizzle; + VkBool32 imageView2DOn3DImage; + VkBool32 multisampleArrayImage; + VkBool32 mutableComparisonSamplers; + VkBool32 
pointPolygons; + VkBool32 samplerMipLodBias; + VkBool32 separateStencilMaskRef; + VkBool32 shaderSampleRateInterpolationFunctions; + VkBool32 tessellationIsolines; + VkBool32 tessellationPointMode; + VkBool32 triangleFans; + VkBool32 vertexAttributeAccessBeyondStride; +} VkPhysicalDevicePortabilitySubsetFeaturesKHR; + +typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t minVertexInputBindingStrideAlignment; +} VkPhysicalDevicePortabilitySubsetPropertiesKHR; + + + +#define VK_KHR_video_encode_queue 1 +#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 2 +#define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue" + +typedef enum VkVideoEncodeFlagBitsKHR { + VK_VIDEO_ENCODE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_RESERVED_0_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeFlagBitsKHR; +typedef VkFlags VkVideoEncodeFlagsKHR; + +typedef enum VkVideoEncodeRateControlFlagBitsKHR { + VK_VIDEO_ENCODE_RATE_CONTROL_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_RATE_CONTROL_RESET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeRateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeRateControlFlagsKHR; + +typedef enum VkVideoEncodeRateControlModeFlagBitsKHR { + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR = 0, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR = 1, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR = 2, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeRateControlModeFlagBitsKHR; +typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR; +typedef struct VkVideoEncodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeFlagsKHR flags; + uint32_t qualityLevel; + VkExtent2D codedExtent; + VkBuffer dstBitstreamBuffer; + VkDeviceSize dstBitstreamBufferOffset; + VkDeviceSize dstBitstreamBufferMaxRange; + VkVideoPictureResourceKHR srcPictureResource; + const VkVideoReferenceSlotKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotKHR* pReferenceSlots; +} VkVideoEncodeInfoKHR; + +typedef struct VkVideoEncodeRateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeRateControlFlagsKHR flags; + VkVideoEncodeRateControlModeFlagBitsKHR rateControlMode; + uint32_t averageBitrate; + uint16_t peakToAverageBitrateRatio; + uint16_t frameRateNumerator; + uint16_t frameRateDenominator; + uint32_t virtualBufferSizeInMs; +} VkVideoEncodeRateControlInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdEncodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR( + VkCommandBuffer commandBuffer, + const VkVideoEncodeInfoKHR* pEncodeInfo); +#endif + + +#define VK_EXT_video_encode_h264 1 +#include "vk_video/vulkan_video_codec_h264std.h" +#include "vk_video/vulkan_video_codec_h264std_encode.h" +#define VK_EXT_VIDEO_ENCODE_H264_SPEC_VERSION 1 +#define VK_EXT_VIDEO_ENCODE_H264_EXTENSION_NAME "VK_EXT_video_encode_h264" + +typedef enum VkVideoEncodeH264CapabilitiesFlagBitsEXT { + VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BI_PRED_IMPLICIT_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT = 0x00000008, + VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT = 0x00000010, + 
VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT = 0x00000020, + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT = 0x00000040, + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT = 0x00000080, + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT = 0x00000100, + VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT = 0x00000200, + VK_VIDEO_ENCODE_H264_CAPABILITY_EVENLY_DISTRIBUTED_SLICE_SIZE_BIT_EXT = 0x00000400, + VK_VIDEO_ENCODE_H264_CAPABILITIES_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264CapabilitiesFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264CapabilitiesFlagsEXT; + +typedef enum VkVideoEncodeH264InputModeFlagBitsEXT { + VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H264_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264InputModeFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264InputModeFlagsEXT; + +typedef enum VkVideoEncodeH264OutputModeFlagBitsEXT { + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264OutputModeFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264OutputModeFlagsEXT; + +typedef enum VkVideoEncodeH264CreateFlagBitsEXT { + VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT = 0, + VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264CreateFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264CreateFlagsEXT; +typedef struct VkVideoEncodeH264CapabilitiesEXT { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH264CapabilitiesFlagsEXT flags; + VkVideoEncodeH264InputModeFlagsEXT inputModeFlags; + VkVideoEncodeH264OutputModeFlagsEXT outputModeFlags; + VkExtent2D minPictureSizeInMbs; + VkExtent2D maxPictureSizeInMbs; + VkExtent2D inputImageDataAlignment; + uint8_t maxNumL0ReferenceForP; + uint8_t maxNumL0ReferenceForB; + uint8_t maxNumL1Reference; + uint8_t qualityLevelCount; + VkExtensionProperties stdExtensionVersion; +} VkVideoEncodeH264CapabilitiesEXT; + +typedef struct VkVideoEncodeH264SessionCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH264CreateFlagsEXT flags; + VkExtent2D maxPictureSizeInMbs; + const VkExtensionProperties* pStdExtensionVersion; +} VkVideoEncodeH264SessionCreateInfoEXT; + +typedef struct VkVideoEncodeH264SessionParametersAddInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t spsStdCount; + const StdVideoH264SequenceParameterSet* pSpsStd; + uint32_t ppsStdCount; + const StdVideoH264PictureParameterSet* pPpsStd; +} VkVideoEncodeH264SessionParametersAddInfoEXT; + +typedef struct VkVideoEncodeH264SessionParametersCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t maxSpsStdCount; + uint32_t maxPpsStdCount; + const VkVideoEncodeH264SessionParametersAddInfoEXT* pParametersAddInfo; +} VkVideoEncodeH264SessionParametersCreateInfoEXT; + +typedef struct VkVideoEncodeH264DpbSlotInfoEXT { + VkStructureType sType; + const void* pNext; + int8_t slotIndex; + const StdVideoEncodeH264PictureInfo* pStdPictureInfo; +} VkVideoEncodeH264DpbSlotInfoEXT; + +typedef struct VkVideoEncodeH264NaluSliceEXT { + VkStructureType sType; + const void* pNext; 
+ const StdVideoEncodeH264SliceHeader* pSliceHeaderStd; + uint32_t mbCount; + uint8_t refFinalList0EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefFinalList0Entries; + uint8_t refFinalList1EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefFinalList1Entries; + uint32_t precedingNaluBytes; + uint8_t minQp; + uint8_t maxQp; +} VkVideoEncodeH264NaluSliceEXT; + +typedef struct VkVideoEncodeH264VclFrameInfoEXT { + VkStructureType sType; + const void* pNext; + uint8_t refDefaultFinalList0EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefDefaultFinalList0Entries; + uint8_t refDefaultFinalList1EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefDefaultFinalList1Entries; + uint32_t naluSliceEntryCount; + const VkVideoEncodeH264NaluSliceEXT* pNaluSliceEntries; + const VkVideoEncodeH264DpbSlotInfoEXT* pCurrentPictureInfo; +} VkVideoEncodeH264VclFrameInfoEXT; + +typedef struct VkVideoEncodeH264EmitPictureParametersEXT { + VkStructureType sType; + const void* pNext; + uint8_t spsId; + VkBool32 emitSpsEnable; + uint32_t ppsIdEntryCount; + const uint8_t* ppsIdEntries; +} VkVideoEncodeH264EmitPictureParametersEXT; + +typedef struct VkVideoEncodeH264ProfileEXT { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; +} VkVideoEncodeH264ProfileEXT; + + + +#define VK_EXT_video_decode_h264 1 +#include "vk_video/vulkan_video_codec_h264std_decode.h" +#define VK_EXT_VIDEO_DECODE_H264_SPEC_VERSION 1 +#define VK_EXT_VIDEO_DECODE_H264_EXTENSION_NAME "VK_EXT_video_decode_h264" + +typedef enum VkVideoDecodeH264FieldLayoutFlagBitsEXT { + VK_VIDEO_DECODE_H264_PROGRESSIVE_PICTURES_ONLY_EXT = 0, + VK_VIDEO_DECODE_H264_FIELD_LAYOUT_LINE_INTERLACED_PLANE_BIT_EXT = 0x00000001, + VK_VIDEO_DECODE_H264_FIELD_LAYOUT_SEPARATE_INTERLACED_PLANE_BIT_EXT = 0x00000002, + VK_VIDEO_DECODE_H264_FIELD_LAYOUT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoDecodeH264FieldLayoutFlagBitsEXT; +typedef VkFlags VkVideoDecodeH264FieldLayoutFlagsEXT; +typedef VkFlags VkVideoDecodeH264CreateFlagsEXT; +typedef struct VkVideoDecodeH264ProfileEXT { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; + VkVideoDecodeH264FieldLayoutFlagsEXT fieldLayout; +} VkVideoDecodeH264ProfileEXT; + +typedef struct VkVideoDecodeH264CapabilitiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxLevel; + VkOffset2D fieldOffsetGranularity; + VkExtensionProperties stdExtensionVersion; +} VkVideoDecodeH264CapabilitiesEXT; + +typedef struct VkVideoDecodeH264SessionCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkVideoDecodeH264CreateFlagsEXT flags; + const VkExtensionProperties* pStdExtensionVersion; +} VkVideoDecodeH264SessionCreateInfoEXT; + +typedef struct VkVideoDecodeH264SessionParametersAddInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t spsStdCount; + const StdVideoH264SequenceParameterSet* pSpsStd; + uint32_t ppsStdCount; + const StdVideoH264PictureParameterSet* pPpsStd; +} VkVideoDecodeH264SessionParametersAddInfoEXT; + +typedef struct VkVideoDecodeH264SessionParametersCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t maxSpsStdCount; + uint32_t maxPpsStdCount; + const VkVideoDecodeH264SessionParametersAddInfoEXT* pParametersAddInfo; +} VkVideoDecodeH264SessionParametersCreateInfoEXT; + +typedef struct VkVideoDecodeH264PictureInfoEXT { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264PictureInfo* pStdPictureInfo; + uint32_t slicesCount; + const uint32_t* pSlicesDataOffsets; +} 
VkVideoDecodeH264PictureInfoEXT; + +typedef struct VkVideoDecodeH264MvcEXT { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264Mvc* pStdMvc; +} VkVideoDecodeH264MvcEXT; + +typedef struct VkVideoDecodeH264DpbSlotInfoEXT { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH264DpbSlotInfoEXT; + + + +#define VK_EXT_video_decode_h265 1 +#include "vk_video/vulkan_video_codec_h265std.h" +#include "vk_video/vulkan_video_codec_h265std_decode.h" +#define VK_EXT_VIDEO_DECODE_H265_SPEC_VERSION 1 +#define VK_EXT_VIDEO_DECODE_H265_EXTENSION_NAME "VK_EXT_video_decode_h265" +typedef VkFlags VkVideoDecodeH265CreateFlagsEXT; +typedef struct VkVideoDecodeH265ProfileEXT { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoDecodeH265ProfileEXT; + +typedef struct VkVideoDecodeH265CapabilitiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxLevel; + VkExtensionProperties stdExtensionVersion; +} VkVideoDecodeH265CapabilitiesEXT; + +typedef struct VkVideoDecodeH265SessionCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkVideoDecodeH265CreateFlagsEXT flags; + const VkExtensionProperties* pStdExtensionVersion; +} VkVideoDecodeH265SessionCreateInfoEXT; + +typedef struct VkVideoDecodeH265SessionParametersAddInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t spsStdCount; + const StdVideoH265SequenceParameterSet* pSpsStd; + uint32_t ppsStdCount; + const StdVideoH265PictureParameterSet* pPpsStd; +} VkVideoDecodeH265SessionParametersAddInfoEXT; + +typedef struct VkVideoDecodeH265SessionParametersCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t maxSpsStdCount; + uint32_t maxPpsStdCount; + const VkVideoDecodeH265SessionParametersAddInfoEXT* pParametersAddInfo; +} VkVideoDecodeH265SessionParametersCreateInfoEXT; + +typedef struct VkVideoDecodeH265PictureInfoEXT { + VkStructureType sType; + const void* pNext; + StdVideoDecodeH265PictureInfo* pStdPictureInfo; + uint32_t slicesCount; + const uint32_t* pSlicesDataOffsets; +} VkVideoDecodeH265PictureInfoEXT; + +typedef struct VkVideoDecodeH265DpbSlotInfoEXT { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH265ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH265DpbSlotInfoEXT; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_core.h b/vendor/swiftshader/include/vulkan/vulkan_core.h new file mode 100644 index 00000000..ccb6d0c9 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_core.h @@ -0,0 +1,12731 @@ +#ifndef VULKAN_CORE_H_ +#define VULKAN_CORE_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_VERSION_1_0 1 +#include "vk_platform.h" + +#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; + + +#ifndef VK_USE_64_BIT_PTR_DEFINES + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VK_USE_64_BIT_PTR_DEFINES 1 + #else + #define VK_USE_64_BIT_PTR_DEFINES 0 + #endif +#endif + + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #if __cplusplus >= 201103L || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201103L)) + #define VK_NULL_HANDLE nullptr + #else + #define VK_NULL_HANDLE ((void*)0) + #endif + #else + #define VK_NULL_HANDLE 0ULL + #endif +#endif +#ifndef VK_NULL_HANDLE + #define VK_NULL_HANDLE 0 +#endif + + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; + #else + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; + #endif +#endif + +// DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead. +#define VK_MAKE_VERSION(major, minor, patch) \ + ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) + +// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. +//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0 + +#define VK_MAKE_API_VERSION(variant, major, minor, patch) \ + ((((uint32_t)(variant)) << 29) | (((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) + +// Vulkan 1.0 version number +#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 + +// Version of this file +#define VK_HEADER_VERSION 178 + +// Complete version of this file +#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 2, VK_HEADER_VERSION) + +// DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead. +#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) + +// DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead. +#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU) + +// DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead. 
+#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) + +#define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29) +#define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22) & 0x7FU) +#define VK_API_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU) +#define VK_API_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) +typedef uint32_t VkBool32; +typedef uint64_t VkDeviceAddress; +typedef uint64_t VkDeviceSize; +typedef uint32_t VkFlags; +typedef uint32_t VkSampleMask; +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) +VK_DEFINE_HANDLE(VkInstance) +VK_DEFINE_HANDLE(VkPhysicalDevice) +VK_DEFINE_HANDLE(VkDevice) +VK_DEFINE_HANDLE(VkQueue) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) +VK_DEFINE_HANDLE(VkCommandBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) +#define VK_ATTACHMENT_UNUSED (~0U) +#define VK_FALSE 0U +#define VK_LOD_CLAMP_NONE 1000.0F +#define VK_QUEUE_FAMILY_IGNORED (~0U) +#define VK_REMAINING_ARRAY_LAYERS (~0U) +#define VK_REMAINING_MIP_LEVELS (~0U) +#define VK_SUBPASS_EXTERNAL (~0U) +#define VK_TRUE 1U +#define VK_WHOLE_SIZE (~0ULL) +#define VK_MAX_MEMORY_TYPES 32U +#define VK_MAX_MEMORY_HEAPS 16U +#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256U +#define VK_UUID_SIZE 16U +#define VK_MAX_EXTENSION_NAME_SIZE 256U +#define VK_MAX_DESCRIPTION_SIZE 256U + +typedef enum VkResult { + VK_SUCCESS = 0, + VK_NOT_READY = 1, + VK_TIMEOUT = 2, + VK_EVENT_SET = 3, + VK_EVENT_RESET = 4, + VK_INCOMPLETE = 5, + VK_ERROR_OUT_OF_HOST_MEMORY = -1, + VK_ERROR_OUT_OF_DEVICE_MEMORY = -2, + VK_ERROR_INITIALIZATION_FAILED = -3, + VK_ERROR_DEVICE_LOST = -4, + VK_ERROR_MEMORY_MAP_FAILED = -5, + VK_ERROR_LAYER_NOT_PRESENT = -6, + VK_ERROR_EXTENSION_NOT_PRESENT = -7, + VK_ERROR_FEATURE_NOT_PRESENT = -8, + VK_ERROR_INCOMPATIBLE_DRIVER = -9, + VK_ERROR_TOO_MANY_OBJECTS = -10, + VK_ERROR_FORMAT_NOT_SUPPORTED = -11, + VK_ERROR_FRAGMENTED_POOL = -12, + VK_ERROR_UNKNOWN = -13, + VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000, + VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003, + VK_ERROR_FRAGMENTATION = -1000161000, + VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS = -1000257000, + VK_ERROR_SURFACE_LOST_KHR = -1000000000, + VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, + VK_SUBOPTIMAL_KHR = 1000001003, + VK_ERROR_OUT_OF_DATE_KHR = -1000001004, + VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, + VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, + VK_ERROR_INVALID_SHADER_NV = -1000012000, + VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000, + VK_ERROR_NOT_PERMITTED_EXT = -1000174001, + VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000, + VK_THREAD_IDLE_KHR = 1000268000, + VK_THREAD_DONE_KHR = 1000268001, + VK_OPERATION_DEFERRED_KHR 
= 1000268002, + VK_OPERATION_NOT_DEFERRED_KHR = 1000268003, + VK_PIPELINE_COMPILE_REQUIRED_EXT = 1000297000, + VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, + VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, + VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION, + VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, + VK_RESULT_MAX_ENUM = 0x7FFFFFFF +} VkResult; + +typedef enum VkStructureType { + VK_STRUCTURE_TYPE_APPLICATION_INFO = 0, + VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2, + VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3, + VK_STRUCTURE_TYPE_SUBMIT_INFO = 4, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5, + VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6, + VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7, + VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8, + VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9, + VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10, + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11, + VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12, + VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13, + VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15, + VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16, + VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19, + VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23, + VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24, + VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26, + VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29, + VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30, + VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35, + VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36, + VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38, + VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42, + VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45, + VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46, + VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47, + VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES = 1000094000, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO = 1000157000, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO = 1000157001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES = 1000083000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS = 1000127000, + 
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO = 1000127001, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO = 1000060000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO = 1000060003, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO = 1000060004, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO = 1000060005, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO = 1000060006, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO = 1000060013, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO = 1000060014, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES = 1000070000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO = 1000070001, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 = 1000146000, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 = 1000146001, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 = 1000146002, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 = 1000146003, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 = 1000146004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 = 1000059000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 1000059001, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 = 1000059002, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2 = 1000059003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 = 1000059004, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 = 1000059005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 = 1000059006, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2 = 1000059007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 = 1000059008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES = 1000117000, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO = 1000117001, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO = 1000117002, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO = 1000117003, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO = 1000053000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES = 1000053001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES = 1000053002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES = 1000120000, + VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO = 1000145000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES = 1000145001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES = 1000145002, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2 = 1000145003, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO = 1000156000, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO = 1000156001, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO = 1000156002, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO = 1000156003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES = 1000156004, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES = 1000156005, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO = 1000085000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO = 1000071000, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES = 1000071001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO = 1000071002, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES = 1000071003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES = 1000071004, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO = 1000072000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO = 1000072001, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO = 1000072002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO = 
1000112000, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES = 1000112001, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO = 1000113000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO = 1000077000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO = 1000076000, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES = 1000076001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES = 1000063000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES = 49, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES = 50, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES = 51, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES = 52, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO = 1000147000, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2 = 1000109000, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2 = 1000109001, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2 = 1000109002, + VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2 = 1000109003, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2 = 1000109004, + VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO = 1000109005, + VK_STRUCTURE_TYPE_SUBPASS_END_INFO = 1000109006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES = 1000177000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES = 1000196000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES = 1000180000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES = 1000082000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES = 1000197000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO = 1000161000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES = 1000161001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES = 1000161002, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO = 1000161003, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT = 1000161004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES = 1000199000, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE = 1000199001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES = 1000221000, + VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO = 1000246000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES = 1000130000, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO = 1000130001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES = 1000211000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES = 1000108000, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO = 1000108001, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO = 1000108002, + VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO = 1000108003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES = 1000253000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES = 1000175000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES = 1000241000, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT = 1000241001, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT = 1000241002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES = 1000261000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES = 1000207000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES = 1000207001, + 
VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO = 1000207002, + VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO = 1000207003, + VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO = 1000207004, + VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO = 1000207005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES = 1000257000, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO = 1000244001, + VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO = 1000257002, + VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO = 1000257003, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO = 1000257004, + VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, + VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007, + VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR = 1000060008, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR = 1000060009, + VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR = 1000060010, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR = 1000060011, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR = 1000060012, + VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000, + VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001, + VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000, + VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000, + VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000, + VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000, + VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000, + VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000, + VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, + VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR = 1000023000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR = 1000023001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR = 1000023002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR = 1000023003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR = 1000023004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR = 1000023005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000023006, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR = 1000023007, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR = 1000023008, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR = 1000023009, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR = 1000023010, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR = 1000023011, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR = 1000023012, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR = 1000023013, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR = 
1000023014, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR = 1000023015, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR = 1000024000, +#endif + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT = 1000028000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002, + VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX = 1000029000, + VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX = 1000029001, + VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX = 1000029002, + VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT = 1000038000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT = 1000038001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000038002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000038003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT = 1000038004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT = 1000038005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT = 1000038006, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT = 1000038007, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT = 1000038008, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT = 1000040000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT = 1000040001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT = 1000040002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT = 1000040003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT = 1000040004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000040005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000040006, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT = 1000040007, +#endif + VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, + VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000, 
+ VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000, + VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT = 1000066000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, + VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, + VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000, + VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001, + VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001, + VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001, + VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002, + VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000, + VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, + VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001, + VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002, + VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, + VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, + VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT = 1000101000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT = 1000101001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT = 1000102000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT = 1000102001, + VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, + VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, + VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, + VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, + VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002, + VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000, + VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR = 1000116000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR = 1000116001, + VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR = 1000116002, + VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR = 1000116003, + 
VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR = 1000116004, + VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR = 1000116005, + VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR = 1000116006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001, + VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002, + VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR = 1000121000, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR = 1000121001, + VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR = 1000121002, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR = 1000121003, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR = 1000121004, + VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000, + VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT = 1000128000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT = 1000128001, + VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT = 1000128002, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT = 1000128003, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT = 1000128004, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID = 1000129000, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID = 1000129001, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID = 1000129002, + VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003, + VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004, + VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT = 1000138000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT = 1000138001, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT = 1000138002, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT = 1000138003, + VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, + VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, + VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003, + VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000150007, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR = 1000150000, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR = 1000150002, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR = 1000150003, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR = 1000150004, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR = 1000150005, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR = 1000150006, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR = 1000150009, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR = 1000150010, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR = 1000150011, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR = 1000150012, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR = 1000150013, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR = 1000150014, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR = 1000150020, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR = 1000347000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR = 1000347001, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR = 1000150015, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR = 1000150016, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR = 1000150018, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR = 1000348013, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001, + VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT = 1000158000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT = 1000158002, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT = 1000158003, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT = 1000158004, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005, + VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000, + VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR = 1000163000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR = 1000163001, +#endif + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV = 1000164005, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV = 1000165000, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV = 1000165001, + VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003, + VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004, + VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005, + VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV = 1000165012, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV = 1000166000, + VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV = 1000166001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT = 1000170000, + VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT = 1000170001, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = 1000174000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000, + VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR = 1000181000, + VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD = 1000183000, + VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = 1000184000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT = 1000187000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT = 1000187001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000187002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000187003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT = 1000187004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT = 1000187005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT = 1000187006, +#endif + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = 1000190001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = 1000190002, + VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000, + VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000192000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = 1000201000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = 1000203000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV = 1000204000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV = 1000205000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000, + VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000, + VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001, + VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL = 1000210002, + VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL = 1000210003, + VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL = 1000210004, + VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL = 1000210005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT = 1000212000, + VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD = 1000213000, + VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD = 1000213001, + VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA = 1000214000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR = 1000215000, + VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT = 1000217000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001, + 
VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = 1000225000, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = 1000225001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = 1000225002, + VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT = 1000234000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT = 1000238000, + VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001, + VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR = 1000239000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV = 1000240000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT = 1000244000, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT = 1000244002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT = 1000245000, + VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT = 1000247000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV = 1000249000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV = 1000250000, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV = 1000250001, + VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV = 1000250002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT = 1000251000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT = 1000252000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT = 1000254000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT = 1000254001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT = 1000254002, + VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT = 1000255000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT = 1000255002, + VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT = 1000255001, + VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT = 1000256000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 1000259000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT = 1000260000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT = 1000267000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000, + VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001, + 
VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = 1000276000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV = 1000277000, + VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV = 1000277001, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV = 1000277002, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV = 1000277003, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV = 1000277004, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV = 1000277005, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV = 1000277006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV = 1000277007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV = 1000278000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV = 1000278001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = 1000281001, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000, + VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT = 1000284000, + VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT = 1000284001, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT = 1000284002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT = 1000286000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT = 1000286001, + VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT = 1000287000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002, + VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT = 1000295000, + VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT = 1000295001, + VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT = 1000295002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = 1000297000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR = 1000299000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR = 1000299001, +#endif + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000, + VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001, + VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR = 1000314000, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR = 1000314001, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR = 1000314002, + VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR = 1000314003, + VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR = 1000314004, + VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR = 1000314005, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR = 1000314006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR = 1000314007, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR = 1000325000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV = 1000326002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT = 1000330000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT = 1000332000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT = 1000332001, + VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM = 1000333000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT = 1000335000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000, + VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR = 1000337000, + VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR = 1000337001, + VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR = 1000337002, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR = 1000337003, + VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR = 1000337004, + VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR = 1000337005, + VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR = 1000337006, + VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR = 1000337007, + VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = 1000337008, + VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = 1000337009, + VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = 1000337010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000, + VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = 1000351000, + VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = 1000351002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT = 1000352000, + VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT = 1000352001, + VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT = 1000352002, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364000, + VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = 1000364001, + VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364002, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT = 1000377000, + VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX = 1000378000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT = 1000381000, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT = 1000381001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = 
VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = 
VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, + VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, + VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, + VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, + VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, + VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkStructureType; + +typedef enum VkImageLayout { + VK_IMAGE_LAYOUT_UNDEFINED = 0, + VK_IMAGE_LAYOUT_GENERAL = 1, + VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, + VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, + VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, + VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, + VK_IMAGE_LAYOUT_PREINITIALIZED = 8, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001, + VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002, + VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003, + VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR = 1000024000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR = 1000024001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR = 1000024002, +#endif + VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, + VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003, + VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR = 1000299000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + 
VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002, +#endif + VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR = 1000314000, + VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR = 1000314001, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF +} VkImageLayout; + +typedef enum VkObjectType { + VK_OBJECT_TYPE_UNKNOWN = 0, + VK_OBJECT_TYPE_INSTANCE = 1, + VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, + VK_OBJECT_TYPE_DEVICE = 3, + VK_OBJECT_TYPE_QUEUE = 4, + VK_OBJECT_TYPE_SEMAPHORE = 5, + VK_OBJECT_TYPE_COMMAND_BUFFER = 6, + VK_OBJECT_TYPE_FENCE = 7, + VK_OBJECT_TYPE_DEVICE_MEMORY = 8, + VK_OBJECT_TYPE_BUFFER = 9, + VK_OBJECT_TYPE_IMAGE = 10, + VK_OBJECT_TYPE_EVENT = 11, + VK_OBJECT_TYPE_QUERY_POOL = 12, + VK_OBJECT_TYPE_BUFFER_VIEW = 13, + VK_OBJECT_TYPE_IMAGE_VIEW = 14, + VK_OBJECT_TYPE_SHADER_MODULE = 15, + VK_OBJECT_TYPE_PIPELINE_CACHE = 16, + VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, + VK_OBJECT_TYPE_RENDER_PASS = 18, + VK_OBJECT_TYPE_PIPELINE = 19, + VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, + VK_OBJECT_TYPE_SAMPLER = 21, + VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, + VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, + VK_OBJECT_TYPE_FRAMEBUFFER = 24, + VK_OBJECT_TYPE_COMMAND_POOL = 25, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000, + VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, + VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, + VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, + VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, + VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_OBJECT_TYPE_VIDEO_SESSION_KHR = 1000023000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR = 1000023001, +#endif + VK_OBJECT_TYPE_CU_MODULE_NVX = 1000029000, + VK_OBJECT_TYPE_CU_FUNCTION_NVX = 1000029001, + VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000, + VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, + VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000, + VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000, + VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = 1000295000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkObjectType; + +typedef enum VkVendorId { + VK_VENDOR_ID_VIV = 0x10001, + VK_VENDOR_ID_VSI = 0x10002, + VK_VENDOR_ID_KAZAN = 0x10003, + VK_VENDOR_ID_CODEPLAY = 0x10004, + VK_VENDOR_ID_MESA = 0x10005, + VK_VENDOR_ID_POCL = 0x10006, + VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF +} VkVendorId; + +typedef enum VkPipelineCacheHeaderVersion { + VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 
1, + VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheHeaderVersion; + +typedef enum VkSystemAllocationScope { + VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, + VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, + VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, + VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, + VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF +} VkSystemAllocationScope; + +typedef enum VkInternalAllocationType { + VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, + VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkInternalAllocationType; + +typedef enum VkFormat { + VK_FORMAT_UNDEFINED = 0, + VK_FORMAT_R4G4_UNORM_PACK8 = 1, + VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2, + VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3, + VK_FORMAT_R5G6B5_UNORM_PACK16 = 4, + VK_FORMAT_B5G6R5_UNORM_PACK16 = 5, + VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6, + VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7, + VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8, + VK_FORMAT_R8_UNORM = 9, + VK_FORMAT_R8_SNORM = 10, + VK_FORMAT_R8_USCALED = 11, + VK_FORMAT_R8_SSCALED = 12, + VK_FORMAT_R8_UINT = 13, + VK_FORMAT_R8_SINT = 14, + VK_FORMAT_R8_SRGB = 15, + VK_FORMAT_R8G8_UNORM = 16, + VK_FORMAT_R8G8_SNORM = 17, + VK_FORMAT_R8G8_USCALED = 18, + VK_FORMAT_R8G8_SSCALED = 19, + VK_FORMAT_R8G8_UINT = 20, + VK_FORMAT_R8G8_SINT = 21, + VK_FORMAT_R8G8_SRGB = 22, + VK_FORMAT_R8G8B8_UNORM = 23, + VK_FORMAT_R8G8B8_SNORM = 24, + VK_FORMAT_R8G8B8_USCALED = 25, + VK_FORMAT_R8G8B8_SSCALED = 26, + VK_FORMAT_R8G8B8_UINT = 27, + VK_FORMAT_R8G8B8_SINT = 28, + VK_FORMAT_R8G8B8_SRGB = 29, + VK_FORMAT_B8G8R8_UNORM = 30, + VK_FORMAT_B8G8R8_SNORM = 31, + VK_FORMAT_B8G8R8_USCALED = 32, + VK_FORMAT_B8G8R8_SSCALED = 33, + VK_FORMAT_B8G8R8_UINT = 34, + VK_FORMAT_B8G8R8_SINT = 35, + VK_FORMAT_B8G8R8_SRGB = 36, + VK_FORMAT_R8G8B8A8_UNORM = 37, + VK_FORMAT_R8G8B8A8_SNORM = 38, + VK_FORMAT_R8G8B8A8_USCALED = 39, + VK_FORMAT_R8G8B8A8_SSCALED = 40, + VK_FORMAT_R8G8B8A8_UINT = 41, + VK_FORMAT_R8G8B8A8_SINT = 42, + VK_FORMAT_R8G8B8A8_SRGB = 43, + VK_FORMAT_B8G8R8A8_UNORM = 44, + VK_FORMAT_B8G8R8A8_SNORM = 45, + VK_FORMAT_B8G8R8A8_USCALED = 46, + VK_FORMAT_B8G8R8A8_SSCALED = 47, + VK_FORMAT_B8G8R8A8_UINT = 48, + VK_FORMAT_B8G8R8A8_SINT = 49, + VK_FORMAT_B8G8R8A8_SRGB = 50, + VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51, + VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52, + VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53, + VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54, + VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55, + VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56, + VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57, + VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58, + VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59, + VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60, + VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61, + VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62, + VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63, + VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64, + VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65, + VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66, + VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67, + VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68, + VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69, + VK_FORMAT_R16_UNORM = 70, + VK_FORMAT_R16_SNORM = 71, + VK_FORMAT_R16_USCALED = 72, + VK_FORMAT_R16_SSCALED = 73, + VK_FORMAT_R16_UINT = 74, + VK_FORMAT_R16_SINT = 75, + VK_FORMAT_R16_SFLOAT = 76, + VK_FORMAT_R16G16_UNORM = 77, + VK_FORMAT_R16G16_SNORM = 78, + VK_FORMAT_R16G16_USCALED = 79, + VK_FORMAT_R16G16_SSCALED = 80, + VK_FORMAT_R16G16_UINT = 81, + VK_FORMAT_R16G16_SINT = 82, + VK_FORMAT_R16G16_SFLOAT = 83, + VK_FORMAT_R16G16B16_UNORM = 84, + VK_FORMAT_R16G16B16_SNORM = 85, + 
VK_FORMAT_R16G16B16_USCALED = 86, + VK_FORMAT_R16G16B16_SSCALED = 87, + VK_FORMAT_R16G16B16_UINT = 88, + VK_FORMAT_R16G16B16_SINT = 89, + VK_FORMAT_R16G16B16_SFLOAT = 90, + VK_FORMAT_R16G16B16A16_UNORM = 91, + VK_FORMAT_R16G16B16A16_SNORM = 92, + VK_FORMAT_R16G16B16A16_USCALED = 93, + VK_FORMAT_R16G16B16A16_SSCALED = 94, + VK_FORMAT_R16G16B16A16_UINT = 95, + VK_FORMAT_R16G16B16A16_SINT = 96, + VK_FORMAT_R16G16B16A16_SFLOAT = 97, + VK_FORMAT_R32_UINT = 98, + VK_FORMAT_R32_SINT = 99, + VK_FORMAT_R32_SFLOAT = 100, + VK_FORMAT_R32G32_UINT = 101, + VK_FORMAT_R32G32_SINT = 102, + VK_FORMAT_R32G32_SFLOAT = 103, + VK_FORMAT_R32G32B32_UINT = 104, + VK_FORMAT_R32G32B32_SINT = 105, + VK_FORMAT_R32G32B32_SFLOAT = 106, + VK_FORMAT_R32G32B32A32_UINT = 107, + VK_FORMAT_R32G32B32A32_SINT = 108, + VK_FORMAT_R32G32B32A32_SFLOAT = 109, + VK_FORMAT_R64_UINT = 110, + VK_FORMAT_R64_SINT = 111, + VK_FORMAT_R64_SFLOAT = 112, + VK_FORMAT_R64G64_UINT = 113, + VK_FORMAT_R64G64_SINT = 114, + VK_FORMAT_R64G64_SFLOAT = 115, + VK_FORMAT_R64G64B64_UINT = 116, + VK_FORMAT_R64G64B64_SINT = 117, + VK_FORMAT_R64G64B64_SFLOAT = 118, + VK_FORMAT_R64G64B64A64_UINT = 119, + VK_FORMAT_R64G64B64A64_SINT = 120, + VK_FORMAT_R64G64B64A64_SFLOAT = 121, + VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122, + VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123, + VK_FORMAT_D16_UNORM = 124, + VK_FORMAT_X8_D24_UNORM_PACK32 = 125, + VK_FORMAT_D32_SFLOAT = 126, + VK_FORMAT_S8_UINT = 127, + VK_FORMAT_D16_UNORM_S8_UINT = 128, + VK_FORMAT_D24_UNORM_S8_UINT = 129, + VK_FORMAT_D32_SFLOAT_S8_UINT = 130, + VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131, + VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132, + VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133, + VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134, + VK_FORMAT_BC2_UNORM_BLOCK = 135, + VK_FORMAT_BC2_SRGB_BLOCK = 136, + VK_FORMAT_BC3_UNORM_BLOCK = 137, + VK_FORMAT_BC3_SRGB_BLOCK = 138, + VK_FORMAT_BC4_UNORM_BLOCK = 139, + VK_FORMAT_BC4_SNORM_BLOCK = 140, + VK_FORMAT_BC5_UNORM_BLOCK = 141, + VK_FORMAT_BC5_SNORM_BLOCK = 142, + VK_FORMAT_BC6H_UFLOAT_BLOCK = 143, + VK_FORMAT_BC6H_SFLOAT_BLOCK = 144, + VK_FORMAT_BC7_UNORM_BLOCK = 145, + VK_FORMAT_BC7_SRGB_BLOCK = 146, + VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147, + VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148, + VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149, + VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150, + VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151, + VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152, + VK_FORMAT_EAC_R11_UNORM_BLOCK = 153, + VK_FORMAT_EAC_R11_SNORM_BLOCK = 154, + VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155, + VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156, + VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157, + VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158, + VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159, + VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160, + VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161, + VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162, + VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163, + VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164, + VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165, + VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166, + VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167, + VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168, + VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169, + VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170, + VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171, + VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172, + VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173, + VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174, + VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175, + VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176, + VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177, + VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178, + VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179, + VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180, + VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181, + 
VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182, + VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183, + VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184, + VK_FORMAT_G8B8G8R8_422_UNORM = 1000156000, + VK_FORMAT_B8G8R8G8_422_UNORM = 1000156001, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM = 1000156002, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM = 1000156003, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM = 1000156004, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM = 1000156005, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM = 1000156006, + VK_FORMAT_R10X6_UNORM_PACK16 = 1000156007, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16 = 1000156008, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16 = 1000156009, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = 1000156010, + VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = 1000156011, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = 1000156012, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = 1000156013, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = 1000156014, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = 1000156015, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = 1000156016, + VK_FORMAT_R12X4_UNORM_PACK16 = 1000156017, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16 = 1000156018, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 = 1000156019, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = 1000156020, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = 1000156021, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = 1000156022, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = 1000156023, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = 1000156024, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = 1000156025, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = 1000156026, + VK_FORMAT_G16B16G16R16_422_UNORM = 1000156027, + VK_FORMAT_B16G16R16G16_422_UNORM = 1000156028, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM = 1000156029, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM = 1000156030, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033, + VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, + VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, + VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, + VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003, + VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004, + VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005, + VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, + VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, + VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = 1000066000, + VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = 1000066001, + VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = 1000066002, + VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT = 1000066003, + VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT = 1000066004, + VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT = 1000066005, + VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT = 1000066006, + VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT = 1000066007, + VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT = 1000066008, + VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT = 1000066009, + VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT = 1000066010, + VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = 1000066011, + VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = 1000066012, + VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = 1000066013, + VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT = 1000330000, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT = 1000330001, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT = 1000330002, + VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT = 1000330003, + 
VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT = 1000340000, + VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = 1000340001, + VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM, + VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + VK_FORMAT_R10X6_UNORM_PACK16_KHR = VK_FORMAT_R10X6_UNORM_PACK16, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_R12X4_UNORM_PACK16_KHR = VK_FORMAT_R12X4_UNORM_PACK16, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_G16B16G16R16_422_UNORM_KHR = VK_FORMAT_G16B16G16R16_422_UNORM, + VK_FORMAT_B16G16R16G16_422_UNORM_KHR = VK_FORMAT_B16G16R16G16_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + VK_FORMAT_MAX_ENUM = 0x7FFFFFFF +} VkFormat; + +typedef enum VkImageTiling { + VK_IMAGE_TILING_OPTIMAL = 0, + VK_IMAGE_TILING_LINEAR = 1, + VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT = 1000158000, + VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF +} VkImageTiling; + +typedef enum 
VkImageType { + VK_IMAGE_TYPE_1D = 0, + VK_IMAGE_TYPE_2D = 1, + VK_IMAGE_TYPE_3D = 2, + VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageType; + +typedef enum VkPhysicalDeviceType { + VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, + VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, + VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, + VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, + VK_PHYSICAL_DEVICE_TYPE_CPU = 4, + VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkPhysicalDeviceType; + +typedef enum VkQueryType { + VK_QUERY_TYPE_OCCLUSION = 0, + VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, + VK_QUERY_TYPE_TIMESTAMP = 2, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR = 1000023000, +#endif + VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004, + VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000150000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150001, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000, + VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR = 1000299000, +#endif + VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkQueryType; + +typedef enum VkSharingMode { + VK_SHARING_MODE_EXCLUSIVE = 0, + VK_SHARING_MODE_CONCURRENT = 1, + VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSharingMode; + +typedef enum VkComponentSwizzle { + VK_COMPONENT_SWIZZLE_IDENTITY = 0, + VK_COMPONENT_SWIZZLE_ZERO = 1, + VK_COMPONENT_SWIZZLE_ONE = 2, + VK_COMPONENT_SWIZZLE_R = 3, + VK_COMPONENT_SWIZZLE_G = 4, + VK_COMPONENT_SWIZZLE_B = 5, + VK_COMPONENT_SWIZZLE_A = 6, + VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF +} VkComponentSwizzle; + +typedef enum VkImageViewType { + VK_IMAGE_VIEW_TYPE_1D = 0, + VK_IMAGE_VIEW_TYPE_2D = 1, + VK_IMAGE_VIEW_TYPE_3D = 2, + VK_IMAGE_VIEW_TYPE_CUBE = 3, + VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, + VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, + VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, + VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageViewType; + +typedef enum VkBlendFactor { + VK_BLEND_FACTOR_ZERO = 0, + VK_BLEND_FACTOR_ONE = 1, + VK_BLEND_FACTOR_SRC_COLOR = 2, + VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3, + VK_BLEND_FACTOR_DST_COLOR = 4, + VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5, + VK_BLEND_FACTOR_SRC_ALPHA = 6, + VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7, + VK_BLEND_FACTOR_DST_ALPHA = 8, + VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9, + VK_BLEND_FACTOR_CONSTANT_COLOR = 10, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11, + VK_BLEND_FACTOR_CONSTANT_ALPHA = 12, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13, + VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14, + VK_BLEND_FACTOR_SRC1_COLOR = 15, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, + VK_BLEND_FACTOR_SRC1_ALPHA = 17, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, + VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF +} VkBlendFactor; + +typedef enum VkBlendOp { + VK_BLEND_OP_ADD = 0, + VK_BLEND_OP_SUBTRACT = 1, + VK_BLEND_OP_REVERSE_SUBTRACT = 2, + VK_BLEND_OP_MIN = 3, + VK_BLEND_OP_MAX = 4, + VK_BLEND_OP_ZERO_EXT = 1000148000, + VK_BLEND_OP_SRC_EXT = 1000148001, + VK_BLEND_OP_DST_EXT = 1000148002, + VK_BLEND_OP_SRC_OVER_EXT = 1000148003, + VK_BLEND_OP_DST_OVER_EXT = 1000148004, + VK_BLEND_OP_SRC_IN_EXT = 1000148005, + VK_BLEND_OP_DST_IN_EXT = 1000148006, + VK_BLEND_OP_SRC_OUT_EXT = 1000148007, + VK_BLEND_OP_DST_OUT_EXT = 1000148008, + VK_BLEND_OP_SRC_ATOP_EXT = 1000148009, + VK_BLEND_OP_DST_ATOP_EXT = 1000148010, + VK_BLEND_OP_XOR_EXT = 1000148011, + VK_BLEND_OP_MULTIPLY_EXT = 1000148012, + 
VK_BLEND_OP_SCREEN_EXT = 1000148013, + VK_BLEND_OP_OVERLAY_EXT = 1000148014, + VK_BLEND_OP_DARKEN_EXT = 1000148015, + VK_BLEND_OP_LIGHTEN_EXT = 1000148016, + VK_BLEND_OP_COLORDODGE_EXT = 1000148017, + VK_BLEND_OP_COLORBURN_EXT = 1000148018, + VK_BLEND_OP_HARDLIGHT_EXT = 1000148019, + VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020, + VK_BLEND_OP_DIFFERENCE_EXT = 1000148021, + VK_BLEND_OP_EXCLUSION_EXT = 1000148022, + VK_BLEND_OP_INVERT_EXT = 1000148023, + VK_BLEND_OP_INVERT_RGB_EXT = 1000148024, + VK_BLEND_OP_LINEARDODGE_EXT = 1000148025, + VK_BLEND_OP_LINEARBURN_EXT = 1000148026, + VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027, + VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028, + VK_BLEND_OP_PINLIGHT_EXT = 1000148029, + VK_BLEND_OP_HARDMIX_EXT = 1000148030, + VK_BLEND_OP_HSL_HUE_EXT = 1000148031, + VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032, + VK_BLEND_OP_HSL_COLOR_EXT = 1000148033, + VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034, + VK_BLEND_OP_PLUS_EXT = 1000148035, + VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036, + VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037, + VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038, + VK_BLEND_OP_MINUS_EXT = 1000148039, + VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040, + VK_BLEND_OP_CONTRAST_EXT = 1000148041, + VK_BLEND_OP_INVERT_OVG_EXT = 1000148042, + VK_BLEND_OP_RED_EXT = 1000148043, + VK_BLEND_OP_GREEN_EXT = 1000148044, + VK_BLEND_OP_BLUE_EXT = 1000148045, + VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF +} VkBlendOp; + +typedef enum VkCompareOp { + VK_COMPARE_OP_NEVER = 0, + VK_COMPARE_OP_LESS = 1, + VK_COMPARE_OP_EQUAL = 2, + VK_COMPARE_OP_LESS_OR_EQUAL = 3, + VK_COMPARE_OP_GREATER = 4, + VK_COMPARE_OP_NOT_EQUAL = 5, + VK_COMPARE_OP_GREATER_OR_EQUAL = 6, + VK_COMPARE_OP_ALWAYS = 7, + VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF +} VkCompareOp; + +typedef enum VkDynamicState { + VK_DYNAMIC_STATE_VIEWPORT = 0, + VK_DYNAMIC_STATE_SCISSOR = 1, + VK_DYNAMIC_STATE_LINE_WIDTH = 2, + VK_DYNAMIC_STATE_DEPTH_BIAS = 3, + VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4, + VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5, + VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6, + VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7, + VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8, + VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, + VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, + VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000, + VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR = 1000347000, + VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV = 1000164004, + VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006, + VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001, + VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR = 1000226000, + VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = 1000259000, + VK_DYNAMIC_STATE_CULL_MODE_EXT = 1000267000, + VK_DYNAMIC_STATE_FRONT_FACE_EXT = 1000267001, + VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = 1000267002, + VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT = 1000267003, + VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT = 1000267004, + VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT = 1000267005, + VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT = 1000267006, + VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT = 1000267007, + VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT = 1000267008, + VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT = 1000267009, + VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT = 1000267010, + VK_DYNAMIC_STATE_STENCIL_OP_EXT = 1000267011, + VK_DYNAMIC_STATE_VERTEX_INPUT_EXT = 1000352000, + VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT = 1000377000, + VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT = 1000377001, + VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT = 1000377002, + 
VK_DYNAMIC_STATE_LOGIC_OP_EXT = 1000377003, + VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT = 1000377004, + VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT = 1000381000, + VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF +} VkDynamicState; + +typedef enum VkFrontFace { + VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, + VK_FRONT_FACE_CLOCKWISE = 1, + VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF +} VkFrontFace; + +typedef enum VkVertexInputRate { + VK_VERTEX_INPUT_RATE_VERTEX = 0, + VK_VERTEX_INPUT_RATE_INSTANCE = 1, + VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF +} VkVertexInputRate; + +typedef enum VkPrimitiveTopology { + VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, + VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, + VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF +} VkPrimitiveTopology; + +typedef enum VkPolygonMode { + VK_POLYGON_MODE_FILL = 0, + VK_POLYGON_MODE_LINE = 1, + VK_POLYGON_MODE_POINT = 2, + VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, + VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF +} VkPolygonMode; + +typedef enum VkStencilOp { + VK_STENCIL_OP_KEEP = 0, + VK_STENCIL_OP_ZERO = 1, + VK_STENCIL_OP_REPLACE = 2, + VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, + VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, + VK_STENCIL_OP_INVERT = 5, + VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, + VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, + VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF +} VkStencilOp; + +typedef enum VkLogicOp { + VK_LOGIC_OP_CLEAR = 0, + VK_LOGIC_OP_AND = 1, + VK_LOGIC_OP_AND_REVERSE = 2, + VK_LOGIC_OP_COPY = 3, + VK_LOGIC_OP_AND_INVERTED = 4, + VK_LOGIC_OP_NO_OP = 5, + VK_LOGIC_OP_XOR = 6, + VK_LOGIC_OP_OR = 7, + VK_LOGIC_OP_NOR = 8, + VK_LOGIC_OP_EQUIVALENT = 9, + VK_LOGIC_OP_INVERT = 10, + VK_LOGIC_OP_OR_REVERSE = 11, + VK_LOGIC_OP_COPY_INVERTED = 12, + VK_LOGIC_OP_OR_INVERTED = 13, + VK_LOGIC_OP_NAND = 14, + VK_LOGIC_OP_SET = 15, + VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF +} VkLogicOp; + +typedef enum VkBorderColor { + VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, + VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1, + VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2, + VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, + VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, + VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, + VK_BORDER_COLOR_FLOAT_CUSTOM_EXT = 1000287003, + VK_BORDER_COLOR_INT_CUSTOM_EXT = 1000287004, + VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF +} VkBorderColor; + +typedef enum VkFilter { + VK_FILTER_NEAREST = 0, + VK_FILTER_LINEAR = 1, + VK_FILTER_CUBIC_IMG = 1000015000, + VK_FILTER_CUBIC_EXT = VK_FILTER_CUBIC_IMG, + VK_FILTER_MAX_ENUM = 0x7FFFFFFF +} VkFilter; + +typedef enum VkSamplerAddressMode { + VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, + VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, + VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, + VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, + VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerAddressMode; + +typedef enum VkSamplerMipmapMode { + VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, + VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, + VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerMipmapMode; + +typedef enum 
VkDescriptorType { + VK_DESCRIPTOR_TYPE_SAMPLER = 0, + VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1, + VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2, + VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3, + VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4, + VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, + VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, + VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = 1000138000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, + VK_DESCRIPTOR_TYPE_MUTABLE_VALVE = 1000351000, + VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorType; + +typedef enum VkAttachmentLoadOp { + VK_ATTACHMENT_LOAD_OP_LOAD = 0, + VK_ATTACHMENT_LOAD_OP_CLEAR = 1, + VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, + VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentLoadOp; + +typedef enum VkAttachmentStoreOp { + VK_ATTACHMENT_STORE_OP_STORE = 0, + VK_ATTACHMENT_STORE_OP_DONT_CARE = 1, + VK_ATTACHMENT_STORE_OP_NONE_QCOM = 1000301000, + VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentStoreOp; + +typedef enum VkPipelineBindPoint { + VK_PIPELINE_BIND_POINT_GRAPHICS = 0, + VK_PIPELINE_BIND_POINT_COMPUTE = 1, + VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000, + VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, + VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF +} VkPipelineBindPoint; + +typedef enum VkCommandBufferLevel { + VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, + VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, + VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferLevel; + +typedef enum VkIndexType { + VK_INDEX_TYPE_UINT16 = 0, + VK_INDEX_TYPE_UINT32 = 1, + VK_INDEX_TYPE_NONE_KHR = 1000165000, + VK_INDEX_TYPE_UINT8_EXT = 1000265000, + VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR, + VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkIndexType; + +typedef enum VkSubpassContents { + VK_SUBPASS_CONTENTS_INLINE = 0, + VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, + VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassContents; + +typedef enum VkAccessFlagBits { + VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, + VK_ACCESS_INDEX_READ_BIT = 0x00000002, + VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, + VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, + VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, + VK_ACCESS_SHADER_READ_BIT = 0x00000020, + VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, + VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, + VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, + VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, + VK_ACCESS_HOST_READ_BIT = 0x00002000, + VK_ACCESS_HOST_WRITE_BIT = 0x00004000, + VK_ACCESS_MEMORY_READ_BIT = 0x00008000, + VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, + VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000, + VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000, + VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000, + VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 
0x00800000, + VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000, + VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000, + VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000, + VK_ACCESS_NONE_KHR = 0, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, + VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAccessFlagBits; +typedef VkFlags VkAccessFlags; + +typedef enum VkImageAspectFlagBits { + VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, + VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, + VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, + VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, + VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, + VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, + VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, + VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080, + VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100, + VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200, + VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400, + VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, + VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, + VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT, + VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageAspectFlagBits; +typedef VkFlags VkImageAspectFlags; + +typedef enum VkFormatFeatureFlagBits { + VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, + VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002, + VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004, + VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020, + VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100, + VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200, + VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400, + VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000, + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT = 0x00004000, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT = 0x00008000, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000, + VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000, +#endif + VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000, + VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000, + VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + 
VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000, +#endif + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, + VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFormatFeatureFlagBits; +typedef VkFlags VkFormatFeatureFlags; + +typedef enum VkImageCreateFlagBits { + VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008, + VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010, + VK_IMAGE_CREATE_ALIAS_BIT = 0x00000400, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT = 0x00000040, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT = 0x00000020, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT = 0x00000080, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT = 0x00000100, + VK_IMAGE_CREATE_PROTECTED_BIT = 0x00000800, + VK_IMAGE_CREATE_DISJOINT_BIT = 0x00000200, + VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000, + VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000, + VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT = 0x00004000, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + VK_IMAGE_CREATE_DISJOINT_BIT_KHR = VK_IMAGE_CREATE_DISJOINT_BIT, + VK_IMAGE_CREATE_ALIAS_BIT_KHR = VK_IMAGE_CREATE_ALIAS_BIT, + VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageCreateFlagBits; +typedef VkFlags VkImageCreateFlags; + +typedef enum VkSampleCountFlagBits { + VK_SAMPLE_COUNT_1_BIT = 0x00000001, + VK_SAMPLE_COUNT_2_BIT = 0x00000002, + VK_SAMPLE_COUNT_4_BIT = 0x00000004, + VK_SAMPLE_COUNT_8_BIT = 0x00000008, + VK_SAMPLE_COUNT_16_BIT = 0x00000010, + VK_SAMPLE_COUNT_32_BIT = 0x00000020, + VK_SAMPLE_COUNT_64_BIT = 0x00000040, + VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSampleCountFlagBits; +typedef VkFlags VkSampleCountFlags; + +typedef enum VkImageUsageFlagBits { 
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, + VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, + VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, + VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, + VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, + VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00000400, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00000800, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000, +#endif + VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00000100, + VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00002000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00004000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR = 0x00008000, +#endif + VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, + VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageUsageFlagBits; +typedef VkFlags VkImageUsageFlags; +typedef VkFlags VkInstanceCreateFlags; + +typedef enum VkMemoryHeapFlagBits { + VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryHeapFlagBits; +typedef VkFlags VkMemoryHeapFlags; + +typedef enum VkMemoryPropertyFlagBits { + VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002, + VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004, + VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008, + VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010, + VK_MEMORY_PROPERTY_PROTECTED_BIT = 0x00000020, + VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD = 0x00000040, + VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD = 0x00000080, + VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryPropertyFlagBits; +typedef VkFlags VkMemoryPropertyFlags; + +typedef enum VkQueueFlagBits { + VK_QUEUE_GRAPHICS_BIT = 0x00000001, + VK_QUEUE_COMPUTE_BIT = 0x00000002, + VK_QUEUE_TRANSFER_BIT = 0x00000004, + VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, + VK_QUEUE_PROTECTED_BIT = 0x00000010, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUEUE_VIDEO_DECODE_BIT_KHR = 0x00000020, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUEUE_VIDEO_ENCODE_BIT_KHR = 0x00000040, +#endif + VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueueFlagBits; +typedef VkFlags VkQueueFlags; +typedef VkFlags VkDeviceCreateFlags; + +typedef enum VkDeviceQueueCreateFlagBits { + VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT = 0x00000001, + VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDeviceQueueCreateFlagBits; +typedef VkFlags VkDeviceQueueCreateFlags; + +typedef enum VkPipelineStageFlagBits { + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001, + VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002, + VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004, + VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008, + VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010, + VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020, + VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040, + VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080, + 
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100, + VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400, + VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800, + VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000, + VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000, + VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, + VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, + VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, + VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000, + VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000, + VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00400000, + VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = 0x00080000, + VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = 0x00100000, + VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000, + VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000, + VK_PIPELINE_STAGE_NONE_KHR = 0, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, + VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineStageFlagBits; +typedef VkFlags VkPipelineStageFlags; +typedef VkFlags VkMemoryMapFlags; + +typedef enum VkSparseMemoryBindFlagBits { + VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, + VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseMemoryBindFlagBits; +typedef VkFlags VkSparseMemoryBindFlags; + +typedef enum VkSparseImageFormatFlagBits { + VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, + VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002, + VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004, + VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseImageFormatFlagBits; +typedef VkFlags VkSparseImageFormatFlags; + +typedef enum VkFenceCreateFlagBits { + VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, + VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceCreateFlagBits; +typedef VkFlags VkFenceCreateFlags; +typedef VkFlags VkSemaphoreCreateFlags; + +typedef enum VkEventCreateFlagBits { + VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR = 0x00000001, + VK_EVENT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkEventCreateFlagBits; +typedef VkFlags VkEventCreateFlags; + +typedef enum VkQueryPipelineStatisticFlagBits { + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002, + VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040, + VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200, + VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, + VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryPipelineStatisticFlagBits; +typedef 
VkFlags VkQueryPipelineStatisticFlags; +typedef VkFlags VkQueryPoolCreateFlags; + +typedef enum VkQueryResultFlagBits { + VK_QUERY_RESULT_64_BIT = 0x00000001, + VK_QUERY_RESULT_WAIT_BIT = 0x00000002, + VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, + VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUERY_RESULT_WITH_STATUS_BIT_KHR = 0x00000010, +#endif + VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryResultFlagBits; +typedef VkFlags VkQueryResultFlags; + +typedef enum VkBufferCreateFlagBits { + VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000010, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferCreateFlagBits; +typedef VkFlags VkBufferCreateFlags; + +typedef enum VkBufferUsageFlagBits { + VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004, + VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008, + VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010, + VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020, + VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, + VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, + VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT = 0x00020000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00004000, +#endif + VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800, + VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000, + VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200, + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000, + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000, + VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000, +#endif + VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferUsageFlagBits; +typedef VkFlags VkBufferUsageFlags; +typedef VkFlags VkBufferViewCreateFlags; + +typedef enum VkImageViewCreateFlagBits { + VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001, + VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT = 0x00000002, + VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageViewCreateFlagBits; +typedef VkFlags VkImageViewCreateFlags; + +typedef enum VkShaderModuleCreateFlagBits { + VK_SHADER_MODULE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkShaderModuleCreateFlagBits; +typedef VkFlags VkShaderModuleCreateFlags; + +typedef enum VkPipelineCacheCreateFlagBits { + 
VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT = 0x00000001, + VK_PIPELINE_CACHE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheCreateFlagBits; +typedef VkFlags VkPipelineCacheCreateFlags; + +typedef enum VkColorComponentFlagBits { + VK_COLOR_COMPONENT_R_BIT = 0x00000001, + VK_COLOR_COMPONENT_G_BIT = 0x00000002, + VK_COLOR_COMPONENT_B_BIT = 0x00000004, + VK_COLOR_COMPONENT_A_BIT = 0x00000008, + VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkColorComponentFlagBits; +typedef VkFlags VkColorComponentFlags; + +typedef enum VkPipelineCreateFlagBits { + VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, + VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, + VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008, + VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000, + VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000, + VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, + VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, + VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, + VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000, + VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800, + VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = 0x00000100, + VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = 0x00000200, + VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCreateFlagBits; +typedef VkFlags VkPipelineCreateFlags; + +typedef enum VkPipelineShaderStageCreateFlagBits { + VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = 0x00000001, + VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = 0x00000002, + VK_PIPELINE_SHADER_STAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineShaderStageCreateFlagBits; +typedef VkFlags VkPipelineShaderStageCreateFlags; + +typedef enum VkShaderStageFlagBits { + VK_SHADER_STAGE_VERTEX_BIT = 0x00000001, + VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002, + VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004, + VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008, + VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010, + VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, + VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, + VK_SHADER_STAGE_ALL = 0x7FFFFFFF, + VK_SHADER_STAGE_RAYGEN_BIT_KHR = 0x00000100, + VK_SHADER_STAGE_ANY_HIT_BIT_KHR = 0x00000200, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR = 0x00000400, + VK_SHADER_STAGE_MISS_BIT_KHR = 0x00000800, + VK_SHADER_STAGE_INTERSECTION_BIT_KHR = 0x00001000, + VK_SHADER_STAGE_CALLABLE_BIT_KHR = 0x00002000, + VK_SHADER_STAGE_TASK_BIT_NV = 0x00000040, + VK_SHADER_STAGE_MESH_BIT_NV = 0x00000080, + VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + VK_SHADER_STAGE_ANY_HIT_BIT_NV = 
VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + VK_SHADER_STAGE_MISS_BIT_NV = VK_SHADER_STAGE_MISS_BIT_KHR, + VK_SHADER_STAGE_INTERSECTION_BIT_NV = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + VK_SHADER_STAGE_CALLABLE_BIT_NV = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkShaderStageFlagBits; + +typedef enum VkCullModeFlagBits { + VK_CULL_MODE_NONE = 0, + VK_CULL_MODE_FRONT_BIT = 0x00000001, + VK_CULL_MODE_BACK_BIT = 0x00000002, + VK_CULL_MODE_FRONT_AND_BACK = 0x00000003, + VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCullModeFlagBits; +typedef VkFlags VkCullModeFlags; +typedef VkFlags VkPipelineVertexInputStateCreateFlags; +typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; +typedef VkFlags VkPipelineTessellationStateCreateFlags; +typedef VkFlags VkPipelineViewportStateCreateFlags; +typedef VkFlags VkPipelineRasterizationStateCreateFlags; +typedef VkFlags VkPipelineMultisampleStateCreateFlags; +typedef VkFlags VkPipelineDepthStencilStateCreateFlags; +typedef VkFlags VkPipelineColorBlendStateCreateFlags; +typedef VkFlags VkPipelineDynamicStateCreateFlags; +typedef VkFlags VkPipelineLayoutCreateFlags; +typedef VkFlags VkShaderStageFlags; + +typedef enum VkSamplerCreateFlagBits { + VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT = 0x00000001, + VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT = 0x00000002, + VK_SAMPLER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSamplerCreateFlagBits; +typedef VkFlags VkSamplerCreateFlags; + +typedef enum VkDescriptorPoolCreateFlagBits { + VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002, + VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE = 0x00000004, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, + VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorPoolCreateFlagBits; +typedef VkFlags VkDescriptorPoolCreateFlags; +typedef VkFlags VkDescriptorPoolResetFlags; + +typedef enum VkDescriptorSetLayoutCreateFlagBits { + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = 0x00000004, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorSetLayoutCreateFlagBits; +typedef VkFlags VkDescriptorSetLayoutCreateFlags; + +typedef enum VkAttachmentDescriptionFlagBits { + VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, + VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentDescriptionFlagBits; +typedef VkFlags VkAttachmentDescriptionFlags; + +typedef enum VkDependencyFlagBits { + VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, + VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004, + VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002, + VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT, + VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT, + VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDependencyFlagBits; +typedef VkFlags VkDependencyFlags; + +typedef enum VkFramebufferCreateFlagBits { + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT = 0x00000001, + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} 
VkFramebufferCreateFlagBits; +typedef VkFlags VkFramebufferCreateFlags; + +typedef enum VkRenderPassCreateFlagBits { + VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM = 0x00000002, + VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkRenderPassCreateFlagBits; +typedef VkFlags VkRenderPassCreateFlags; + +typedef enum VkSubpassDescriptionFlagBits { + VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, + VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, + VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM = 0x00000004, + VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM = 0x00000008, + VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassDescriptionFlagBits; +typedef VkFlags VkSubpassDescriptionFlags; + +typedef enum VkCommandPoolCreateFlagBits { + VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, + VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, + VK_COMMAND_POOL_CREATE_PROTECTED_BIT = 0x00000004, + VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolCreateFlagBits; +typedef VkFlags VkCommandPoolCreateFlags; + +typedef enum VkCommandPoolResetFlagBits { + VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolResetFlagBits; +typedef VkFlags VkCommandPoolResetFlags; + +typedef enum VkCommandBufferUsageFlagBits { + VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001, + VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002, + VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004, + VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferUsageFlagBits; +typedef VkFlags VkCommandBufferUsageFlags; + +typedef enum VkQueryControlFlagBits { + VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001, + VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryControlFlagBits; +typedef VkFlags VkQueryControlFlags; + +typedef enum VkCommandBufferResetFlagBits { + VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferResetFlagBits; +typedef VkFlags VkCommandBufferResetFlags; + +typedef enum VkStencilFaceFlagBits { + VK_STENCIL_FACE_FRONT_BIT = 0x00000001, + VK_STENCIL_FACE_BACK_BIT = 0x00000002, + VK_STENCIL_FACE_FRONT_AND_BACK = 0x00000003, + VK_STENCIL_FRONT_AND_BACK = VK_STENCIL_FACE_FRONT_AND_BACK, + VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkStencilFaceFlagBits; +typedef VkFlags VkStencilFaceFlags; +typedef struct VkExtent2D { + uint32_t width; + uint32_t height; +} VkExtent2D; + +typedef struct VkExtent3D { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkExtent3D; + +typedef struct VkOffset2D { + int32_t x; + int32_t y; +} VkOffset2D; + +typedef struct VkOffset3D { + int32_t x; + int32_t y; + int32_t z; +} VkOffset3D; + +typedef struct VkRect2D { + VkOffset2D offset; + VkExtent2D extent; +} VkRect2D; + +typedef struct VkBaseInStructure { + VkStructureType sType; + const struct VkBaseInStructure* pNext; +} VkBaseInStructure; + +typedef struct VkBaseOutStructure { + VkStructureType sType; + struct VkBaseOutStructure* pNext; +} VkBaseOutStructure; + +typedef struct VkBufferMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier; + +typedef struct VkDispatchIndirectCommand { + uint32_t x; + uint32_t y; + uint32_t z; 
+} VkDispatchIndirectCommand; + +typedef struct VkDrawIndexedIndirectCommand { + uint32_t indexCount; + uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; +} VkDrawIndexedIndirectCommand; + +typedef struct VkDrawIndirectCommand { + uint32_t vertexCount; + uint32_t instanceCount; + uint32_t firstVertex; + uint32_t firstInstance; +} VkDrawIndirectCommand; + +typedef struct VkImageSubresourceRange { + VkImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceRange; + +typedef struct VkImageMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier; + +typedef struct VkMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; +} VkMemoryBarrier; + +typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( + void* pUserData, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkFreeFunction)( + void* pUserData, + void* pMemory); + +typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( + void* pUserData, + void* pOriginal, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); +typedef struct VkAllocationCallbacks { + void* pUserData; + PFN_vkAllocationFunction pfnAllocation; + PFN_vkReallocationFunction pfnReallocation; + PFN_vkFreeFunction pfnFree; + PFN_vkInternalAllocationNotification pfnInternalAllocation; + PFN_vkInternalFreeNotification pfnInternalFree; +} VkAllocationCallbacks; + +typedef struct VkApplicationInfo { + VkStructureType sType; + const void* pNext; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; +} VkApplicationInfo; + +typedef struct VkFormatProperties { + VkFormatFeatureFlags linearTilingFeatures; + VkFormatFeatureFlags optimalTilingFeatures; + VkFormatFeatureFlags bufferFeatures; +} VkFormatProperties; + +typedef struct VkImageFormatProperties { + VkExtent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + VkSampleCountFlags sampleCounts; + VkDeviceSize maxResourceSize; +} VkImageFormatProperties; + +typedef struct VkInstanceCreateInfo { + VkStructureType sType; + const void* pNext; + VkInstanceCreateFlags flags; + const VkApplicationInfo* pApplicationInfo; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; +} VkInstanceCreateInfo; + +typedef struct VkMemoryHeap { + VkDeviceSize size; + VkMemoryHeapFlags flags; +} VkMemoryHeap; + +typedef struct VkMemoryType { + VkMemoryPropertyFlags propertyFlags; + uint32_t heapIndex; +} VkMemoryType; + +typedef struct VkPhysicalDeviceFeatures { + VkBool32 robustBufferAccess; 
+ VkBool32 fullDrawIndexUint32; + VkBool32 imageCubeArray; + VkBool32 independentBlend; + VkBool32 geometryShader; + VkBool32 tessellationShader; + VkBool32 sampleRateShading; + VkBool32 dualSrcBlend; + VkBool32 logicOp; + VkBool32 multiDrawIndirect; + VkBool32 drawIndirectFirstInstance; + VkBool32 depthClamp; + VkBool32 depthBiasClamp; + VkBool32 fillModeNonSolid; + VkBool32 depthBounds; + VkBool32 wideLines; + VkBool32 largePoints; + VkBool32 alphaToOne; + VkBool32 multiViewport; + VkBool32 samplerAnisotropy; + VkBool32 textureCompressionETC2; + VkBool32 textureCompressionASTC_LDR; + VkBool32 textureCompressionBC; + VkBool32 occlusionQueryPrecise; + VkBool32 pipelineStatisticsQuery; + VkBool32 vertexPipelineStoresAndAtomics; + VkBool32 fragmentStoresAndAtomics; + VkBool32 shaderTessellationAndGeometryPointSize; + VkBool32 shaderImageGatherExtended; + VkBool32 shaderStorageImageExtendedFormats; + VkBool32 shaderStorageImageMultisample; + VkBool32 shaderStorageImageReadWithoutFormat; + VkBool32 shaderStorageImageWriteWithoutFormat; + VkBool32 shaderUniformBufferArrayDynamicIndexing; + VkBool32 shaderSampledImageArrayDynamicIndexing; + VkBool32 shaderStorageBufferArrayDynamicIndexing; + VkBool32 shaderStorageImageArrayDynamicIndexing; + VkBool32 shaderClipDistance; + VkBool32 shaderCullDistance; + VkBool32 shaderFloat64; + VkBool32 shaderInt64; + VkBool32 shaderInt16; + VkBool32 shaderResourceResidency; + VkBool32 shaderResourceMinLod; + VkBool32 sparseBinding; + VkBool32 sparseResidencyBuffer; + VkBool32 sparseResidencyImage2D; + VkBool32 sparseResidencyImage3D; + VkBool32 sparseResidency2Samples; + VkBool32 sparseResidency4Samples; + VkBool32 sparseResidency8Samples; + VkBool32 sparseResidency16Samples; + VkBool32 sparseResidencyAliased; + VkBool32 variableMultisampleRate; + VkBool32 inheritedQueries; +} VkPhysicalDeviceFeatures; + +typedef struct VkPhysicalDeviceLimits { + uint32_t maxImageDimension1D; + uint32_t maxImageDimension2D; + uint32_t maxImageDimension3D; + uint32_t maxImageDimensionCube; + uint32_t maxImageArrayLayers; + uint32_t maxTexelBufferElements; + uint32_t maxUniformBufferRange; + uint32_t maxStorageBufferRange; + uint32_t maxPushConstantsSize; + uint32_t maxMemoryAllocationCount; + uint32_t maxSamplerAllocationCount; + VkDeviceSize bufferImageGranularity; + VkDeviceSize sparseAddressSpaceSize; + uint32_t maxBoundDescriptorSets; + uint32_t maxPerStageDescriptorSamplers; + uint32_t maxPerStageDescriptorUniformBuffers; + uint32_t maxPerStageDescriptorStorageBuffers; + uint32_t maxPerStageDescriptorSampledImages; + uint32_t maxPerStageDescriptorStorageImages; + uint32_t maxPerStageDescriptorInputAttachments; + uint32_t maxPerStageResources; + uint32_t maxDescriptorSetSamplers; + uint32_t maxDescriptorSetUniformBuffers; + uint32_t maxDescriptorSetUniformBuffersDynamic; + uint32_t maxDescriptorSetStorageBuffers; + uint32_t maxDescriptorSetStorageBuffersDynamic; + uint32_t maxDescriptorSetSampledImages; + uint32_t maxDescriptorSetStorageImages; + uint32_t maxDescriptorSetInputAttachments; + uint32_t maxVertexInputAttributes; + uint32_t maxVertexInputBindings; + uint32_t maxVertexInputAttributeOffset; + uint32_t maxVertexInputBindingStride; + uint32_t maxVertexOutputComponents; + uint32_t maxTessellationGenerationLevel; + uint32_t maxTessellationPatchSize; + uint32_t maxTessellationControlPerVertexInputComponents; + uint32_t maxTessellationControlPerVertexOutputComponents; + uint32_t maxTessellationControlPerPatchOutputComponents; + uint32_t 
maxTessellationControlTotalOutputComponents; + uint32_t maxTessellationEvaluationInputComponents; + uint32_t maxTessellationEvaluationOutputComponents; + uint32_t maxGeometryShaderInvocations; + uint32_t maxGeometryInputComponents; + uint32_t maxGeometryOutputComponents; + uint32_t maxGeometryOutputVertices; + uint32_t maxGeometryTotalOutputComponents; + uint32_t maxFragmentInputComponents; + uint32_t maxFragmentOutputAttachments; + uint32_t maxFragmentDualSrcAttachments; + uint32_t maxFragmentCombinedOutputResources; + uint32_t maxComputeSharedMemorySize; + uint32_t maxComputeWorkGroupCount[3]; + uint32_t maxComputeWorkGroupInvocations; + uint32_t maxComputeWorkGroupSize[3]; + uint32_t subPixelPrecisionBits; + uint32_t subTexelPrecisionBits; + uint32_t mipmapPrecisionBits; + uint32_t maxDrawIndexedIndexValue; + uint32_t maxDrawIndirectCount; + float maxSamplerLodBias; + float maxSamplerAnisotropy; + uint32_t maxViewports; + uint32_t maxViewportDimensions[2]; + float viewportBoundsRange[2]; + uint32_t viewportSubPixelBits; + size_t minMemoryMapAlignment; + VkDeviceSize minTexelBufferOffsetAlignment; + VkDeviceSize minUniformBufferOffsetAlignment; + VkDeviceSize minStorageBufferOffsetAlignment; + int32_t minTexelOffset; + uint32_t maxTexelOffset; + int32_t minTexelGatherOffset; + uint32_t maxTexelGatherOffset; + float minInterpolationOffset; + float maxInterpolationOffset; + uint32_t subPixelInterpolationOffsetBits; + uint32_t maxFramebufferWidth; + uint32_t maxFramebufferHeight; + uint32_t maxFramebufferLayers; + VkSampleCountFlags framebufferColorSampleCounts; + VkSampleCountFlags framebufferDepthSampleCounts; + VkSampleCountFlags framebufferStencilSampleCounts; + VkSampleCountFlags framebufferNoAttachmentsSampleCounts; + uint32_t maxColorAttachments; + VkSampleCountFlags sampledImageColorSampleCounts; + VkSampleCountFlags sampledImageIntegerSampleCounts; + VkSampleCountFlags sampledImageDepthSampleCounts; + VkSampleCountFlags sampledImageStencilSampleCounts; + VkSampleCountFlags storageImageSampleCounts; + uint32_t maxSampleMaskWords; + VkBool32 timestampComputeAndGraphics; + float timestampPeriod; + uint32_t maxClipDistances; + uint32_t maxCullDistances; + uint32_t maxCombinedClipAndCullDistances; + uint32_t discreteQueuePriorities; + float pointSizeRange[2]; + float lineWidthRange[2]; + float pointSizeGranularity; + float lineWidthGranularity; + VkBool32 strictLines; + VkBool32 standardSampleLocations; + VkDeviceSize optimalBufferCopyOffsetAlignment; + VkDeviceSize optimalBufferCopyRowPitchAlignment; + VkDeviceSize nonCoherentAtomSize; +} VkPhysicalDeviceLimits; + +typedef struct VkPhysicalDeviceMemoryProperties { + uint32_t memoryTypeCount; + VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryProperties; + +typedef struct VkPhysicalDeviceSparseProperties { + VkBool32 residencyStandard2DBlockShape; + VkBool32 residencyStandard2DMultisampleBlockShape; + VkBool32 residencyStandard3DBlockShape; + VkBool32 residencyAlignedMipSize; + VkBool32 residencyNonResidentStrict; +} VkPhysicalDeviceSparseProperties; + +typedef struct VkPhysicalDeviceProperties { + uint32_t apiVersion; + uint32_t driverVersion; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceType deviceType; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + VkPhysicalDeviceLimits limits; + VkPhysicalDeviceSparseProperties sparseProperties; +} VkPhysicalDeviceProperties; + 
+typedef struct VkQueueFamilyProperties { + VkQueueFlags queueFlags; + uint32_t queueCount; + uint32_t timestampValidBits; + VkExtent3D minImageTransferGranularity; +} VkQueueFamilyProperties; + +typedef struct VkDeviceQueueCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueCount; + const float* pQueuePriorities; +} VkDeviceQueueCreateInfo; + +typedef struct VkDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceCreateFlags flags; + uint32_t queueCreateInfoCount; + const VkDeviceQueueCreateInfo* pQueueCreateInfos; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; + const VkPhysicalDeviceFeatures* pEnabledFeatures; +} VkDeviceCreateInfo; + +typedef struct VkExtensionProperties { + char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; +} VkExtensionProperties; + +typedef struct VkLayerProperties { + char layerName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + uint32_t implementationVersion; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkLayerProperties; + +typedef struct VkSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + const VkPipelineStageFlags* pWaitDstStageMask; + uint32_t commandBufferCount; + const VkCommandBuffer* pCommandBuffers; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkSubmitInfo; + +typedef struct VkMappedMemoryRange { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMappedMemoryRange; + +typedef struct VkMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeIndex; +} VkMemoryAllocateInfo; + +typedef struct VkMemoryRequirements { + VkDeviceSize size; + VkDeviceSize alignment; + uint32_t memoryTypeBits; +} VkMemoryRequirements; + +typedef struct VkSparseMemoryBind { + VkDeviceSize resourceOffset; + VkDeviceSize size; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseMemoryBind; + +typedef struct VkSparseBufferMemoryBindInfo { + VkBuffer buffer; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseBufferMemoryBindInfo; + +typedef struct VkSparseImageOpaqueMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseImageOpaqueMemoryBindInfo; + +typedef struct VkImageSubresource { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t arrayLayer; +} VkImageSubresource; + +typedef struct VkSparseImageMemoryBind { + VkImageSubresource subresource; + VkOffset3D offset; + VkExtent3D extent; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseImageMemoryBind; + +typedef struct VkSparseImageMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseImageMemoryBind* pBinds; +} VkSparseImageMemoryBindInfo; + +typedef struct VkBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t bufferBindCount; + const VkSparseBufferMemoryBindInfo* pBufferBinds; + uint32_t imageOpaqueBindCount; + const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; + uint32_t imageBindCount; + const VkSparseImageMemoryBindInfo* pImageBinds; + uint32_t signalSemaphoreCount; 
+ const VkSemaphore* pSignalSemaphores; +} VkBindSparseInfo; + +typedef struct VkSparseImageFormatProperties { + VkImageAspectFlags aspectMask; + VkExtent3D imageGranularity; + VkSparseImageFormatFlags flags; +} VkSparseImageFormatProperties; + +typedef struct VkSparseImageMemoryRequirements { + VkSparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + VkDeviceSize imageMipTailSize; + VkDeviceSize imageMipTailOffset; + VkDeviceSize imageMipTailStride; +} VkSparseImageMemoryRequirements; + +typedef struct VkFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkFenceCreateFlags flags; +} VkFenceCreateInfo; + +typedef struct VkSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreCreateFlags flags; +} VkSemaphoreCreateInfo; + +typedef struct VkEventCreateInfo { + VkStructureType sType; + const void* pNext; + VkEventCreateFlags flags; +} VkEventCreateInfo; + +typedef struct VkQueryPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkQueryPoolCreateFlags flags; + VkQueryType queryType; + uint32_t queryCount; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkQueryPoolCreateInfo; + +typedef struct VkBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkDeviceSize size; + VkBufferUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkBufferCreateInfo; + +typedef struct VkBufferViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferViewCreateFlags flags; + VkBuffer buffer; + VkFormat format; + VkDeviceSize offset; + VkDeviceSize range; +} VkBufferViewCreateInfo; + +typedef struct VkImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageType imageType; + VkFormat format; + VkExtent3D extent; + uint32_t mipLevels; + uint32_t arrayLayers; + VkSampleCountFlagBits samples; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkImageLayout initialLayout; +} VkImageCreateInfo; + +typedef struct VkSubresourceLayout { + VkDeviceSize offset; + VkDeviceSize size; + VkDeviceSize rowPitch; + VkDeviceSize arrayPitch; + VkDeviceSize depthPitch; +} VkSubresourceLayout; + +typedef struct VkComponentMapping { + VkComponentSwizzle r; + VkComponentSwizzle g; + VkComponentSwizzle b; + VkComponentSwizzle a; +} VkComponentMapping; + +typedef struct VkImageViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageViewCreateFlags flags; + VkImage image; + VkImageViewType viewType; + VkFormat format; + VkComponentMapping components; + VkImageSubresourceRange subresourceRange; +} VkImageViewCreateInfo; + +typedef struct VkShaderModuleCreateInfo { + VkStructureType sType; + const void* pNext; + VkShaderModuleCreateFlags flags; + size_t codeSize; + const uint32_t* pCode; +} VkShaderModuleCreateInfo; + +typedef struct VkPipelineCacheCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCacheCreateFlags flags; + size_t initialDataSize; + const void* pInitialData; +} VkPipelineCacheCreateInfo; + +typedef struct VkSpecializationMapEntry { + uint32_t constantID; + uint32_t offset; + size_t size; +} VkSpecializationMapEntry; + +typedef struct VkSpecializationInfo { + uint32_t mapEntryCount; + const VkSpecializationMapEntry* pMapEntries; + size_t dataSize; + const void* pData; +} VkSpecializationInfo; + +typedef struct 
VkPipelineShaderStageCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineShaderStageCreateFlags flags; + VkShaderStageFlagBits stage; + VkShaderModule module; + const char* pName; + const VkSpecializationInfo* pSpecializationInfo; +} VkPipelineShaderStageCreateInfo; + +typedef struct VkComputePipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + VkPipelineShaderStageCreateInfo stage; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkComputePipelineCreateInfo; + +typedef struct VkVertexInputBindingDescription { + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; +} VkVertexInputBindingDescription; + +typedef struct VkVertexInputAttributeDescription { + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription; + +typedef struct VkPipelineVertexInputStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineVertexInputStateCreateFlags flags; + uint32_t vertexBindingDescriptionCount; + const VkVertexInputBindingDescription* pVertexBindingDescriptions; + uint32_t vertexAttributeDescriptionCount; + const VkVertexInputAttributeDescription* pVertexAttributeDescriptions; +} VkPipelineVertexInputStateCreateInfo; + +typedef struct VkPipelineInputAssemblyStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineInputAssemblyStateCreateFlags flags; + VkPrimitiveTopology topology; + VkBool32 primitiveRestartEnable; +} VkPipelineInputAssemblyStateCreateInfo; + +typedef struct VkPipelineTessellationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineTessellationStateCreateFlags flags; + uint32_t patchControlPoints; +} VkPipelineTessellationStateCreateInfo; + +typedef struct VkViewport { + float x; + float y; + float width; + float height; + float minDepth; + float maxDepth; +} VkViewport; + +typedef struct VkPipelineViewportStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineViewportStateCreateFlags flags; + uint32_t viewportCount; + const VkViewport* pViewports; + uint32_t scissorCount; + const VkRect2D* pScissors; +} VkPipelineViewportStateCreateInfo; + +typedef struct VkPipelineRasterizationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateCreateFlags flags; + VkBool32 depthClampEnable; + VkBool32 rasterizerDiscardEnable; + VkPolygonMode polygonMode; + VkCullModeFlags cullMode; + VkFrontFace frontFace; + VkBool32 depthBiasEnable; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; + float lineWidth; +} VkPipelineRasterizationStateCreateInfo; + +typedef struct VkPipelineMultisampleStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineMultisampleStateCreateFlags flags; + VkSampleCountFlagBits rasterizationSamples; + VkBool32 sampleShadingEnable; + float minSampleShading; + const VkSampleMask* pSampleMask; + VkBool32 alphaToCoverageEnable; + VkBool32 alphaToOneEnable; +} VkPipelineMultisampleStateCreateInfo; + +typedef struct VkStencilOpState { + VkStencilOp failOp; + VkStencilOp passOp; + VkStencilOp depthFailOp; + VkCompareOp compareOp; + uint32_t compareMask; + uint32_t writeMask; + uint32_t reference; +} VkStencilOpState; + +typedef struct VkPipelineDepthStencilStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDepthStencilStateCreateFlags flags; + VkBool32 depthTestEnable; + VkBool32 depthWriteEnable; + VkCompareOp 
depthCompareOp; + VkBool32 depthBoundsTestEnable; + VkBool32 stencilTestEnable; + VkStencilOpState front; + VkStencilOpState back; + float minDepthBounds; + float maxDepthBounds; +} VkPipelineDepthStencilStateCreateInfo; + +typedef struct VkPipelineColorBlendAttachmentState { + VkBool32 blendEnable; + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; + VkColorComponentFlags colorWriteMask; +} VkPipelineColorBlendAttachmentState; + +typedef struct VkPipelineColorBlendStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineColorBlendStateCreateFlags flags; + VkBool32 logicOpEnable; + VkLogicOp logicOp; + uint32_t attachmentCount; + const VkPipelineColorBlendAttachmentState* pAttachments; + float blendConstants[4]; +} VkPipelineColorBlendStateCreateInfo; + +typedef struct VkPipelineDynamicStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDynamicStateCreateFlags flags; + uint32_t dynamicStateCount; + const VkDynamicState* pDynamicStates; +} VkPipelineDynamicStateCreateInfo; + +typedef struct VkGraphicsPipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; + const VkPipelineViewportStateCreateInfo* pViewportState; + const VkPipelineRasterizationStateCreateInfo* pRasterizationState; + const VkPipelineMultisampleStateCreateInfo* pMultisampleState; + const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState; + const VkPipelineColorBlendStateCreateInfo* pColorBlendState; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkRenderPass renderPass; + uint32_t subpass; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkGraphicsPipelineCreateInfo; + +typedef struct VkPushConstantRange { + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; +} VkPushConstantRange; + +typedef struct VkPipelineLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineLayoutCreateFlags flags; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkPipelineLayoutCreateInfo; + +typedef struct VkSamplerCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerCreateFlags flags; + VkFilter magFilter; + VkFilter minFilter; + VkSamplerMipmapMode mipmapMode; + VkSamplerAddressMode addressModeU; + VkSamplerAddressMode addressModeV; + VkSamplerAddressMode addressModeW; + float mipLodBias; + VkBool32 anisotropyEnable; + float maxAnisotropy; + VkBool32 compareEnable; + VkCompareOp compareOp; + float minLod; + float maxLod; + VkBorderColor borderColor; + VkBool32 unnormalizedCoordinates; +} VkSamplerCreateInfo; + +typedef struct VkCopyDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; +} VkCopyDescriptorSet; + +typedef struct VkDescriptorBufferInfo { + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize range; +} VkDescriptorBufferInfo; + 
+typedef struct VkDescriptorImageInfo { + VkSampler sampler; + VkImageView imageView; + VkImageLayout imageLayout; +} VkDescriptorImageInfo; + +typedef struct VkDescriptorPoolSize { + VkDescriptorType type; + uint32_t descriptorCount; +} VkDescriptorPoolSize; + +typedef struct VkDescriptorPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPoolCreateFlags flags; + uint32_t maxSets; + uint32_t poolSizeCount; + const VkDescriptorPoolSize* pPoolSizes; +} VkDescriptorPoolCreateInfo; + +typedef struct VkDescriptorSetAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPool descriptorPool; + uint32_t descriptorSetCount; + const VkDescriptorSetLayout* pSetLayouts; +} VkDescriptorSetAllocateInfo; + +typedef struct VkDescriptorSetLayoutBinding { + uint32_t binding; + VkDescriptorType descriptorType; + uint32_t descriptorCount; + VkShaderStageFlags stageFlags; + const VkSampler* pImmutableSamplers; +} VkDescriptorSetLayoutBinding; + +typedef struct VkDescriptorSetLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const VkDescriptorSetLayoutBinding* pBindings; +} VkDescriptorSetLayoutCreateInfo; + +typedef struct VkWriteDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + const VkDescriptorImageInfo* pImageInfo; + const VkDescriptorBufferInfo* pBufferInfo; + const VkBufferView* pTexelBufferView; +} VkWriteDescriptorSet; + +typedef struct VkAttachmentDescription { + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription; + +typedef struct VkAttachmentReference { + uint32_t attachment; + VkImageLayout layout; +} VkAttachmentReference; + +typedef struct VkFramebufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkFramebufferCreateFlags flags; + VkRenderPass renderPass; + uint32_t attachmentCount; + const VkImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; +} VkFramebufferCreateInfo; + +typedef struct VkSubpassDescription { + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t inputAttachmentCount; + const VkAttachmentReference* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference* pColorAttachments; + const VkAttachmentReference* pResolveAttachments; + const VkAttachmentReference* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription; + +typedef struct VkSubpassDependency { + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; +} VkSubpassDependency; + +typedef struct VkRenderPassCreateInfo { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency* pDependencies; +} VkRenderPassCreateInfo; + +typedef struct 
VkCommandPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPoolCreateFlags flags; + uint32_t queueFamilyIndex; +} VkCommandPoolCreateInfo; + +typedef struct VkCommandBufferAllocateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPool commandPool; + VkCommandBufferLevel level; + uint32_t commandBufferCount; +} VkCommandBufferAllocateInfo; + +typedef struct VkCommandBufferInheritanceInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + uint32_t subpass; + VkFramebuffer framebuffer; + VkBool32 occlusionQueryEnable; + VkQueryControlFlags queryFlags; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkCommandBufferInheritanceInfo; + +typedef struct VkCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + VkCommandBufferUsageFlags flags; + const VkCommandBufferInheritanceInfo* pInheritanceInfo; +} VkCommandBufferBeginInfo; + +typedef struct VkBufferCopy { + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy; + +typedef struct VkImageSubresourceLayers { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceLayers; + +typedef struct VkBufferImageCopy { + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy; + +typedef union VkClearColorValue { + float float32[4]; + int32_t int32[4]; + uint32_t uint32[4]; +} VkClearColorValue; + +typedef struct VkClearDepthStencilValue { + float depth; + uint32_t stencil; +} VkClearDepthStencilValue; + +typedef union VkClearValue { + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +} VkClearValue; + +typedef struct VkClearAttachment { + VkImageAspectFlags aspectMask; + uint32_t colorAttachment; + VkClearValue clearValue; +} VkClearAttachment; + +typedef struct VkClearRect { + VkRect2D rect; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkClearRect; + +typedef struct VkImageBlit { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit; + +typedef struct VkImageCopy { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy; + +typedef struct VkImageResolve { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve; + +typedef struct VkRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + VkFramebuffer framebuffer; + VkRect2D renderArea; + uint32_t clearValueCount; + const VkClearValue* pClearValues; +} VkRenderPassBeginInfo; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); +typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures); +typedef void (VKAPI_PTR 
*PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice); +typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue); +typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory); +typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData); +typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory); +typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize 
memoryOffset); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore); +typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent); +typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool); +typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView); +typedef void (VKAPI_PTR 
*PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage); +typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler); +typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const 
VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets); +typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool); +typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers); +typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); +typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo); +typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float 
depthBiasSlopeFactor); +typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer 
commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data); +typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t 
commandBufferCount, const VkCommandBuffer* pCommandBuffers); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance( + const VkInstanceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkInstance* pInstance); + +VKAPI_ATTR void VKAPI_CALL vkDestroyInstance( + VkInstance instance, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices( + VkInstance instance, + uint32_t* pPhysicalDeviceCount, + VkPhysicalDevice* pPhysicalDevices); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties* pMemoryProperties); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr( + VkInstance instance, + const char* pName); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr( + VkDevice device, + const char* pName); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice( + VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDevice* pDevice); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDevice( + VkDevice device, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties( + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties( + VkPhysicalDevice physicalDevice, + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue( + VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo* pSubmits, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle( + VkQueue queue); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory( + VkDevice device, + const VkMemoryAllocateInfo* pAllocateInfo, + const VkAllocationCallbacks* pAllocator, + VkDeviceMemory* pMemory); + +VKAPI_ATTR void VKAPI_CALL vkFreeMemory( + VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult 
VKAPI_CALL vkMapMemory( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void** ppData); + +VKAPI_ATTR void VKAPI_CALL vkUnmapMemory( + VkDevice device, + VkDeviceMemory memory); + +VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize* pCommittedMemoryInBytes); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory( + VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory( + VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements( + VkDevice device, + VkBuffer buffer, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements( + VkDevice device, + VkImage image, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( + VkDevice device, + VkImage image, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse( + VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo* pBindInfo, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence( + VkDevice device, + const VkFenceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFence( + VkDevice device, + VkFence fence, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus( + VkDevice device, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences, + VkBool32 waitAll, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore( + VkDevice device, + const VkSemaphoreCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSemaphore* pSemaphore); + +VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore( + VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent( + VkDevice device, + const VkEventCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkEvent* pEvent); + +VKAPI_ATTR void VKAPI_CALL vkDestroyEvent( + VkDevice device, + VkEvent event, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool( + 
VkDevice device, + const VkQueryPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkQueryPool* pQueryPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool( + VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void* pData, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer( + VkDevice device, + const VkBufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBuffer* pBuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer( + VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView( + VkDevice device, + const VkBufferViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView( + VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage( + VkDevice device, + const VkImageCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImage* pImage); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImage( + VkDevice device, + VkImage image, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout( + VkDevice device, + VkImage image, + const VkImageSubresource* pSubresource, + VkSubresourceLayout* pLayout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView( + VkDevice device, + const VkImageViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImageView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImageView( + VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkShaderModule* pShaderModule); + +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule( + VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache( + VkDevice device, + const VkPipelineCacheCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineCache* pPipelineCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache( + VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData( + VkDevice device, + VkPipelineCache pipelineCache, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches( + VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline( + VkDevice device, + VkPipeline pipeline, + 
const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout( + VkDevice device, + const VkPipelineLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineLayout* pPipelineLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout( + VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler( + VkDevice device, + const VkSamplerCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSampler* pSampler); + +VKAPI_ATTR void VKAPI_CALL vkDestroySampler( + VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorSetLayout* pSetLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout( + VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool( + VkDevice device, + const VkDescriptorPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorPool* pDescriptorPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets( + VkDevice device, + const VkDescriptorSetAllocateInfo* pAllocateInfo, + VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets( + VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets( + VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet* pDescriptorCopies); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer( + VkDevice device, + const VkFramebufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFramebuffer* pFramebuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer( + VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass( + VkDevice device, + const VkRenderPassCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass( + VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity( + VkDevice device, + VkRenderPass renderPass, + VkExtent2D* pGranularity); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool( + VkDevice device, + const VkCommandPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCommandPool* pCommandPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool( + VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers( + VkDevice device, + 
const VkCommandBufferAllocateInfo* pAllocateInfo, + VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers( + VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer( + VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo* pBeginInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer( + VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor( + VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth( + VkCommandBuffer commandBuffer, + float lineWidth); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias( + VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants( + VkCommandBuffer commandBuffer, + const float blendConstants[4]); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds( + VkCommandBuffer commandBuffer, + float minDepthBounds, + float maxDepthBounds); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t* pDynamicOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdDraw( + VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed( + VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatch( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, 
+ uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit* pRegions, + VkFilter filter); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue* pColor, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue* pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment* pAttachments, + uint32_t rectCount, + const VkClearRect* pRects); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + 
uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants( + VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void* pValues); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass( + VkCommandBuffer commandBuffer, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( + VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); +#endif + + +#define VK_VERSION_1_1 1 +// Vulkan 1.1 version number +#define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0)// Patch version should always be set to 0 + +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) +#define VK_MAX_DEVICE_GROUP_SIZE 32U +#define VK_LUID_SIZE 8U +#define VK_QUEUE_FAMILY_EXTERNAL (~1U) + +typedef enum VkPointClippingBehavior { + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1, + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPointClippingBehavior; + +typedef enum VkTessellationDomainOrigin { + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT = 0, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1, + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF +} VkTessellationDomainOrigin; + +typedef enum VkSamplerYcbcrModelConversion { + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY = 0, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY = 1, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709 = 2, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601 = 3, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 = 4, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + 
VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrModelConversion; + +typedef enum VkSamplerYcbcrRange { + VK_SAMPLER_YCBCR_RANGE_ITU_FULL = 0, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1, + VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrRange; + +typedef enum VkChromaLocation { + VK_CHROMA_LOCATION_COSITED_EVEN = 0, + VK_CHROMA_LOCATION_MIDPOINT = 1, + VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN, + VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT, + VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF +} VkChromaLocation; + +typedef enum VkDescriptorUpdateTemplateType { + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorUpdateTemplateType; + +typedef enum VkSubgroupFeatureFlagBits { + VK_SUBGROUP_FEATURE_BASIC_BIT = 0x00000001, + VK_SUBGROUP_FEATURE_VOTE_BIT = 0x00000002, + VK_SUBGROUP_FEATURE_ARITHMETIC_BIT = 0x00000004, + VK_SUBGROUP_FEATURE_BALLOT_BIT = 0x00000008, + VK_SUBGROUP_FEATURE_SHUFFLE_BIT = 0x00000010, + VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020, + VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040, + VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080, + VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100, + VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubgroupFeatureFlagBits; +typedef VkFlags VkSubgroupFeatureFlags; + +typedef enum VkPeerMemoryFeatureFlagBits { + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT = 0x00000001, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT = 0x00000002, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT = 0x00000004, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT = 0x00000008, + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT, + VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPeerMemoryFeatureFlagBits; +typedef VkFlags VkPeerMemoryFeatureFlags; + +typedef enum VkMemoryAllocateFlagBits { + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT = 0x00000002, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000004, + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryAllocateFlagBits; +typedef VkFlags VkMemoryAllocateFlags; +typedef VkFlags VkCommandPoolTrimFlags; +typedef VkFlags VkDescriptorUpdateTemplateCreateFlags; + +typedef enum VkExternalMemoryHandleTypeFlagBits { + 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT = 0x00000010, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT = 0x00000020, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT = 0x00000040, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT = 0x00000200, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA = 0x00000800, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBits; +typedef VkFlags VkExternalMemoryHandleTypeFlags; + +typedef enum VkExternalMemoryFeatureFlagBits { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBits; +typedef VkFlags VkExternalMemoryFeatureFlags; + +typedef enum VkExternalFenceHandleTypeFlagBits { + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000008, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceHandleTypeFlagBits; +typedef VkFlags VkExternalFenceHandleTypeFlags; + +typedef enum VkExternalFenceFeatureFlagBits { + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = 
VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceFeatureFlagBits; +typedef VkFlags VkExternalFenceFeatureFlags; + +typedef enum VkFenceImportFlagBits { + VK_FENCE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = VK_FENCE_IMPORT_TEMPORARY_BIT, + VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceImportFlagBits; +typedef VkFlags VkFenceImportFlags; + +typedef enum VkSemaphoreImportFlagBits { + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, + VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreImportFlagBits; +typedef VkFlags VkSemaphoreImportFlags; + +typedef enum VkExternalSemaphoreHandleTypeFlagBits { + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA = 0x00000080, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreHandleTypeFlagBits; +typedef VkFlags VkExternalSemaphoreHandleTypeFlags; + +typedef enum VkExternalSemaphoreFeatureFlagBits { + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreFeatureFlagBits; +typedef VkFlags VkExternalSemaphoreFeatureFlags; +typedef struct VkPhysicalDeviceSubgroupProperties { + VkStructureType sType; + void* pNext; + uint32_t subgroupSize; + VkShaderStageFlags supportedStages; + VkSubgroupFeatureFlags supportedOperations; + VkBool32 quadOperationsInAllStages; +} VkPhysicalDeviceSubgroupProperties; + +typedef struct VkBindBufferMemoryInfo { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindBufferMemoryInfo; + +typedef struct VkBindImageMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindImageMemoryInfo; + +typedef struct VkPhysicalDevice16BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; +} VkPhysicalDevice16BitStorageFeatures; + +typedef struct VkMemoryDedicatedRequirements { + 
VkStructureType sType; + void* pNext; + VkBool32 prefersDedicatedAllocation; + VkBool32 requiresDedicatedAllocation; +} VkMemoryDedicatedRequirements; + +typedef struct VkMemoryDedicatedAllocateInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkMemoryDedicatedAllocateInfo; + +typedef struct VkMemoryAllocateFlagsInfo { + VkStructureType sType; + const void* pNext; + VkMemoryAllocateFlags flags; + uint32_t deviceMask; +} VkMemoryAllocateFlagsInfo; + +typedef struct VkDeviceGroupRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; + uint32_t deviceRenderAreaCount; + const VkRect2D* pDeviceRenderAreas; +} VkDeviceGroupRenderPassBeginInfo; + +typedef struct VkDeviceGroupCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; +} VkDeviceGroupCommandBufferBeginInfo; + +typedef struct VkDeviceGroupSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const uint32_t* pWaitSemaphoreDeviceIndices; + uint32_t commandBufferCount; + const uint32_t* pCommandBufferDeviceMasks; + uint32_t signalSemaphoreCount; + const uint32_t* pSignalSemaphoreDeviceIndices; +} VkDeviceGroupSubmitInfo; + +typedef struct VkDeviceGroupBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t resourceDeviceIndex; + uint32_t memoryDeviceIndex; +} VkDeviceGroupBindSparseInfo; + +typedef struct VkBindBufferMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindBufferMemoryDeviceGroupInfo; + +typedef struct VkBindImageMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + uint32_t splitInstanceBindRegionCount; + const VkRect2D* pSplitInstanceBindRegions; +} VkBindImageMemoryDeviceGroupInfo; + +typedef struct VkPhysicalDeviceGroupProperties { + VkStructureType sType; + void* pNext; + uint32_t physicalDeviceCount; + VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE]; + VkBool32 subsetAllocation; +} VkPhysicalDeviceGroupProperties; + +typedef struct VkDeviceGroupDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t physicalDeviceCount; + const VkPhysicalDevice* pPhysicalDevices; +} VkDeviceGroupDeviceCreateInfo; + +typedef struct VkBufferMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferMemoryRequirementsInfo2; + +typedef struct VkImageMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageMemoryRequirementsInfo2; + +typedef struct VkImageSparseMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageSparseMemoryRequirementsInfo2; + +typedef struct VkMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkMemoryRequirements memoryRequirements; +} VkMemoryRequirements2; + +typedef struct VkSparseImageMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkSparseImageMemoryRequirements memoryRequirements; +} VkSparseImageMemoryRequirements2; + +typedef struct VkPhysicalDeviceFeatures2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceFeatures features; +} VkPhysicalDeviceFeatures2; + +typedef struct VkPhysicalDeviceProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceProperties properties; +} VkPhysicalDeviceProperties2; + +typedef struct VkFormatProperties2 { + VkStructureType sType; + 
void* pNext; + VkFormatProperties formatProperties; +} VkFormatProperties2; + +typedef struct VkImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkImageFormatProperties imageFormatProperties; +} VkImageFormatProperties2; + +typedef struct VkPhysicalDeviceImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkImageCreateFlags flags; +} VkPhysicalDeviceImageFormatInfo2; + +typedef struct VkQueueFamilyProperties2 { + VkStructureType sType; + void* pNext; + VkQueueFamilyProperties queueFamilyProperties; +} VkQueueFamilyProperties2; + +typedef struct VkPhysicalDeviceMemoryProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceMemoryProperties memoryProperties; +} VkPhysicalDeviceMemoryProperties2; + +typedef struct VkSparseImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkSparseImageFormatProperties properties; +} VkSparseImageFormatProperties2; + +typedef struct VkPhysicalDeviceSparseImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkSampleCountFlagBits samples; + VkImageUsageFlags usage; + VkImageTiling tiling; +} VkPhysicalDeviceSparseImageFormatInfo2; + +typedef struct VkPhysicalDevicePointClippingProperties { + VkStructureType sType; + void* pNext; + VkPointClippingBehavior pointClippingBehavior; +} VkPhysicalDevicePointClippingProperties; + +typedef struct VkInputAttachmentAspectReference { + uint32_t subpass; + uint32_t inputAttachmentIndex; + VkImageAspectFlags aspectMask; +} VkInputAttachmentAspectReference; + +typedef struct VkRenderPassInputAttachmentAspectCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t aspectReferenceCount; + const VkInputAttachmentAspectReference* pAspectReferences; +} VkRenderPassInputAttachmentAspectCreateInfo; + +typedef struct VkImageViewUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags usage; +} VkImageViewUsageCreateInfo; + +typedef struct VkPipelineTessellationDomainOriginStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkTessellationDomainOrigin domainOrigin; +} VkPipelineTessellationDomainOriginStateCreateInfo; + +typedef struct VkRenderPassMultiviewCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t subpassCount; + const uint32_t* pViewMasks; + uint32_t dependencyCount; + const int32_t* pViewOffsets; + uint32_t correlationMaskCount; + const uint32_t* pCorrelationMasks; +} VkRenderPassMultiviewCreateInfo; + +typedef struct VkPhysicalDeviceMultiviewFeatures { + VkStructureType sType; + void* pNext; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 multiviewTessellationShader; +} VkPhysicalDeviceMultiviewFeatures; + +typedef struct VkPhysicalDeviceMultiviewProperties { + VkStructureType sType; + void* pNext; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; +} VkPhysicalDeviceMultiviewProperties; + +typedef struct VkPhysicalDeviceVariablePointersFeatures { + VkStructureType sType; + void* pNext; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; +} VkPhysicalDeviceVariablePointersFeatures; + +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 protectedMemory; +} VkPhysicalDeviceProtectedMemoryFeatures; + +typedef struct 
VkPhysicalDeviceProtectedMemoryProperties { + VkStructureType sType; + void* pNext; + VkBool32 protectedNoFault; +} VkPhysicalDeviceProtectedMemoryProperties; + +typedef struct VkDeviceQueueInfo2 { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueIndex; +} VkDeviceQueueInfo2; + +typedef struct VkProtectedSubmitInfo { + VkStructureType sType; + const void* pNext; + VkBool32 protectedSubmit; +} VkProtectedSubmitInfo; + +typedef struct VkSamplerYcbcrConversionCreateInfo { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkSamplerYcbcrModelConversion ycbcrModel; + VkSamplerYcbcrRange ycbcrRange; + VkComponentMapping components; + VkChromaLocation xChromaOffset; + VkChromaLocation yChromaOffset; + VkFilter chromaFilter; + VkBool32 forceExplicitReconstruction; +} VkSamplerYcbcrConversionCreateInfo; + +typedef struct VkSamplerYcbcrConversionInfo { + VkStructureType sType; + const void* pNext; + VkSamplerYcbcrConversion conversion; +} VkSamplerYcbcrConversionInfo; + +typedef struct VkBindImagePlaneMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkBindImagePlaneMemoryInfo; + +typedef struct VkImagePlaneMemoryRequirementsInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkImagePlaneMemoryRequirementsInfo; + +typedef struct VkPhysicalDeviceSamplerYcbcrConversionFeatures { + VkStructureType sType; + void* pNext; + VkBool32 samplerYcbcrConversion; +} VkPhysicalDeviceSamplerYcbcrConversionFeatures; + +typedef struct VkSamplerYcbcrConversionImageFormatProperties { + VkStructureType sType; + void* pNext; + uint32_t combinedImageSamplerDescriptorCount; +} VkSamplerYcbcrConversionImageFormatProperties; + +typedef struct VkDescriptorUpdateTemplateEntry { + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + size_t offset; + size_t stride; +} VkDescriptorUpdateTemplateEntry; + +typedef struct VkDescriptorUpdateTemplateCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorUpdateTemplateCreateFlags flags; + uint32_t descriptorUpdateEntryCount; + const VkDescriptorUpdateTemplateEntry* pDescriptorUpdateEntries; + VkDescriptorUpdateTemplateType templateType; + VkDescriptorSetLayout descriptorSetLayout; + VkPipelineBindPoint pipelineBindPoint; + VkPipelineLayout pipelineLayout; + uint32_t set; +} VkDescriptorUpdateTemplateCreateInfo; + +typedef struct VkExternalMemoryProperties { + VkExternalMemoryFeatureFlags externalMemoryFeatures; + VkExternalMemoryHandleTypeFlags exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlags compatibleHandleTypes; +} VkExternalMemoryProperties; + +typedef struct VkPhysicalDeviceExternalImageFormatInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalImageFormatInfo; + +typedef struct VkExternalImageFormatProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalImageFormatProperties; + +typedef struct VkPhysicalDeviceExternalBufferInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkBufferUsageFlags usage; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalBufferInfo; + +typedef struct VkExternalBufferProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} 
VkExternalBufferProperties; + +typedef struct VkPhysicalDeviceIDProperties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; +} VkPhysicalDeviceIDProperties; + +typedef struct VkExternalMemoryImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryImageCreateInfo; + +typedef struct VkExternalMemoryBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryBufferCreateInfo; + +typedef struct VkExportMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExportMemoryAllocateInfo; + +typedef struct VkPhysicalDeviceExternalFenceInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalFenceInfo; + +typedef struct VkExternalFenceProperties { + VkStructureType sType; + void* pNext; + VkExternalFenceHandleTypeFlags exportFromImportedHandleTypes; + VkExternalFenceHandleTypeFlags compatibleHandleTypes; + VkExternalFenceFeatureFlags externalFenceFeatures; +} VkExternalFenceProperties; + +typedef struct VkExportFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlags handleTypes; +} VkExportFenceCreateInfo; + +typedef struct VkExportSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlags handleTypes; +} VkExportSemaphoreCreateInfo; + +typedef struct VkPhysicalDeviceExternalSemaphoreInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalSemaphoreInfo; + +typedef struct VkExternalSemaphoreProperties { + VkStructureType sType; + void* pNext; + VkExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes; + VkExternalSemaphoreHandleTypeFlags compatibleHandleTypes; + VkExternalSemaphoreFeatureFlags externalSemaphoreFeatures; +} VkExternalSemaphoreProperties; + +typedef struct VkPhysicalDeviceMaintenance3Properties { + VkStructureType sType; + void* pNext; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} VkPhysicalDeviceMaintenance3Properties; + +typedef struct VkDescriptorSetLayoutSupport { + VkStructureType sType; + void* pNext; + VkBool32 supported; +} VkDescriptorSetLayoutSupport; + +typedef struct VkPhysicalDeviceShaderDrawParametersFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceShaderDrawParametersFeatures; + +typedef VkPhysicalDeviceShaderDrawParametersFeatures VkPhysicalDeviceShaderDrawParameterFeatures; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceVersion)(uint32_t* pApiVersion); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeatures)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMask)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR 
*PFN_vkCmdDispatchBase)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroups)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkTrimCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue2)(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversion)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversion)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplate)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplate)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplate)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferProperties)(VkPhysicalDevice physicalDevice, const 
VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFenceProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupport)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion( + uint32_t* pApiVersion); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL 
vkGetPhysicalDeviceSparseImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2( + VkDevice device, + const VkDeviceQueueInfo2* pQueueInfo, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + + +#define VK_VERSION_1_2 1 +// Vulkan 1.2 version number +#define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0)// Patch version should always be set to 0 + +#define VK_MAX_DRIVER_NAME_SIZE 256U +#define VK_MAX_DRIVER_INFO_SIZE 256U + +typedef enum VkDriverId { + VK_DRIVER_ID_AMD_PROPRIETARY = 1, + VK_DRIVER_ID_AMD_OPEN_SOURCE = 2, + VK_DRIVER_ID_MESA_RADV = 3, + VK_DRIVER_ID_NVIDIA_PROPRIETARY = 4, + VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS = 5, + VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA = 6, + VK_DRIVER_ID_IMAGINATION_PROPRIETARY = 7, + VK_DRIVER_ID_QUALCOMM_PROPRIETARY = 8, + VK_DRIVER_ID_ARM_PROPRIETARY = 9, + VK_DRIVER_ID_GOOGLE_SWIFTSHADER = 10, + VK_DRIVER_ID_GGP_PROPRIETARY = 11, + VK_DRIVER_ID_BROADCOM_PROPRIETARY = 12, + VK_DRIVER_ID_MESA_LLVMPIPE = 13, + VK_DRIVER_ID_MOLTENVK = 14, + VK_DRIVER_ID_COREAVI_PROPRIETARY = 15, + VK_DRIVER_ID_JUICE_PROPRIETARY = 16, + VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY, + VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE, + VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV, + VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + 
VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, + VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + VK_DRIVER_ID_ARM_PROPRIETARY_KHR = VK_DRIVER_ID_ARM_PROPRIETARY, + VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + VK_DRIVER_ID_GGP_PROPRIETARY_KHR = VK_DRIVER_ID_GGP_PROPRIETARY, + VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + VK_DRIVER_ID_MAX_ENUM = 0x7FFFFFFF +} VkDriverId; + +typedef enum VkShaderFloatControlsIndependence { + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY = 0, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL = 1, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE = 2, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM = 0x7FFFFFFF +} VkShaderFloatControlsIndependence; + +typedef enum VkSamplerReductionMode { + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE = 0, + VK_SAMPLER_REDUCTION_MODE_MIN = 1, + VK_SAMPLER_REDUCTION_MODE_MAX = 2, + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, + VK_SAMPLER_REDUCTION_MODE_MIN_EXT = VK_SAMPLER_REDUCTION_MODE_MIN, + VK_SAMPLER_REDUCTION_MODE_MAX_EXT = VK_SAMPLER_REDUCTION_MODE_MAX, + VK_SAMPLER_REDUCTION_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerReductionMode; + +typedef enum VkSemaphoreType { + VK_SEMAPHORE_TYPE_BINARY = 0, + VK_SEMAPHORE_TYPE_TIMELINE = 1, + VK_SEMAPHORE_TYPE_BINARY_KHR = VK_SEMAPHORE_TYPE_BINARY, + VK_SEMAPHORE_TYPE_TIMELINE_KHR = VK_SEMAPHORE_TYPE_TIMELINE, + VK_SEMAPHORE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreType; + +typedef enum VkResolveModeFlagBits { + VK_RESOLVE_MODE_NONE = 0, + VK_RESOLVE_MODE_SAMPLE_ZERO_BIT = 0x00000001, + VK_RESOLVE_MODE_AVERAGE_BIT = 0x00000002, + VK_RESOLVE_MODE_MIN_BIT = 0x00000004, + VK_RESOLVE_MODE_MAX_BIT = 0x00000008, + VK_RESOLVE_MODE_NONE_KHR = VK_RESOLVE_MODE_NONE, + VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, + VK_RESOLVE_MODE_AVERAGE_BIT_KHR = VK_RESOLVE_MODE_AVERAGE_BIT, + VK_RESOLVE_MODE_MIN_BIT_KHR = VK_RESOLVE_MODE_MIN_BIT, + VK_RESOLVE_MODE_MAX_BIT_KHR = VK_RESOLVE_MODE_MAX_BIT, + VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkResolveModeFlagBits; +typedef VkFlags VkResolveModeFlags; + +typedef enum VkDescriptorBindingFlagBits { + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT = 0x00000001, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT = 0x00000002, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT = 0x00000004, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT = 0x00000008, + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT, + VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorBindingFlagBits; +typedef VkFlags VkDescriptorBindingFlags; + +typedef enum 
VkSemaphoreWaitFlagBits { + VK_SEMAPHORE_WAIT_ANY_BIT = 0x00000001, + VK_SEMAPHORE_WAIT_ANY_BIT_KHR = VK_SEMAPHORE_WAIT_ANY_BIT, + VK_SEMAPHORE_WAIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreWaitFlagBits; +typedef VkFlags VkSemaphoreWaitFlags; +typedef struct VkPhysicalDeviceVulkan11Features { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 multiviewTessellationShader; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; + VkBool32 protectedMemory; + VkBool32 samplerYcbcrConversion; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceVulkan11Features; + +typedef struct VkPhysicalDeviceVulkan11Properties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; + uint32_t subgroupSize; + VkShaderStageFlags subgroupSupportedStages; + VkSubgroupFeatureFlags subgroupSupportedOperations; + VkBool32 subgroupQuadOperationsInAllStages; + VkPointClippingBehavior pointClippingBehavior; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; + VkBool32 protectedNoFault; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} VkPhysicalDeviceVulkan11Properties; + +typedef struct VkPhysicalDeviceVulkan12Features { + VkStructureType sType; + void* pNext; + VkBool32 samplerMirrorClampToEdge; + VkBool32 drawIndirectCount; + VkBool32 storageBuffer8BitAccess; + VkBool32 uniformAndStorageBuffer8BitAccess; + VkBool32 storagePushConstant8; + VkBool32 shaderBufferInt64Atomics; + VkBool32 shaderSharedInt64Atomics; + VkBool32 shaderFloat16; + VkBool32 shaderInt8; + VkBool32 descriptorIndexing; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; + VkBool32 samplerFilterMinmax; + VkBool32 scalarBlockLayout; + VkBool32 imagelessFramebuffer; + VkBool32 uniformBufferStandardLayout; + VkBool32 shaderSubgroupExtendedTypes; + VkBool32 separateDepthStencilLayouts; + VkBool32 hostQueryReset; + VkBool32 timelineSemaphore; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; + VkBool32 vulkanMemoryModel; + VkBool32 vulkanMemoryModelDeviceScope; + VkBool32 vulkanMemoryModelAvailabilityVisibilityChains; + VkBool32 
shaderOutputViewportIndex; + VkBool32 shaderOutputLayer; + VkBool32 subgroupBroadcastDynamicId; +} VkPhysicalDeviceVulkan12Features; + +typedef struct VkConformanceVersion { + uint8_t major; + uint8_t minor; + uint8_t subminor; + uint8_t patch; +} VkConformanceVersion; + +typedef struct VkPhysicalDeviceVulkan12Properties { + VkStructureType sType; + void* pNext; + VkDriverId driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE]; + VkConformanceVersion conformanceVersion; + VkShaderFloatControlsIndependence denormBehaviorIndependence; + VkShaderFloatControlsIndependence roundingModeIndependence; + VkBool32 shaderSignedZeroInfNanPreserveFloat16; + VkBool32 shaderSignedZeroInfNanPreserveFloat32; + VkBool32 shaderSignedZeroInfNanPreserveFloat64; + VkBool32 shaderDenormPreserveFloat16; + VkBool32 shaderDenormPreserveFloat32; + VkBool32 shaderDenormPreserveFloat64; + VkBool32 shaderDenormFlushToZeroFloat16; + VkBool32 shaderDenormFlushToZeroFloat32; + VkBool32 shaderDenormFlushToZeroFloat64; + VkBool32 shaderRoundingModeRTEFloat16; + VkBool32 shaderRoundingModeRTEFloat32; + VkBool32 shaderRoundingModeRTEFloat64; + VkBool32 shaderRoundingModeRTZFloat16; + VkBool32 shaderRoundingModeRTZFloat32; + VkBool32 shaderRoundingModeRTZFloat64; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; + VkResolveModeFlags supportedDepthResolveModes; + VkResolveModeFlags supportedStencilResolveModes; + VkBool32 independentResolveNone; + VkBool32 independentResolve; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; + uint64_t maxTimelineSemaphoreValueDifference; + VkSampleCountFlags framebufferIntegerColorSampleCounts; +} VkPhysicalDeviceVulkan12Properties; + +typedef struct VkImageFormatListCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkImageFormatListCreateInfo; + +typedef struct VkAttachmentDescription2 { + VkStructureType sType; + const void* pNext; + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout 
initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription2; + +typedef struct VkAttachmentReference2 { + VkStructureType sType; + const void* pNext; + uint32_t attachment; + VkImageLayout layout; + VkImageAspectFlags aspectMask; +} VkAttachmentReference2; + +typedef struct VkSubpassDescription2 { + VkStructureType sType; + const void* pNext; + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t viewMask; + uint32_t inputAttachmentCount; + const VkAttachmentReference2* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference2* pColorAttachments; + const VkAttachmentReference2* pResolveAttachments; + const VkAttachmentReference2* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription2; + +typedef struct VkSubpassDependency2 { + VkStructureType sType; + const void* pNext; + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; + int32_t viewOffset; +} VkSubpassDependency2; + +typedef struct VkRenderPassCreateInfo2 { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription2* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription2* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency2* pDependencies; + uint32_t correlatedViewMaskCount; + const uint32_t* pCorrelatedViewMasks; +} VkRenderPassCreateInfo2; + +typedef struct VkSubpassBeginInfo { + VkStructureType sType; + const void* pNext; + VkSubpassContents contents; +} VkSubpassBeginInfo; + +typedef struct VkSubpassEndInfo { + VkStructureType sType; + const void* pNext; +} VkSubpassEndInfo; + +typedef struct VkPhysicalDevice8BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer8BitAccess; + VkBool32 uniformAndStorageBuffer8BitAccess; + VkBool32 storagePushConstant8; +} VkPhysicalDevice8BitStorageFeatures; + +typedef struct VkPhysicalDeviceDriverProperties { + VkStructureType sType; + void* pNext; + VkDriverId driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE]; + VkConformanceVersion conformanceVersion; +} VkPhysicalDeviceDriverProperties; + +typedef struct VkPhysicalDeviceShaderAtomicInt64Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferInt64Atomics; + VkBool32 shaderSharedInt64Atomics; +} VkPhysicalDeviceShaderAtomicInt64Features; + +typedef struct VkPhysicalDeviceShaderFloat16Int8Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderFloat16; + VkBool32 shaderInt8; +} VkPhysicalDeviceShaderFloat16Int8Features; + +typedef struct VkPhysicalDeviceFloatControlsProperties { + VkStructureType sType; + void* pNext; + VkShaderFloatControlsIndependence denormBehaviorIndependence; + VkShaderFloatControlsIndependence roundingModeIndependence; + VkBool32 shaderSignedZeroInfNanPreserveFloat16; + VkBool32 shaderSignedZeroInfNanPreserveFloat32; + VkBool32 shaderSignedZeroInfNanPreserveFloat64; + VkBool32 shaderDenormPreserveFloat16; + VkBool32 shaderDenormPreserveFloat32; + VkBool32 shaderDenormPreserveFloat64; + VkBool32 shaderDenormFlushToZeroFloat16; + VkBool32 shaderDenormFlushToZeroFloat32; + VkBool32 shaderDenormFlushToZeroFloat64; + VkBool32 shaderRoundingModeRTEFloat16; + VkBool32 shaderRoundingModeRTEFloat32; + VkBool32 
shaderRoundingModeRTEFloat64; + VkBool32 shaderRoundingModeRTZFloat16; + VkBool32 shaderRoundingModeRTZFloat32; + VkBool32 shaderRoundingModeRTZFloat64; +} VkPhysicalDeviceFloatControlsProperties; + +typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t bindingCount; + const VkDescriptorBindingFlags* pBindingFlags; +} VkDescriptorSetLayoutBindingFlagsCreateInfo; + +typedef struct VkPhysicalDeviceDescriptorIndexingFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; +} VkPhysicalDeviceDescriptorIndexingFeatures; + +typedef struct VkPhysicalDeviceDescriptorIndexingProperties { + VkStructureType sType; + void* pNext; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; +} VkPhysicalDeviceDescriptorIndexingProperties; + +typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfo { + VkStructureType sType; + const void* pNext; + uint32_t descriptorSetCount; + const uint32_t* pDescriptorCounts; +} VkDescriptorSetVariableDescriptorCountAllocateInfo; + +typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupport { + VkStructureType sType; + void* pNext; + uint32_t 
maxVariableDescriptorCount; +} VkDescriptorSetVariableDescriptorCountLayoutSupport; + +typedef struct VkSubpassDescriptionDepthStencilResolve { + VkStructureType sType; + const void* pNext; + VkResolveModeFlagBits depthResolveMode; + VkResolveModeFlagBits stencilResolveMode; + const VkAttachmentReference2* pDepthStencilResolveAttachment; +} VkSubpassDescriptionDepthStencilResolve; + +typedef struct VkPhysicalDeviceDepthStencilResolveProperties { + VkStructureType sType; + void* pNext; + VkResolveModeFlags supportedDepthResolveModes; + VkResolveModeFlags supportedStencilResolveModes; + VkBool32 independentResolveNone; + VkBool32 independentResolve; +} VkPhysicalDeviceDepthStencilResolveProperties; + +typedef struct VkPhysicalDeviceScalarBlockLayoutFeatures { + VkStructureType sType; + void* pNext; + VkBool32 scalarBlockLayout; +} VkPhysicalDeviceScalarBlockLayoutFeatures; + +typedef struct VkImageStencilUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags stencilUsage; +} VkImageStencilUsageCreateInfo; + +typedef struct VkSamplerReductionModeCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerReductionMode reductionMode; +} VkSamplerReductionModeCreateInfo; + +typedef struct VkPhysicalDeviceSamplerFilterMinmaxProperties { + VkStructureType sType; + void* pNext; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; +} VkPhysicalDeviceSamplerFilterMinmaxProperties; + +typedef struct VkPhysicalDeviceVulkanMemoryModelFeatures { + VkStructureType sType; + void* pNext; + VkBool32 vulkanMemoryModel; + VkBool32 vulkanMemoryModelDeviceScope; + VkBool32 vulkanMemoryModelAvailabilityVisibilityChains; +} VkPhysicalDeviceVulkanMemoryModelFeatures; + +typedef struct VkPhysicalDeviceImagelessFramebufferFeatures { + VkStructureType sType; + void* pNext; + VkBool32 imagelessFramebuffer; +} VkPhysicalDeviceImagelessFramebufferFeatures; + +typedef struct VkFramebufferAttachmentImageInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageUsageFlags usage; + uint32_t width; + uint32_t height; + uint32_t layerCount; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkFramebufferAttachmentImageInfo; + +typedef struct VkFramebufferAttachmentsCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t attachmentImageInfoCount; + const VkFramebufferAttachmentImageInfo* pAttachmentImageInfos; +} VkFramebufferAttachmentsCreateInfo; + +typedef struct VkRenderPassAttachmentBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t attachmentCount; + const VkImageView* pAttachments; +} VkRenderPassAttachmentBeginInfo; + +typedef struct VkPhysicalDeviceUniformBufferStandardLayoutFeatures { + VkStructureType sType; + void* pNext; + VkBool32 uniformBufferStandardLayout; +} VkPhysicalDeviceUniformBufferStandardLayoutFeatures; + +typedef struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupExtendedTypes; +} VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures; + +typedef struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures { + VkStructureType sType; + void* pNext; + VkBool32 separateDepthStencilLayouts; +} VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures; + +typedef struct VkAttachmentReferenceStencilLayout { + VkStructureType sType; + void* pNext; + VkImageLayout stencilLayout; +} VkAttachmentReferenceStencilLayout; + +typedef struct VkAttachmentDescriptionStencilLayout { + VkStructureType 
sType; + void* pNext; + VkImageLayout stencilInitialLayout; + VkImageLayout stencilFinalLayout; +} VkAttachmentDescriptionStencilLayout; + +typedef struct VkPhysicalDeviceHostQueryResetFeatures { + VkStructureType sType; + void* pNext; + VkBool32 hostQueryReset; +} VkPhysicalDeviceHostQueryResetFeatures; + +typedef struct VkPhysicalDeviceTimelineSemaphoreFeatures { + VkStructureType sType; + void* pNext; + VkBool32 timelineSemaphore; +} VkPhysicalDeviceTimelineSemaphoreFeatures; + +typedef struct VkPhysicalDeviceTimelineSemaphoreProperties { + VkStructureType sType; + void* pNext; + uint64_t maxTimelineSemaphoreValueDifference; +} VkPhysicalDeviceTimelineSemaphoreProperties; + +typedef struct VkSemaphoreTypeCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreType semaphoreType; + uint64_t initialValue; +} VkSemaphoreTypeCreateInfo; + +typedef struct VkTimelineSemaphoreSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreValueCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValueCount; + const uint64_t* pSignalSemaphoreValues; +} VkTimelineSemaphoreSubmitInfo; + +typedef struct VkSemaphoreWaitInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreWaitFlags flags; + uint32_t semaphoreCount; + const VkSemaphore* pSemaphores; + const uint64_t* pValues; +} VkSemaphoreWaitInfo; + +typedef struct VkSemaphoreSignalInfo { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + uint64_t value; +} VkSemaphoreSignalInfo; + +typedef struct VkPhysicalDeviceBufferDeviceAddressFeatures { + VkStructureType sType; + void* pNext; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; +} VkPhysicalDeviceBufferDeviceAddressFeatures; + +typedef struct VkBufferDeviceAddressInfo { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferDeviceAddressInfo; + +typedef struct VkBufferOpaqueCaptureAddressCreateInfo { + VkStructureType sType; + const void* pNext; + uint64_t opaqueCaptureAddress; +} VkBufferOpaqueCaptureAddressCreateInfo; + +typedef struct VkMemoryOpaqueCaptureAddressAllocateInfo { + VkStructureType sType; + const void* pNext; + uint64_t opaqueCaptureAddress; +} VkMemoryOpaqueCaptureAddressAllocateInfo; + +typedef struct VkDeviceMemoryOpaqueCaptureAddressInfo { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkDeviceMemoryOpaqueCaptureAddressInfo; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2)(VkCommandBuffer commandBuffer, const 
VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkResetQueryPool)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValue)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); +typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphores)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphore)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddress)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2( + VkDevice device, + const VkRenderPassCreateInfo2* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkResetQueryPool( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue( + VkDevice device, + VkSemaphore semaphore, + uint64_t* pValue); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores( + VkDevice device, + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore( + VkDevice device, + const VkSemaphoreSignalInfo* pSignalInfo); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddress( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress( + VkDevice device, + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#endif + + +#define VK_KHR_surface 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) +#define VK_KHR_SURFACE_SPEC_VERSION 25 +#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" + +typedef enum VkPresentModeKHR { + VK_PRESENT_MODE_IMMEDIATE_KHR = 0, + VK_PRESENT_MODE_MAILBOX_KHR = 1, + VK_PRESENT_MODE_FIFO_KHR = 2, + VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, + VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, + VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, + VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} 
VkPresentModeKHR; + +typedef enum VkColorSpaceKHR { + VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, + VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, + VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, + VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1000104003, + VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, + VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, + VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, + VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, + VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, + VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, + VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, + VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, + VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, + VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, + VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, + VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000, + VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, + VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkColorSpaceKHR; + +typedef enum VkSurfaceTransformFlagBitsKHR { + VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, + VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, + VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, + VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, + VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, + VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSurfaceTransformFlagBitsKHR; + +typedef enum VkCompositeAlphaFlagBitsKHR { + VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, + VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, + VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, + VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCompositeAlphaFlagBitsKHR; +typedef VkFlags VkCompositeAlphaFlagsKHR; +typedef VkFlags VkSurfaceTransformFlagsKHR; +typedef struct VkSurfaceCapabilitiesKHR { + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; +} VkSurfaceCapabilitiesKHR; + +typedef struct VkSurfaceFormatKHR { + VkFormat format; + VkColorSpaceKHR colorSpace; +} VkSurfaceFormatKHR; + +typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice 
physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( + VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); +#endif + + +#define VK_KHR_swapchain 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) +#define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 +#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" + +typedef enum VkSwapchainCreateFlagBitsKHR { + VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001, + VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, + VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004, + VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSwapchainCreateFlagBitsKHR; +typedef VkFlags VkSwapchainCreateFlagsKHR; + +typedef enum VkDeviceGroupPresentModeFlagBitsKHR { + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001, + VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002, + VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004, + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008, + VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDeviceGroupPresentModeFlagBitsKHR; +typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; +typedef struct VkSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainCreateFlagsKHR flags; + VkSurfaceKHR surface; + uint32_t minImageCount; + VkFormat imageFormat; + VkColorSpaceKHR imageColorSpace; + VkExtent2D imageExtent; + uint32_t imageArrayLayers; + VkImageUsageFlags imageUsage; + VkSharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkSurfaceTransformFlagBitsKHR preTransform; + VkCompositeAlphaFlagBitsKHR compositeAlpha; + VkPresentModeKHR presentMode; + VkBool32 clipped; + VkSwapchainKHR oldSwapchain; +} VkSwapchainCreateInfoKHR; + +typedef struct VkPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t swapchainCount; + const VkSwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + VkResult* pResults; +} VkPresentInfoKHR; + +typedef struct VkImageSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; +} VkImageSwapchainCreateInfoKHR; + +typedef struct VkBindImageMemorySwapchainInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndex; +} VkBindImageMemorySwapchainInfoKHR; + +typedef struct VkAcquireNextImageInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint64_t timeout; + VkSemaphore semaphore; + VkFence fence; + uint32_t deviceMask; +} 
VkAcquireNextImageInfoKHR; + +typedef struct VkDeviceGroupPresentCapabilitiesKHR { + VkStructureType sType; + const void* pNext; + uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupPresentCapabilitiesKHR; + +typedef struct VkDeviceGroupPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint32_t* pDeviceMasks; + VkDeviceGroupPresentModeFlagBitsKHR mode; +} VkDeviceGroupPresentInfoKHR; + +typedef struct VkDeviceGroupSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupSwapchainCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); +typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); +typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( + VkDevice device, + const VkSwapchainCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchain); + +VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( + VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t* pImageIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( + VkQueue queue, + const VkPresentInfoKHR* pPresentInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, + VkSurfaceKHR surface, + VkDeviceGroupPresentModeFlagsKHR* pModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pRectCount, + VkRect2D* pRects); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( + VkDevice device, + const VkAcquireNextImageInfoKHR* 
pAcquireInfo, + uint32_t* pImageIndex); +#endif + + +#define VK_KHR_display 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) +#define VK_KHR_DISPLAY_SPEC_VERSION 23 +#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" +typedef VkFlags VkDisplayModeCreateFlagsKHR; + +typedef enum VkDisplayPlaneAlphaFlagBitsKHR { + VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, + VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDisplayPlaneAlphaFlagBitsKHR; +typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; +typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; +typedef struct VkDisplayModeParametersKHR { + VkExtent2D visibleRegion; + uint32_t refreshRate; +} VkDisplayModeParametersKHR; + +typedef struct VkDisplayModeCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeCreateFlagsKHR flags; + VkDisplayModeParametersKHR parameters; +} VkDisplayModeCreateInfoKHR; + +typedef struct VkDisplayModePropertiesKHR { + VkDisplayModeKHR displayMode; + VkDisplayModeParametersKHR parameters; +} VkDisplayModePropertiesKHR; + +typedef struct VkDisplayPlaneCapabilitiesKHR { + VkDisplayPlaneAlphaFlagsKHR supportedAlpha; + VkOffset2D minSrcPosition; + VkOffset2D maxSrcPosition; + VkExtent2D minSrcExtent; + VkExtent2D maxSrcExtent; + VkOffset2D minDstPosition; + VkOffset2D maxDstPosition; + VkExtent2D minDstExtent; + VkExtent2D maxDstExtent; +} VkDisplayPlaneCapabilitiesKHR; + +typedef struct VkDisplayPlanePropertiesKHR { + VkDisplayKHR currentDisplay; + uint32_t currentStackIndex; +} VkDisplayPlanePropertiesKHR; + +typedef struct VkDisplayPropertiesKHR { + VkDisplayKHR display; + const char* displayName; + VkExtent2D physicalDimensions; + VkExtent2D physicalResolution; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkBool32 planeReorderPossible; + VkBool32 persistentContent; +} VkDisplayPropertiesKHR; + +typedef struct VkDisplaySurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceCreateFlagsKHR flags; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkDisplaySurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, 
VkDisplayPlaneCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlanePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( + VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t* pDisplayCount, + VkDisplayKHR* pDisplays); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDisplayModeKHR* pMode); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( + VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +#define VK_KHR_display_swapchain 1 +#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10 +#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" +typedef struct VkDisplayPresentInfoKHR { + VkStructureType sType; + const void* pNext; + VkRect2D srcRect; + VkRect2D dstRect; + VkBool32 persistent; +} VkDisplayPresentInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchains); +#endif + + +#define VK_KHR_sampler_mirror_clamp_to_edge 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" + + +#define VK_KHR_multiview 1 +#define VK_KHR_MULTIVIEW_SPEC_VERSION 1 +#define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" +typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR; + +typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR; + +typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR; + + + +#define VK_KHR_get_physical_device_properties2 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" +typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR; + +typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR; + +typedef VkFormatProperties2 VkFormatProperties2KHR; + +typedef 
VkImageFormatProperties2 VkImageFormatProperties2KHR; + +typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR; + +typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR; + +typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR; + +typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR; + +typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); +#endif + + +#define VK_KHR_device_group 1 +#define VK_KHR_DEVICE_GROUP_SPEC_VERSION 4 +#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" +typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR; + +typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR; + +typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR; + +typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR; + +typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR; + +typedef 
VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR; + +typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR; + +typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR; + +typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR; + +typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR; + +typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); +#endif + + +#define VK_KHR_shader_draw_parameters 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" + + +#define VK_KHR_maintenance1 1 +#define VK_KHR_MAINTENANCE1_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE1_EXTENSION_NAME "VK_KHR_maintenance1" +typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; + +typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); +#endif + + +#define VK_KHR_device_group_creation 1 +#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 +#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" +#define VK_MAX_DEVICE_GROUP_SIZE_KHR VK_MAX_DEVICE_GROUP_SIZE +typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR; + +typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +#endif + + +#define VK_KHR_external_memory_capabilities 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" +#define VK_LUID_SIZE_KHR VK_LUID_SIZE +typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR; + +typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR; + +typedef VkExternalMemoryFeatureFlags 
VkExternalMemoryFeatureFlagsKHR; + +typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR; + +typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR; + +typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR; + +typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR; + +typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR; + +typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR; + +typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); +#endif + + +#define VK_KHR_external_memory 1 +#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" +#define VK_QUEUE_FAMILY_EXTERNAL_KHR VK_QUEUE_FAMILY_EXTERNAL +typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR; + +typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR; + +typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; + + + +#define VK_KHR_external_memory_fd 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" +typedef struct VkImportMemoryFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + int fd; +} VkImportMemoryFdInfoKHR; + +typedef struct VkMemoryFdPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryFdPropertiesKHR; + +typedef struct VkMemoryGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( + VkDevice device, + const VkMemoryGetFdInfoKHR* pGetFdInfo, + int* pFd); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR* pMemoryFdProperties); +#endif + + +#define VK_KHR_external_semaphore_capabilities 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" +typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR; + +typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR; + +typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR; + +typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR; + +typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR; + +typedef VkExternalSemaphoreProperties 
VkExternalSemaphorePropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +#endif + + +#define VK_KHR_external_semaphore 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" +typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR; + +typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR; + +typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; + + + +#define VK_KHR_external_semaphore_fd 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" +typedef struct VkImportSemaphoreFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + int fd; +} VkImportSemaphoreFdInfoKHR; + +typedef struct VkSemaphoreGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( + VkDevice device, + const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( + VkDevice device, + const VkSemaphoreGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + + +#define VK_KHR_push_descriptor 1 +#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 +#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" +typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t maxPushDescriptors; +} VkPhysicalDevicePushDescriptorPropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( + VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); +#endif + + +#define VK_KHR_shader_float16_int8 1 +#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1 +#define 
VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8" +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceShaderFloat16Int8FeaturesKHR; + +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8FeaturesKHR; + + + +#define VK_KHR_16bit_storage 1 +#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" +typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR; + + + +#define VK_KHR_incremental_present 1 +#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 2 +#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" +typedef struct VkRectLayerKHR { + VkOffset2D offset; + VkExtent2D extent; + uint32_t layer; +} VkRectLayerKHR; + +typedef struct VkPresentRegionKHR { + uint32_t rectangleCount; + const VkRectLayerKHR* pRectangles; +} VkPresentRegionKHR; + +typedef struct VkPresentRegionsKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentRegionKHR* pRegions; +} VkPresentRegionsKHR; + + + +#define VK_KHR_descriptor_update_template 1 +typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; + +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" +typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR; + +typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR; + +typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR; + +typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); +#endif + + +#define VK_KHR_imageless_framebuffer 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer" +typedef VkPhysicalDeviceImagelessFramebufferFeatures VkPhysicalDeviceImagelessFramebufferFeaturesKHR; + +typedef VkFramebufferAttachmentsCreateInfo VkFramebufferAttachmentsCreateInfoKHR; + +typedef VkFramebufferAttachmentImageInfo VkFramebufferAttachmentImageInfoKHR; + +typedef VkRenderPassAttachmentBeginInfo VkRenderPassAttachmentBeginInfoKHR; + + + +#define VK_KHR_create_renderpass2 1 +#define 
VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1 +#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2" +typedef VkRenderPassCreateInfo2 VkRenderPassCreateInfo2KHR; + +typedef VkAttachmentDescription2 VkAttachmentDescription2KHR; + +typedef VkAttachmentReference2 VkAttachmentReference2KHR; + +typedef VkSubpassDescription2 VkSubpassDescription2KHR; + +typedef VkSubpassDependency2 VkSubpassDependency2KHR; + +typedef VkSubpassBeginInfo VkSubpassBeginInfoKHR; + +typedef VkSubpassEndInfo VkSubpassEndInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR( + VkDevice device, + const VkRenderPassCreateInfo2* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); +#endif + + +#define VK_KHR_shared_presentable_image 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" +typedef struct VkSharedPresentSurfaceCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkImageUsageFlags sharedPresentSupportedUsageFlags; +} VkSharedPresentSurfaceCapabilitiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( + VkDevice device, + VkSwapchainKHR swapchain); +#endif + + +#define VK_KHR_external_fence_capabilities 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" +typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR; + +typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR; + +typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR; + +typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR; + +typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR; + +typedef VkExternalFenceProperties VkExternalFencePropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( + 
VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); +#endif + + +#define VK_KHR_external_fence 1 +#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" +typedef VkFenceImportFlags VkFenceImportFlagsKHR; + +typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR; + +typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; + + + +#define VK_KHR_external_fence_fd 1 +#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" +typedef struct VkImportFenceFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + int fd; +} VkImportFenceFdInfoKHR; + +typedef struct VkFenceGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( + VkDevice device, + const VkImportFenceFdInfoKHR* pImportFenceFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( + VkDevice device, + const VkFenceGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + + +#define VK_KHR_performance_query 1 +#define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1 +#define VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query" + +typedef enum VkPerformanceCounterUnitKHR { + VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR = 0, + VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR = 1, + VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR = 2, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR = 3, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR = 4, + VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR = 5, + VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR = 6, + VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR = 7, + VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8, + VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9, + VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10, + VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterUnitKHR; + +typedef enum VkPerformanceCounterScopeKHR { + VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0, + VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1, + VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2, + VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, + VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterScopeKHR; + +typedef enum VkPerformanceCounterStorageKHR { + VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR = 0, + VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR = 1, + VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR = 2, + VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5, + VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterStorageKHR; + +typedef enum VkPerformanceCounterDescriptionFlagBitsKHR { + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001, + 
VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002, + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR, + VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterDescriptionFlagBitsKHR; +typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR; + +typedef enum VkAcquireProfilingLockFlagBitsKHR { + VK_ACQUIRE_PROFILING_LOCK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAcquireProfilingLockFlagBitsKHR; +typedef VkFlags VkAcquireProfilingLockFlagsKHR; +typedef struct VkPhysicalDevicePerformanceQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 performanceCounterQueryPools; + VkBool32 performanceCounterMultipleQueryPools; +} VkPhysicalDevicePerformanceQueryFeaturesKHR; + +typedef struct VkPhysicalDevicePerformanceQueryPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 allowCommandBufferQueryCopies; +} VkPhysicalDevicePerformanceQueryPropertiesKHR; + +typedef struct VkPerformanceCounterKHR { + VkStructureType sType; + const void* pNext; + VkPerformanceCounterUnitKHR unit; + VkPerformanceCounterScopeKHR scope; + VkPerformanceCounterStorageKHR storage; + uint8_t uuid[VK_UUID_SIZE]; +} VkPerformanceCounterKHR; + +typedef struct VkPerformanceCounterDescriptionKHR { + VkStructureType sType; + const void* pNext; + VkPerformanceCounterDescriptionFlagsKHR flags; + char name[VK_MAX_DESCRIPTION_SIZE]; + char category[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkPerformanceCounterDescriptionKHR; + +typedef struct VkQueryPoolPerformanceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t queueFamilyIndex; + uint32_t counterIndexCount; + const uint32_t* pCounterIndices; +} VkQueryPoolPerformanceCreateInfoKHR; + +typedef union VkPerformanceCounterResultKHR { + int32_t int32; + int64_t int64; + uint32_t uint32; + uint64_t uint64; + float float32; + double float64; +} VkPerformanceCounterResultKHR; + +typedef struct VkAcquireProfilingLockInfoKHR { + VkStructureType sType; + const void* pNext; + VkAcquireProfilingLockFlagsKHR flags; + uint64_t timeout; +} VkAcquireProfilingLockInfoKHR; + +typedef struct VkPerformanceQuerySubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t counterPassIndex; +} VkPerformanceQuerySubmitInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireProfilingLockKHR)(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkReleaseProfilingLockKHR)(VkDevice device); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t* pCounterCount, + VkPerformanceCounterKHR* pCounters, + VkPerformanceCounterDescriptionKHR* pCounterDescriptions); + +VKAPI_ATTR void 
VKAPI_CALL vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, + uint32_t* pNumPasses); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireProfilingLockKHR( + VkDevice device, + const VkAcquireProfilingLockInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR( + VkDevice device); +#endif + + +#define VK_KHR_maintenance2 1 +#define VK_KHR_MAINTENANCE2_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE2_EXTENSION_NAME "VK_KHR_maintenance2" +typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; + +typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR; + +typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR; + +typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR; + +typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR; + +typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR; + +typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR; + + + +#define VK_KHR_get_surface_capabilities2 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" +typedef struct VkPhysicalDeviceSurfaceInfo2KHR { + VkStructureType sType; + const void* pNext; + VkSurfaceKHR surface; +} VkPhysicalDeviceSurfaceInfo2KHR; + +typedef struct VkSurfaceCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceCapabilitiesKHR surfaceCapabilities; +} VkSurfaceCapabilities2KHR; + +typedef struct VkSurfaceFormat2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceFormatKHR surfaceFormat; +} VkSurfaceFormat2KHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkSurfaceCapabilities2KHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormat2KHR* pSurfaceFormats); +#endif + + +#define VK_KHR_variable_pointers 1 +#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 +#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeaturesKHR; + +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointersFeaturesKHR; + + + +#define VK_KHR_get_display_properties2 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2" +typedef struct VkDisplayProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPropertiesKHR displayProperties; +} VkDisplayProperties2KHR; + +typedef struct VkDisplayPlaneProperties2KHR { + VkStructureType sType; + void* pNext; + 
VkDisplayPlanePropertiesKHR displayPlaneProperties; +} VkDisplayPlaneProperties2KHR; + +typedef struct VkDisplayModeProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayModePropertiesKHR displayModeProperties; +} VkDisplayModeProperties2KHR; + +typedef struct VkDisplayPlaneInfo2KHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeKHR mode; + uint32_t planeIndex; +} VkDisplayPlaneInfo2KHR; + +typedef struct VkDisplayPlaneCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlaneCapabilitiesKHR capabilities; +} VkDisplayPlaneCapabilities2KHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlaneProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModeProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR* pCapabilities); +#endif + + +#define VK_KHR_dedicated_allocation 1 +#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 +#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" +typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR; + +typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; + + + +#define VK_KHR_storage_buffer_storage_class 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" + + +#define VK_KHR_relaxed_block_layout 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" + + +#define VK_KHR_get_memory_requirements2 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" +typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR; + +typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; + +typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; + +typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; + +typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; + +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const 
VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + + +#define VK_KHR_image_format_list 1 +#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 +#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" +typedef VkImageFormatListCreateInfo VkImageFormatListCreateInfoKHR; + + + +#define VK_KHR_sampler_ycbcr_conversion 1 +typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; + +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 14 +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" +typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR; + +typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR; + +typedef VkChromaLocation VkChromaLocationKHR; + +typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR; + +typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR; + +typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR; + +typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR; + +typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; + +typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); +#endif + + +#define VK_KHR_bind_memory2 1 +#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" +typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR; + +typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const 
VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); +#endif + + +#define VK_KHR_maintenance3 1 +#define VK_KHR_MAINTENANCE3_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE3_EXTENSION_NAME "VK_KHR_maintenance3" +typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; + +typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + + +#define VK_KHR_draw_indirect_count 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +#define VK_KHR_shader_subgroup_extended_types 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types" +typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR; + + + +#define VK_KHR_8bit_storage 1 +#define VK_KHR_8BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage" +typedef VkPhysicalDevice8BitStorageFeatures VkPhysicalDevice8BitStorageFeaturesKHR; + + + +#define VK_KHR_shader_atomic_int64 1 +#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64" +typedef VkPhysicalDeviceShaderAtomicInt64Features VkPhysicalDeviceShaderAtomicInt64FeaturesKHR; + + + +#define VK_KHR_shader_clock 1 +#define VK_KHR_SHADER_CLOCK_SPEC_VERSION 1 +#define VK_KHR_SHADER_CLOCK_EXTENSION_NAME "VK_KHR_shader_clock" +typedef struct VkPhysicalDeviceShaderClockFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupClock; + VkBool32 shaderDeviceClock; +} VkPhysicalDeviceShaderClockFeaturesKHR; + + + +#define 
VK_KHR_driver_properties 1 +#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties" +#define VK_MAX_DRIVER_NAME_SIZE_KHR VK_MAX_DRIVER_NAME_SIZE +#define VK_MAX_DRIVER_INFO_SIZE_KHR VK_MAX_DRIVER_INFO_SIZE +typedef VkDriverId VkDriverIdKHR; + +typedef VkConformanceVersion VkConformanceVersionKHR; + +typedef VkPhysicalDeviceDriverProperties VkPhysicalDeviceDriverPropertiesKHR; + + + +#define VK_KHR_shader_float_controls 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4 +#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls" +typedef VkShaderFloatControlsIndependence VkShaderFloatControlsIndependenceKHR; + +typedef VkPhysicalDeviceFloatControlsProperties VkPhysicalDeviceFloatControlsPropertiesKHR; + + + +#define VK_KHR_depth_stencil_resolve 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve" +typedef VkResolveModeFlagBits VkResolveModeFlagBitsKHR; + +typedef VkResolveModeFlags VkResolveModeFlagsKHR; + +typedef VkSubpassDescriptionDepthStencilResolve VkSubpassDescriptionDepthStencilResolveKHR; + +typedef VkPhysicalDeviceDepthStencilResolveProperties VkPhysicalDeviceDepthStencilResolvePropertiesKHR; + + + +#define VK_KHR_swapchain_mutable_format 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format" + + +#define VK_KHR_timeline_semaphore 1 +#define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2 +#define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore" +typedef VkSemaphoreType VkSemaphoreTypeKHR; + +typedef VkSemaphoreWaitFlagBits VkSemaphoreWaitFlagBitsKHR; + +typedef VkSemaphoreWaitFlags VkSemaphoreWaitFlagsKHR; + +typedef VkPhysicalDeviceTimelineSemaphoreFeatures VkPhysicalDeviceTimelineSemaphoreFeaturesKHR; + +typedef VkPhysicalDeviceTimelineSemaphoreProperties VkPhysicalDeviceTimelineSemaphorePropertiesKHR; + +typedef VkSemaphoreTypeCreateInfo VkSemaphoreTypeCreateInfoKHR; + +typedef VkTimelineSemaphoreSubmitInfo VkTimelineSemaphoreSubmitInfoKHR; + +typedef VkSemaphoreWaitInfo VkSemaphoreWaitInfoKHR; + +typedef VkSemaphoreSignalInfo VkSemaphoreSignalInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValueKHR)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); +typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR( + VkDevice device, + VkSemaphore semaphore, + uint64_t* pValue); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphoresKHR( + VkDevice device, + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR( + VkDevice device, + const VkSemaphoreSignalInfo* pSignalInfo); +#endif + + +#define VK_KHR_vulkan_memory_model 1 +#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3 +#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model" +typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryModelFeaturesKHR; + + + +#define VK_KHR_shader_terminate_invocation 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME "VK_KHR_shader_terminate_invocation" 
+typedef struct VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderTerminateInvocation; +} VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR; + + + +#define VK_KHR_fragment_shading_rate 1 +#define VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION 1 +#define VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "VK_KHR_fragment_shading_rate" + +typedef enum VkFragmentShadingRateCombinerOpKHR { + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = 0, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = 1, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = 2, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = 3, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = 4, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_ENUM_KHR = 0x7FFFFFFF +} VkFragmentShadingRateCombinerOpKHR; +typedef struct VkFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + const VkAttachmentReference2* pFragmentShadingRateAttachment; + VkExtent2D shadingRateAttachmentTexelSize; +} VkFragmentShadingRateAttachmentInfoKHR; + +typedef struct VkPipelineFragmentShadingRateStateCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkExtent2D fragmentSize; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateStateCreateInfoKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineFragmentShadingRate; + VkBool32 primitiveFragmentShadingRate; + VkBool32 attachmentFragmentShadingRate; +} VkPhysicalDeviceFragmentShadingRateFeaturesKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentShadingRateAttachmentTexelSize; + VkExtent2D maxFragmentShadingRateAttachmentTexelSize; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio; + VkBool32 primitiveFragmentShadingRateWithMultipleViewports; + VkBool32 layeredShadingRateAttachments; + VkBool32 fragmentShadingRateNonTrivialCombinerOps; + VkExtent2D maxFragmentSize; + uint32_t maxFragmentSizeAspectRatio; + uint32_t maxFragmentShadingRateCoverageSamples; + VkSampleCountFlagBits maxFragmentShadingRateRasterizationSamples; + VkBool32 fragmentShadingRateWithShaderDepthStencilWrites; + VkBool32 fragmentShadingRateWithSampleMask; + VkBool32 fragmentShadingRateWithShaderSampleMask; + VkBool32 fragmentShadingRateWithConservativeRasterization; + VkBool32 fragmentShadingRateWithFragmentShaderInterlock; + VkBool32 fragmentShadingRateWithCustomSampleLocations; + VkBool32 fragmentShadingRateStrictMultiplyCombiner; +} VkPhysicalDeviceFragmentShadingRatePropertiesKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateKHR { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleCounts; + VkExtent2D fragmentSize; +} VkPhysicalDeviceFragmentShadingRateKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); + +VKAPI_ATTR void VKAPI_CALL 
vkCmdSetFragmentShadingRateKHR( + VkCommandBuffer commandBuffer, + const VkExtent2D* pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif + + +#define VK_KHR_spirv_1_4 1 +#define VK_KHR_SPIRV_1_4_SPEC_VERSION 1 +#define VK_KHR_SPIRV_1_4_EXTENSION_NAME "VK_KHR_spirv_1_4" + + +#define VK_KHR_surface_protected_capabilities 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities" +typedef struct VkSurfaceProtectedCapabilitiesKHR { + VkStructureType sType; + const void* pNext; + VkBool32 supportsProtected; +} VkSurfaceProtectedCapabilitiesKHR; + + + +#define VK_KHR_separate_depth_stencil_layouts 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts" +typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR; + +typedef VkAttachmentReferenceStencilLayout VkAttachmentReferenceStencilLayoutKHR; + +typedef VkAttachmentDescriptionStencilLayout VkAttachmentDescriptionStencilLayoutKHR; + + + +#define VK_KHR_uniform_buffer_standard_layout 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout" +typedef VkPhysicalDeviceUniformBufferStandardLayoutFeatures VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; + + + +#define VK_KHR_buffer_device_address 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address" +typedef VkPhysicalDeviceBufferDeviceAddressFeatures VkPhysicalDeviceBufferDeviceAddressFeaturesKHR; + +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoKHR; + +typedef VkBufferOpaqueCaptureAddressCreateInfo VkBufferOpaqueCaptureAddressCreateInfoKHR; + +typedef VkMemoryOpaqueCaptureAddressAllocateInfo VkMemoryOpaqueCaptureAddressAllocateInfoKHR; + +typedef VkDeviceMemoryOpaqueCaptureAddressInfo VkDeviceMemoryOpaqueCaptureAddressInfoKHR; + +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressKHR( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddressKHR( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR( + VkDevice device, + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#endif + + +#define VK_KHR_deferred_host_operations 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) +#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 4 +#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations" +typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation); +typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const 
VkAllocationCallbacks* pAllocator); +typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR( + VkDevice device, + const VkAllocationCallbacks* pAllocator, + VkDeferredOperationKHR* pDeferredOperation); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR( + VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR( + VkDevice device, + VkDeferredOperationKHR operation); +#endif + + +#define VK_KHR_pipeline_executable_properties 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties" + +typedef enum VkPipelineExecutableStatisticFormatKHR { + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = 0, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPipelineExecutableStatisticFormatKHR; +typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineExecutableInfo; +} VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + +typedef struct VkPipelineInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; +} VkPipelineInfoKHR; + +typedef struct VkPipelineExecutablePropertiesKHR { + VkStructureType sType; + void* pNext; + VkShaderStageFlags stages; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + uint32_t subgroupSize; +} VkPipelineExecutablePropertiesKHR; + +typedef struct VkPipelineExecutableInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; + uint32_t executableIndex; +} VkPipelineExecutableInfoKHR; + +typedef union VkPipelineExecutableStatisticValueKHR { + VkBool32 b32; + int64_t i64; + uint64_t u64; + double f64; +} VkPipelineExecutableStatisticValueKHR; + +typedef struct VkPipelineExecutableStatisticKHR { + VkStructureType sType; + void* pNext; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkPipelineExecutableStatisticFormatKHR format; + VkPipelineExecutableStatisticValueKHR value; +} VkPipelineExecutableStatisticKHR; + +typedef struct VkPipelineExecutableInternalRepresentationKHR { + VkStructureType sType; + void* pNext; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkBool32 isText; + size_t dataSize; + void* pData; +} VkPipelineExecutableInternalRepresentationKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutablePropertiesKHR)(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR 
*PFN_vkGetPipelineExecutableStatisticsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableInternalRepresentationsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutablePropertiesKHR( + VkDevice device, + const VkPipelineInfoKHR* pPipelineInfo, + uint32_t* pExecutableCount, + VkPipelineExecutablePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableStatisticsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pStatisticCount, + VkPipelineExecutableStatisticKHR* pStatistics); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); +#endif + + +#define VK_KHR_pipeline_library 1 +#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library" +typedef struct VkPipelineLibraryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t libraryCount; + const VkPipeline* pLibraries; +} VkPipelineLibraryCreateInfoKHR; + + + +#define VK_KHR_shader_non_semantic_info 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info" + + +#define VK_KHR_synchronization2 1 +typedef uint64_t VkFlags64; +#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1 +#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2" +typedef VkFlags64 VkPipelineStageFlags2KHR; + +// Flag bits for VkPipelineStageFlagBits2KHR +typedef VkFlags64 VkPipelineStageFlagBits2KHR; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL; +static const 
VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR = 0x04000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR = 0x08000000ULL; +#endif +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR = 0x00200000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV = 0x00200000; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0x00080000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000ULL; + +typedef VkFlags64 VkAccessFlags2KHR; + +// Flag bits for VkAccessFlagBits2KHR +typedef VkFlags64 VkAccessFlagBits2KHR; +static const VkAccessFlagBits2KHR VK_ACCESS_2_NONE_KHR = 0ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040ULL; 
+static const VkAccessFlagBits2KHR VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2KHR VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR = 0x800000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2KHR VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR = 0x1000000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2KHR VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR = 0x2000000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2KHR VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR = 0x4000000000ULL; +#endif +static const VkAccessFlagBits2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000; +static const VkAccessFlagBits2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000; +static const VkAccessFlagBits2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000; +static const VkAccessFlagBits2KHR VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000ULL; + + +typedef enum VkSubmitFlagBitsKHR { + VK_SUBMIT_PROTECTED_BIT_KHR = 0x00000001, + VK_SUBMIT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSubmitFlagBitsKHR; +typedef VkFlags VkSubmitFlagsKHR; +typedef struct VkMemoryBarrier2KHR { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2KHR srcStageMask; + VkAccessFlags2KHR srcAccessMask; + VkPipelineStageFlags2KHR dstStageMask; + VkAccessFlags2KHR dstAccessMask; +} 
VkMemoryBarrier2KHR; + +typedef struct VkBufferMemoryBarrier2KHR { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2KHR srcStageMask; + VkAccessFlags2KHR srcAccessMask; + VkPipelineStageFlags2KHR dstStageMask; + VkAccessFlags2KHR dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier2KHR; + +typedef struct VkImageMemoryBarrier2KHR { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2KHR srcStageMask; + VkAccessFlags2KHR srcAccessMask; + VkPipelineStageFlags2KHR dstStageMask; + VkAccessFlags2KHR dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier2KHR; + +typedef struct VkDependencyInfoKHR { + VkStructureType sType; + const void* pNext; + VkDependencyFlags dependencyFlags; + uint32_t memoryBarrierCount; + const VkMemoryBarrier2KHR* pMemoryBarriers; + uint32_t bufferMemoryBarrierCount; + const VkBufferMemoryBarrier2KHR* pBufferMemoryBarriers; + uint32_t imageMemoryBarrierCount; + const VkImageMemoryBarrier2KHR* pImageMemoryBarriers; +} VkDependencyInfoKHR; + +typedef struct VkSemaphoreSubmitInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + uint64_t value; + VkPipelineStageFlags2KHR stageMask; + uint32_t deviceIndex; +} VkSemaphoreSubmitInfoKHR; + +typedef struct VkCommandBufferSubmitInfoKHR { + VkStructureType sType; + const void* pNext; + VkCommandBuffer commandBuffer; + uint32_t deviceMask; +} VkCommandBufferSubmitInfoKHR; + +typedef struct VkSubmitInfo2KHR { + VkStructureType sType; + const void* pNext; + VkSubmitFlagsKHR flags; + uint32_t waitSemaphoreInfoCount; + const VkSemaphoreSubmitInfoKHR* pWaitSemaphoreInfos; + uint32_t commandBufferInfoCount; + const VkCommandBufferSubmitInfoKHR* pCommandBufferInfos; + uint32_t signalSemaphoreInfoCount; + const VkSemaphoreSubmitInfoKHR* pSignalSemaphoreInfos; +} VkSubmitInfo2KHR; + +typedef struct VkPhysicalDeviceSynchronization2FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 synchronization2; +} VkPhysicalDeviceSynchronization2FeaturesKHR; + +typedef struct VkQueueFamilyCheckpointProperties2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2KHR checkpointExecutionStageMask; +} VkQueueFamilyCheckpointProperties2NV; + +typedef struct VkCheckpointData2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2KHR stage; + void* pCheckpointMarker; +} VkCheckpointData2NV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfoKHR* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfoKHR* pDependencyInfos); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkQueryPool queryPool, uint32_t query); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence); +typedef void (VKAPI_PTR 
*PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfoKHR* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2KHR stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfoKHR* pDependencyInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR( + VkCommandBuffer commandBuffer, + const VkDependencyInfoKHR* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2KHR stage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2KHR* pSubmits, + VkFence fence); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2KHR stage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointData2NV* pCheckpointData); +#endif + + +#define VK_KHR_zero_initialize_workgroup_memory 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory" +typedef struct VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderZeroInitializeWorkgroupMemory; +} VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; + + + +#define VK_KHR_workgroup_memory_explicit_layout 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME "VK_KHR_workgroup_memory_explicit_layout" +typedef struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 workgroupMemoryExplicitLayout; + VkBool32 workgroupMemoryExplicitLayoutScalarBlockLayout; + VkBool32 workgroupMemoryExplicitLayout8BitAccess; + VkBool32 workgroupMemoryExplicitLayout16BitAccess; +} VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + + +#define VK_KHR_copy_commands2 1 +#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1 +#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2" +typedef struct VkBufferCopy2KHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy2KHR; + +typedef struct VkCopyBufferInfo2KHR { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferCopy2KHR* pRegions; +} VkCopyBufferInfo2KHR; + +typedef struct VkImageCopy2KHR { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy2KHR; + +typedef struct VkCopyImageInfo2KHR { + VkStructureType sType; + const void* pNext; + 
VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2KHR* pRegions; +} VkCopyImageInfo2KHR; + +typedef struct VkBufferImageCopy2KHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy2KHR; + +typedef struct VkCopyBufferToImageInfo2KHR { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkBufferImageCopy2KHR* pRegions; +} VkCopyBufferToImageInfo2KHR; + +typedef struct VkCopyImageToBufferInfo2KHR { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferImageCopy2KHR* pRegions; +} VkCopyImageToBufferInfo2KHR; + +typedef struct VkImageBlit2KHR { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit2KHR; + +typedef struct VkBlitImageInfo2KHR { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageBlit2KHR* pRegions; + VkFilter filter; +} VkBlitImageInfo2KHR; + +typedef struct VkImageResolve2KHR { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve2KHR; + +typedef struct VkResolveImageInfo2KHR { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageResolve2KHR* pRegions; +} VkResolveImageInfo2KHR; + +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2KHR* pCopyBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2KHR* pCopyImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR( + 
VkCommandBuffer commandBuffer, + const VkBlitImageInfo2KHR* pBlitImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2KHR* pResolveImageInfo); +#endif + + +#define VK_EXT_debug_report 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) +#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 10 +#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" + +typedef enum VkDebugReportObjectTypeEXT { + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, + VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, + VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, + VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, + VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, + VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, + VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, + VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, + VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, + VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, + VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, + VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, + VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT = 1000029000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportObjectTypeEXT; + +typedef enum VkDebugReportFlagBitsEXT { + VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, + VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, + VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, + VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, + VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, + VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportFlagBitsEXT; +typedef VkFlags VkDebugReportFlagsEXT; +typedef VkBool32 
(VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, + void* pUserData); + +typedef struct VkDebugReportCallbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; +} VkDebugReportCallbackCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( + VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugReportCallbackEXT* pCallback); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( + VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( + VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage); +#endif + + +#define VK_NV_glsl_shader 1 +#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 +#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" + + +#define VK_EXT_depth_range_unrestricted 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" + + +#define VK_IMG_filter_cubic 1 +#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 +#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" + + +#define VK_AMD_rasterization_order 1 +#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 +#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" + +typedef enum VkRasterizationOrderAMD { + VK_RASTERIZATION_ORDER_STRICT_AMD = 0, + VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, + VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF +} VkRasterizationOrderAMD; +typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { + VkStructureType sType; + const void* pNext; + VkRasterizationOrderAMD rasterizationOrder; +} VkPipelineRasterizationStateRasterizationOrderAMD; + + + +#define VK_AMD_shader_trinary_minmax 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" + + +#define VK_AMD_shader_explicit_vertex_parameter 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" + + +#define VK_EXT_debug_marker 1 +#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 +#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" +typedef struct VkDebugMarkerObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT 
objectType; + uint64_t object; + const char* pObjectName; +} VkDebugMarkerObjectNameInfoEXT; + +typedef struct VkDebugMarkerObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugMarkerObjectTagInfoEXT; + +typedef struct VkDebugMarkerMarkerInfoEXT { + VkStructureType sType; + const void* pNext; + const char* pMarkerName; + float color[4]; +} VkDebugMarkerMarkerInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( + VkDevice device, + const VkDebugMarkerObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( + VkDevice device, + const VkDebugMarkerObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +#endif + + +#define VK_AMD_gcn_shader 1 +#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 +#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" + + +#define VK_NV_dedicated_allocation 1 +#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" +typedef struct VkDedicatedAllocationImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationImageCreateInfoNV; + +typedef struct VkDedicatedAllocationBufferCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationBufferCreateInfoNV; + +typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkDedicatedAllocationMemoryAllocateInfoNV; + + + +#define VK_EXT_transform_feedback 1 +#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback" +typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT; +typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 transformFeedback; + VkBool32 geometryStreams; +} VkPhysicalDeviceTransformFeedbackFeaturesEXT; + +typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxTransformFeedbackStreams; + uint32_t maxTransformFeedbackBuffers; + VkDeviceSize maxTransformFeedbackBufferSize; + uint32_t maxTransformFeedbackStreamDataSize; + uint32_t maxTransformFeedbackBufferDataSize; + uint32_t maxTransformFeedbackBufferDataStride; + VkBool32 transformFeedbackQueries; + VkBool32 
transformFeedbackStreamsLinesTriangles; + VkBool32 transformFeedbackRasterizationStreamSelect; + VkBool32 transformFeedbackDraw; +} VkPhysicalDeviceTransformFeedbackPropertiesEXT; + +typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateStreamCreateFlagsEXT flags; + uint32_t rasterizationStream; +} VkPipelineRasterizationStateStreamCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes); +typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( + VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride); +#endif + + +#define VK_NVX_binary_import 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX) +#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 1 +#define VK_NVX_BINARY_IMPORT_EXTENSION_NAME "VK_NVX_binary_import" +typedef struct VkCuModuleCreateInfoNVX { + VkStructureType sType; + const void* pNext; + size_t dataSize; + const void* pData; +} VkCuModuleCreateInfoNVX; + +typedef struct VkCuFunctionCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuModuleNVX module; + const 
char* pName; +} VkCuFunctionCreateInfoNVX; + +typedef struct VkCuLaunchInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuFunctionNVX function; + uint32_t gridDimX; + uint32_t gridDimY; + uint32_t gridDimZ; + uint32_t blockDimX; + uint32_t blockDimY; + uint32_t blockDimZ; + uint32_t sharedMemBytes; + size_t paramCount; + const void* const * pParams; + size_t extraCount; + const void* const * pExtras; +} VkCuLaunchInfoNVX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuModuleNVX)(VkDevice device, const VkCuModuleCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuModuleNVX* pModule); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuFunctionNVX)(VkDevice device, const VkCuFunctionCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuFunctionNVX* pFunction); +typedef void (VKAPI_PTR *PFN_vkDestroyCuModuleNVX)(VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyCuFunctionNVX)(VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCuLaunchKernelNVX)(VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX* pLaunchInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuModuleNVX( + VkDevice device, + const VkCuModuleCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuModuleNVX* pModule); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuFunctionNVX( + VkDevice device, + const VkCuFunctionCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuFunctionNVX* pFunction); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuModuleNVX( + VkDevice device, + VkCuModuleNVX module, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuFunctionNVX( + VkDevice device, + VkCuFunctionNVX function, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX( + VkCommandBuffer commandBuffer, + const VkCuLaunchInfoNVX* pLaunchInfo); +#endif + + +#define VK_NVX_image_view_handle 1 +#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2 +#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" +typedef struct VkImageViewHandleInfoNVX { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkDescriptorType descriptorType; + VkSampler sampler; +} VkImageViewHandleInfoNVX; + +typedef struct VkImageViewAddressPropertiesNVX { + VkStructureType sType; + void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; +} VkImageViewAddressPropertiesNVX; + +typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX( + VkDevice device, + const VkImageViewHandleInfoNVX* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX( + VkDevice device, + VkImageView imageView, + VkImageViewAddressPropertiesNVX* pProperties); +#endif + + +#define VK_AMD_draw_indirect_count 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2 +#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); 
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +#define VK_AMD_negative_viewport_height 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" + + +#define VK_AMD_gpu_shader_half_float 1 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" + + +#define VK_AMD_shader_ballot 1 +#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 +#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" + + +#define VK_AMD_texture_gather_bias_lod 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" +typedef struct VkTextureLODGatherFormatPropertiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 supportsTextureGatherLODBiasAMD; +} VkTextureLODGatherFormatPropertiesAMD; + + + +#define VK_AMD_shader_info 1 +#define VK_AMD_SHADER_INFO_SPEC_VERSION 1 +#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" + +typedef enum VkShaderInfoTypeAMD { + VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, + VK_SHADER_INFO_TYPE_BINARY_AMD = 1, + VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, + VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderInfoTypeAMD; +typedef struct VkShaderResourceUsageAMD { + uint32_t numUsedVgprs; + uint32_t numUsedSgprs; + uint32_t ldsSizePerLocalWorkGroup; + size_t ldsUsageSizeInBytes; + size_t scratchMemUsageInBytes; +} VkShaderResourceUsageAMD; + +typedef struct VkShaderStatisticsInfoAMD { + VkShaderStageFlags shaderStageMask; + VkShaderResourceUsageAMD resourceUsage; + uint32_t numPhysicalVgprs; + uint32_t numPhysicalSgprs; + uint32_t numAvailableVgprs; + uint32_t numAvailableSgprs; + uint32_t computeWorkGroupSize[3]; +} VkShaderStatisticsInfoAMD; + +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( + VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t* pInfoSize, + void* pInfo); +#endif + + +#define VK_AMD_shader_image_load_store_lod 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" + + +#define VK_NV_corner_sampled_image 1 +#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2 +#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image" +typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cornerSampledImage; +} VkPhysicalDeviceCornerSampledImageFeaturesNV; + + + +#define 
VK_IMG_format_pvrtc 1 +#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 +#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" + + +#define VK_NV_external_memory_capabilities 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" + +typedef enum VkExternalMemoryHandleTypeFlagBitsNV { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBitsNV; +typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; + +typedef enum VkExternalMemoryFeatureFlagBitsNV { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBitsNV; +typedef VkFlags VkExternalMemoryFeatureFlagsNV; +typedef struct VkExternalImageFormatPropertiesNV { + VkImageFormatProperties imageFormatProperties; + VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; + VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; +} VkExternalImageFormatPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +#endif + + +#define VK_NV_external_memory 1 +#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" +typedef struct VkExternalMemoryImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExternalMemoryImageCreateInfoNV; + +typedef struct VkExportMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExportMemoryAllocateInfoNV; + + + +#define VK_EXT_validation_flags 1 +#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 2 +#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" + +typedef enum VkValidationCheckEXT { + VK_VALIDATION_CHECK_ALL_EXT = 0, + VK_VALIDATION_CHECK_SHADERS_EXT = 1, + VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCheckEXT; +typedef struct VkValidationFlagsEXT { + VkStructureType sType; + const void* pNext; + uint32_t disabledValidationCheckCount; + const VkValidationCheckEXT* pDisabledValidationChecks; +} VkValidationFlagsEXT; + + + +#define VK_EXT_shader_subgroup_ballot 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" + + 
+#define VK_EXT_shader_subgroup_vote 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" + + +#define VK_EXT_texture_compression_astc_hdr 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr" +typedef struct VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 textureCompressionASTC_HDR; +} VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; + + + +#define VK_EXT_astc_decode_mode 1 +#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1 +#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode" +typedef struct VkImageViewASTCDecodeModeEXT { + VkStructureType sType; + const void* pNext; + VkFormat decodeMode; +} VkImageViewASTCDecodeModeEXT; + +typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 decodeModeSharedExponent; +} VkPhysicalDeviceASTCDecodeFeaturesEXT; + + + +#define VK_EXT_conditional_rendering 1 +#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2 +#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering" + +typedef enum VkConditionalRenderingFlagBitsEXT { + VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001, + VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConditionalRenderingFlagBitsEXT; +typedef VkFlags VkConditionalRenderingFlagsEXT; +typedef struct VkConditionalRenderingBeginInfoEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceSize offset; + VkConditionalRenderingFlagsEXT flags; +} VkConditionalRenderingBeginInfoEXT; + +typedef struct VkPhysicalDeviceConditionalRenderingFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 conditionalRendering; + VkBool32 inheritedConditionalRendering; +} VkPhysicalDeviceConditionalRenderingFeaturesEXT; + +typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 conditionalRenderingEnable; +} VkCommandBufferInheritanceConditionalRenderingInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); +typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT( + VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( + VkCommandBuffer commandBuffer); +#endif + + +#define VK_NV_clip_space_w_scaling 1 +#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 +#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" +typedef struct VkViewportWScalingNV { + float xcoeff; + float ycoeff; +} VkViewportWScalingNV; + +typedef struct VkPipelineViewportWScalingStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportWScalingEnable; + uint32_t viewportCount; + const VkViewportWScalingNV* pViewportWScalings; +} VkPipelineViewportWScalingStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL 
vkCmdSetViewportWScalingNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV* pViewportWScalings); +#endif + + +#define VK_EXT_direct_mode_display 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" +typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); +#endif + + +#define VK_EXT_display_surface_counter 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" + +typedef enum VkSurfaceCounterFlagBitsEXT { + VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001, + VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT, + VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSurfaceCounterFlagBitsEXT; +typedef VkFlags VkSurfaceCounterFlagsEXT; +typedef struct VkSurfaceCapabilities2EXT { + VkStructureType sType; + void* pNext; + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; + VkSurfaceCounterFlagsEXT supportedSurfaceCounters; +} VkSurfaceCapabilities2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT* pSurfaceCapabilities); +#endif + + +#define VK_EXT_display_control 1 +#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" + +typedef enum VkDisplayPowerStateEXT { + VK_DISPLAY_POWER_STATE_OFF_EXT = 0, + VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, + VK_DISPLAY_POWER_STATE_ON_EXT = 2, + VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayPowerStateEXT; + +typedef enum VkDeviceEventTypeEXT { + VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, + VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceEventTypeEXT; + +typedef enum VkDisplayEventTypeEXT { + VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, + VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayEventTypeEXT; +typedef struct VkDisplayPowerInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayPowerStateEXT powerState; +} VkDisplayPowerInfoEXT; + +typedef struct VkDeviceEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceEventTypeEXT deviceEvent; +} VkDeviceEventInfoEXT; + +typedef struct VkDisplayEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayEventTypeEXT displayEvent; +} VkDisplayEventInfoEXT; + +typedef struct VkSwapchainCounterCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSurfaceCounterFlagsEXT surfaceCounters; +} VkSwapchainCounterCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); +typedef VkResult (VKAPI_PTR 
*PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT* pDisplayPowerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( + VkDevice device, + const VkDeviceEventInfoEXT* pDeviceEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT* pDisplayEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( + VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t* pCounterValue); +#endif + + +#define VK_GOOGLE_display_timing 1 +#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 +#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" +typedef struct VkRefreshCycleDurationGOOGLE { + uint64_t refreshDuration; +} VkRefreshCycleDurationGOOGLE; + +typedef struct VkPastPresentationTimingGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; + uint64_t actualPresentTime; + uint64_t earliestPresentTime; + uint64_t presentMargin; +} VkPastPresentationTimingGOOGLE; + +typedef struct VkPresentTimeGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; +} VkPresentTimeGOOGLE; + +typedef struct VkPresentTimesInfoGOOGLE { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentTimeGOOGLE* pTimes; +} VkPresentTimesInfoGOOGLE; + +typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pPresentationTimingCount, + VkPastPresentationTimingGOOGLE* pPresentationTimings); +#endif + + +#define VK_NV_sample_mask_override_coverage 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" + + +#define VK_NV_geometry_shader_passthrough 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" + + +#define VK_NV_viewport_array2 1 +#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME "VK_NV_viewport_array2" + + +#define VK_NVX_multiview_per_view_attributes 1 +#define 
VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" +typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { + VkStructureType sType; + void* pNext; + VkBool32 perViewPositionAllComponents; +} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + + +#define VK_NV_viewport_swizzle 1 +#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" + +typedef enum VkViewportCoordinateSwizzleNV { + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, + VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF +} VkViewportCoordinateSwizzleNV; +typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; +typedef struct VkViewportSwizzleNV { + VkViewportCoordinateSwizzleNV x; + VkViewportCoordinateSwizzleNV y; + VkViewportCoordinateSwizzleNV z; + VkViewportCoordinateSwizzleNV w; +} VkViewportSwizzleNV; + +typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineViewportSwizzleStateCreateFlagsNV flags; + uint32_t viewportCount; + const VkViewportSwizzleNV* pViewportSwizzles; +} VkPipelineViewportSwizzleStateCreateInfoNV; + + + +#define VK_EXT_discard_rectangles 1 +#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1 +#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" + +typedef enum VkDiscardRectangleModeEXT { + VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, + VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, + VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDiscardRectangleModeEXT; +typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxDiscardRectangles; +} VkPhysicalDeviceDiscardRectanglePropertiesEXT; + +typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineDiscardRectangleStateCreateFlagsEXT flags; + VkDiscardRectangleModeEXT discardRectangleMode; + uint32_t discardRectangleCount; + const VkRect2D* pDiscardRectangles; +} VkPipelineDiscardRectangleStateCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( + VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D* pDiscardRectangles); +#endif + + +#define VK_EXT_conservative_rasterization 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" + +typedef enum VkConservativeRasterizationModeEXT { + VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, + VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, + VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, + VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} 
VkConservativeRasterizationModeEXT; +typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT { + VkStructureType sType; + void* pNext; + float primitiveOverestimationSize; + float maxExtraPrimitiveOverestimationSize; + float extraPrimitiveOverestimationSizeGranularity; + VkBool32 primitiveUnderestimation; + VkBool32 conservativePointAndLineRasterization; + VkBool32 degenerateTrianglesRasterized; + VkBool32 degenerateLinesRasterized; + VkBool32 fullyCoveredFragmentShaderInputVariable; + VkBool32 conservativeRasterizationPostDepthCoverage; +} VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + +typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationConservativeStateCreateFlagsEXT flags; + VkConservativeRasterizationModeEXT conservativeRasterizationMode; + float extraPrimitiveOverestimationSize; +} VkPipelineRasterizationConservativeStateCreateInfoEXT; + + + +#define VK_EXT_depth_clip_enable 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable" +typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDepthClipEnableFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClipEnable; +} VkPhysicalDeviceDepthClipEnableFeaturesEXT; + +typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationDepthClipStateCreateFlagsEXT flags; + VkBool32 depthClipEnable; +} VkPipelineRasterizationDepthClipStateCreateInfoEXT; + + + +#define VK_EXT_swapchain_colorspace 1 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 4 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" + + +#define VK_EXT_hdr_metadata 1 +#define VK_EXT_HDR_METADATA_SPEC_VERSION 2 +#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" +typedef struct VkXYColorEXT { + float x; + float y; +} VkXYColorEXT; + +typedef struct VkHdrMetadataEXT { + VkStructureType sType; + const void* pNext; + VkXYColorEXT displayPrimaryRed; + VkXYColorEXT displayPrimaryGreen; + VkXYColorEXT displayPrimaryBlue; + VkXYColorEXT whitePoint; + float maxLuminance; + float minLuminance; + float maxContentLightLevel; + float maxFrameAverageLightLevel; +} VkHdrMetadataEXT; + +typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR* pSwapchains, + const VkHdrMetadataEXT* pMetadata); +#endif + + +#define VK_EXT_external_memory_dma_buf 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" + + +#define VK_EXT_queue_family_foreign 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" +#define VK_QUEUE_FAMILY_FOREIGN_EXT (~2U) + + +#define VK_EXT_debug_utils 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) +#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 2 +#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" +typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; + +typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT 
{ + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageSeverityFlagBitsEXT; + +typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002, + VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004, + VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageTypeFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; +typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; +typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; +typedef struct VkDebugUtilsLabelEXT { + VkStructureType sType; + const void* pNext; + const char* pLabelName; + float color[4]; +} VkDebugUtilsLabelEXT; + +typedef struct VkDebugUtilsObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + const char* pObjectName; +} VkDebugUtilsObjectNameInfoEXT; + +typedef struct VkDebugUtilsMessengerCallbackDataEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCallbackDataFlagsEXT flags; + const char* pMessageIdName; + int32_t messageIdNumber; + const char* pMessage; + uint32_t queueLabelCount; + const VkDebugUtilsLabelEXT* pQueueLabels; + uint32_t cmdBufLabelCount; + const VkDebugUtilsLabelEXT* pCmdBufLabels; + uint32_t objectCount; + const VkDebugUtilsObjectNameInfoEXT* pObjects; +} VkDebugUtilsMessengerCallbackDataEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDebugUtilsMessengerCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCreateFlagsEXT flags; + VkDebugUtilsMessageSeverityFlagsEXT messageSeverity; + VkDebugUtilsMessageTypeFlagsEXT messageType; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; + void* pUserData; +} VkDebugUtilsMessengerCreateInfoEXT; + +typedef struct VkDebugUtilsObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugUtilsObjectTagInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo); +typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue); +typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef VkResult (VKAPI_PTR 
*PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT( + VkDevice device, + const VkDebugUtilsObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT( + VkDevice device, + const VkDebugUtilsObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT( + VkQueue queue); + +VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT( + VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugUtilsMessengerEXT* pMessenger); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( + VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( + VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); +#endif + + +#define VK_EXT_sampler_filter_minmax 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2 +#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" +typedef VkSamplerReductionMode VkSamplerReductionModeEXT; + +typedef VkSamplerReductionModeCreateInfo VkSamplerReductionModeCreateInfoEXT; + +typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + + + +#define VK_AMD_gpu_shader_int16 1 +#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" + + +#define VK_AMD_mixed_attachment_samples 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" + + +#define VK_AMD_shader_fragment_mask 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" + + +#define VK_EXT_inline_uniform_block 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block" +typedef struct VkPhysicalDeviceInlineUniformBlockFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 inlineUniformBlock; + VkBool32 
descriptorBindingInlineUniformBlockUpdateAfterBind; +} VkPhysicalDeviceInlineUniformBlockFeaturesEXT; + +typedef struct VkPhysicalDeviceInlineUniformBlockPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxInlineUniformBlockSize; + uint32_t maxPerStageDescriptorInlineUniformBlocks; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; + uint32_t maxDescriptorSetInlineUniformBlocks; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; +} VkPhysicalDeviceInlineUniformBlockPropertiesEXT; + +typedef struct VkWriteDescriptorSetInlineUniformBlockEXT { + VkStructureType sType; + const void* pNext; + uint32_t dataSize; + const void* pData; +} VkWriteDescriptorSetInlineUniformBlockEXT; + +typedef struct VkDescriptorPoolInlineUniformBlockCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t maxInlineUniformBlockBindings; +} VkDescriptorPoolInlineUniformBlockCreateInfoEXT; + + + +#define VK_EXT_shader_stencil_export 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" + + +#define VK_EXT_sample_locations 1 +#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 +#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" +typedef struct VkSampleLocationEXT { + float x; + float y; +} VkSampleLocationEXT; + +typedef struct VkSampleLocationsInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampleCountFlagBits sampleLocationsPerPixel; + VkExtent2D sampleLocationGridSize; + uint32_t sampleLocationsCount; + const VkSampleLocationEXT* pSampleLocations; +} VkSampleLocationsInfoEXT; + +typedef struct VkAttachmentSampleLocationsEXT { + uint32_t attachmentIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkAttachmentSampleLocationsEXT; + +typedef struct VkSubpassSampleLocationsEXT { + uint32_t subpassIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkSubpassSampleLocationsEXT; + +typedef struct VkRenderPassSampleLocationsBeginInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentInitialSampleLocationsCount; + const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; + uint32_t postSubpassSampleLocationsCount; + const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations; +} VkRenderPassSampleLocationsBeginInfoEXT; + +typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 sampleLocationsEnable; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkPipelineSampleLocationsStateCreateInfoEXT; + +typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleLocationSampleCounts; + VkExtent2D maxSampleLocationGridSize; + float sampleLocationCoordinateRange[2]; + uint32_t sampleLocationSubPixelBits; + VkBool32 variableSampleLocations; +} VkPhysicalDeviceSampleLocationsPropertiesEXT; + +typedef struct VkMultisamplePropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D maxSampleLocationGridSize; +} VkMultisamplePropertiesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( + VkCommandBuffer 
commandBuffer, + const VkSampleLocationsInfoEXT* pSampleLocationsInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( + VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT* pMultisampleProperties); +#endif + + +#define VK_EXT_blend_operation_advanced 1 +#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 +#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" + +typedef enum VkBlendOverlapEXT { + VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, + VK_BLEND_OVERLAP_DISJOINT_EXT = 1, + VK_BLEND_OVERLAP_CONJOINT_EXT = 2, + VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBlendOverlapEXT; +typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 advancedBlendCoherentOperations; +} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t advancedBlendMaxColorAttachments; + VkBool32 advancedBlendIndependentBlend; + VkBool32 advancedBlendNonPremultipliedSrcColor; + VkBool32 advancedBlendNonPremultipliedDstColor; + VkBool32 advancedBlendCorrelatedOverlap; + VkBool32 advancedBlendAllOperations; +} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; +} VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + + +#define VK_NV_fragment_coverage_to_color 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" +typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; +typedef struct VkPipelineCoverageToColorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageToColorStateCreateFlagsNV flags; + VkBool32 coverageToColorEnable; + uint32_t coverageToColorLocation; +} VkPipelineCoverageToColorStateCreateInfoNV; + + + +#define VK_NV_framebuffer_mixed_samples 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" + +typedef enum VkCoverageModulationModeNV { + VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, + VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, + VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, + VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, + VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageModulationModeNV; +typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; +typedef struct VkPipelineCoverageModulationStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageModulationStateCreateFlagsNV flags; + VkCoverageModulationModeNV coverageModulationMode; + VkBool32 coverageModulationTableEnable; + uint32_t coverageModulationTableCount; + const float* pCoverageModulationTable; +} VkPipelineCoverageModulationStateCreateInfoNV; + + + +#define VK_NV_fill_rectangle 1 +#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 +#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" + + +#define VK_NV_shader_sm_builtins 1 +#define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1 +#define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins" +typedef struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderSMCount; + uint32_t 
shaderWarpsPerSM; +} VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; + +typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shaderSMBuiltins; +} VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; + + + +#define VK_EXT_post_depth_coverage 1 +#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 +#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" + + +#define VK_EXT_image_drm_format_modifier 1 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 1 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier" +typedef struct VkDrmFormatModifierPropertiesEXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags drmFormatModifierTilingFeatures; +} VkDrmFormatModifierPropertiesEXT; + +typedef struct VkDrmFormatModifierPropertiesListEXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesListEXT; + +typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + +typedef struct VkImageDrmFormatModifierListCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t drmFormatModifierCount; + const uint64_t* pDrmFormatModifiers; +} VkImageDrmFormatModifierListCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + const VkSubresourceLayout* pPlaneLayouts; +} VkImageDrmFormatModifierExplicitCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierPropertiesEXT { + VkStructureType sType; + void* pNext; + uint64_t drmFormatModifier; +} VkImageDrmFormatModifierPropertiesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT( + VkDevice device, + VkImage image, + VkImageDrmFormatModifierPropertiesEXT* pProperties); +#endif + + +#define VK_EXT_validation_cache 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) +#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" + +typedef enum VkValidationCacheHeaderVersionEXT { + VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, + VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCacheHeaderVersionEXT; +typedef VkFlags VkValidationCacheCreateFlagsEXT; +typedef struct VkValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheCreateFlagsEXT flags; + size_t initialDataSize; + const void* pInitialData; +} VkValidationCacheCreateInfoEXT; + +typedef struct VkShaderModuleValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheEXT validationCache; +} VkShaderModuleValidationCacheCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache); +typedef void (VKAPI_PTR 
*PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( + VkDevice device, + const VkValidationCacheCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkValidationCacheEXT* pValidationCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( + VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + size_t* pDataSize, + void* pData); +#endif + + +#define VK_EXT_descriptor_indexing 1 +#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 +#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" +typedef VkDescriptorBindingFlagBits VkDescriptorBindingFlagBitsEXT; + +typedef VkDescriptorBindingFlags VkDescriptorBindingFlagsEXT; + +typedef VkDescriptorSetLayoutBindingFlagsCreateInfo VkDescriptorSetLayoutBindingFlagsCreateInfoEXT; + +typedef VkPhysicalDeviceDescriptorIndexingFeatures VkPhysicalDeviceDescriptorIndexingFeaturesEXT; + +typedef VkPhysicalDeviceDescriptorIndexingProperties VkPhysicalDeviceDescriptorIndexingPropertiesEXT; + +typedef VkDescriptorSetVariableDescriptorCountAllocateInfo VkDescriptorSetVariableDescriptorCountAllocateInfoEXT; + +typedef VkDescriptorSetVariableDescriptorCountLayoutSupport VkDescriptorSetVariableDescriptorCountLayoutSupportEXT; + + + +#define VK_EXT_shader_viewport_index_layer 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" + + +#define VK_NV_shading_rate_image 1 +#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3 +#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image" + +typedef enum VkShadingRatePaletteEntryNV { + VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0, + VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1, + VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2, + VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3, + VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11, + VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF +} VkShadingRatePaletteEntryNV; + +typedef enum VkCoarseSampleOrderTypeNV { + VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0, + VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1, + VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2, + 
VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3, + VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoarseSampleOrderTypeNV; +typedef struct VkShadingRatePaletteNV { + uint32_t shadingRatePaletteEntryCount; + const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries; +} VkShadingRatePaletteNV; + +typedef struct VkPipelineViewportShadingRateImageStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 shadingRateImageEnable; + uint32_t viewportCount; + const VkShadingRatePaletteNV* pShadingRatePalettes; +} VkPipelineViewportShadingRateImageStateCreateInfoNV; + +typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shadingRateImage; + VkBool32 shadingRateCoarseSampleOrder; +} VkPhysicalDeviceShadingRateImageFeaturesNV; + +typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV { + VkStructureType sType; + void* pNext; + VkExtent2D shadingRateTexelSize; + uint32_t shadingRatePaletteSize; + uint32_t shadingRateMaxCoarseSamples; +} VkPhysicalDeviceShadingRateImagePropertiesNV; + +typedef struct VkCoarseSampleLocationNV { + uint32_t pixelX; + uint32_t pixelY; + uint32_t sample; +} VkCoarseSampleLocationNV; + +typedef struct VkCoarseSampleOrderCustomNV { + VkShadingRatePaletteEntryNV shadingRate; + uint32_t sampleCount; + uint32_t sampleLocationCount; + const VkCoarseSampleLocationNV* pSampleLocations; +} VkCoarseSampleOrderCustomNV; + +typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkCoarseSampleOrderTypeNV sampleOrderType; + uint32_t customSampleOrderCount; + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders; +} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindShadingRateImageNV( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV* pShadingRatePalettes); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( + VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); +#endif + + +#define VK_NV_ray_tracing 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) +#define VK_NV_RAY_TRACING_SPEC_VERSION 3 +#define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" +#define VK_SHADER_UNUSED_KHR (~0U) +#define VK_SHADER_UNUSED_NV VK_SHADER_UNUSED_KHR + +typedef enum VkRayTracingShaderGroupTypeKHR { + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2, + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + 
VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkRayTracingShaderGroupTypeKHR; +typedef VkRayTracingShaderGroupTypeKHR VkRayTracingShaderGroupTypeNV; + + +typedef enum VkGeometryTypeKHR { + VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0, + VK_GEOMETRY_TYPE_AABBS_KHR = 1, + VK_GEOMETRY_TYPE_INSTANCES_KHR = 2, + VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR, + VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryTypeKHR; +typedef VkGeometryTypeKHR VkGeometryTypeNV; + + +typedef enum VkAccelerationStructureTypeKHR { + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1, + VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR = 2, + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureTypeKHR; +typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV; + + +typedef enum VkCopyAccelerationStructureModeKHR { + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1, + VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2, + VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3, + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCopyAccelerationStructureModeKHR; +typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV; + + +typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMemoryRequirementsTypeNV; + +typedef enum VkGeometryFlagBitsKHR { + VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002, + VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR, + VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryFlagBitsKHR; +typedef VkFlags VkGeometryFlagsKHR; +typedef VkGeometryFlagsKHR VkGeometryFlagsNV; + +typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV; + + +typedef enum VkGeometryInstanceFlagBitsKHR { + VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = 0x00000002, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008, + VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + 
VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryInstanceFlagBitsKHR; +typedef VkFlags VkGeometryInstanceFlagsKHR; +typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV; + +typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV; + + +typedef enum VkBuildAccelerationStructureFlagBitsKHR { + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureFlagBitsKHR; +typedef VkFlags VkBuildAccelerationStructureFlagsKHR; +typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV; + +typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV; + +typedef struct VkRayTracingShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; +} VkRayTracingShaderGroupCreateInfoNV; + +typedef struct VkRayTracingPipelineCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoNV* pGroups; + uint32_t maxRecursionDepth; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoNV; + +typedef struct VkGeometryTrianglesNV { + VkStructureType sType; + const void* pNext; + VkBuffer vertexData; + VkDeviceSize vertexOffset; + uint32_t vertexCount; + VkDeviceSize vertexStride; + VkFormat vertexFormat; + VkBuffer indexData; + VkDeviceSize indexOffset; + uint32_t indexCount; + VkIndexType indexType; + VkBuffer transformData; + VkDeviceSize transformOffset; +} VkGeometryTrianglesNV; + +typedef struct VkGeometryAABBNV { + VkStructureType sType; + const void* pNext; + VkBuffer aabbData; + uint32_t numAABBs; + uint32_t stride; + VkDeviceSize offset; +} VkGeometryAABBNV; + +typedef struct VkGeometryDataNV { + VkGeometryTrianglesNV triangles; + VkGeometryAABBNV aabbs; +} VkGeometryDataNV; + +typedef struct VkGeometryNV { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkGeometryDataNV geometry; + VkGeometryFlagsKHR flags; +} VkGeometryNV; + +typedef struct VkAccelerationStructureInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeNV type; + 
VkBuildAccelerationStructureFlagsNV flags; + uint32_t instanceCount; + uint32_t geometryCount; + const VkGeometryNV* pGeometries; +} VkAccelerationStructureInfoNV; + +typedef struct VkAccelerationStructureCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceSize compactedSize; + VkAccelerationStructureInfoNV info; +} VkAccelerationStructureCreateInfoNV; + +typedef struct VkBindAccelerationStructureMemoryInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureNV accelerationStructure; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindAccelerationStructureMemoryInfoNV; + +typedef struct VkWriteDescriptorSetAccelerationStructureNV { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureNV* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureNV; + +typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureMemoryRequirementsTypeNV type; + VkAccelerationStructureNV accelerationStructure; +} VkAccelerationStructureMemoryRequirementsInfoNV; + +typedef struct VkPhysicalDeviceRayTracingPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxTriangleCount; + uint32_t maxDescriptorSetAccelerationStructures; +} VkPhysicalDeviceRayTracingPropertiesNV; + +typedef struct VkTransformMatrixKHR { + float matrix[3][4]; +} VkTransformMatrixKHR; + +typedef VkTransformMatrixKHR VkTransformMatrixNV; + +typedef struct VkAabbPositionsKHR { + float minX; + float minY; + float minZ; + float maxX; + float maxY; + float maxZ; +} VkAabbPositionsKHR; + +typedef VkAabbPositionsKHR VkAabbPositionsNV; + +typedef struct VkAccelerationStructureInstanceKHR { + VkTransformMatrixKHR transform; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureInstanceKHR; + +typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer 
commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( + VkDevice device, + const VkAccelerationStructureCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureNV* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( + VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( + VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( + VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV* pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( + VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( + VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer 
hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV( + VkDevice device, + VkPipeline pipeline, + uint32_t shader); +#endif + + +#define VK_NV_representative_fragment_test 1 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test" +typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 representativeFragmentTest; +} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 representativeFragmentTestEnable; +} VkPipelineRepresentativeFragmentTestStateCreateInfoNV; + + + +#define VK_EXT_filter_cubic 1 +#define VK_EXT_FILTER_CUBIC_SPEC_VERSION 3 +#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic" +typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT { + VkStructureType sType; + void* pNext; + VkImageViewType imageViewType; +} VkPhysicalDeviceImageViewImageFormatInfoEXT; + +typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 filterCubic; + VkBool32 filterCubicMinmax; +} VkFilterCubicImageViewImageFormatPropertiesEXT; + + + +#define VK_QCOM_render_pass_shader_resolve 1 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve" + + +#define VK_EXT_global_priority 1 +#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 +#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" + +typedef enum VkQueueGlobalPriorityEXT { + VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = 128, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = 256, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = 512, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = 1024, + VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_EXT = 0x7FFFFFFF +} VkQueueGlobalPriorityEXT; +typedef struct VkDeviceQueueGlobalPriorityCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkQueueGlobalPriorityEXT globalPriority; +} 
VkDeviceQueueGlobalPriorityCreateInfoEXT; + + + +#define VK_EXT_external_memory_host 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" +typedef struct VkImportMemoryHostPointerInfoEXT { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + void* pHostPointer; +} VkImportMemoryHostPointerInfoEXT; + +typedef struct VkMemoryHostPointerPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryHostPointerPropertiesEXT; + +typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize minImportedHostPointerAlignment; +} VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void* pHostPointer, + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); +#endif + + +#define VK_AMD_buffer_marker 1 +#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 +#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); +#endif + + +#define VK_AMD_pipeline_compiler_control 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control" + +typedef enum VkPipelineCompilerControlFlagBitsAMD { + VK_PIPELINE_COMPILER_CONTROL_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkPipelineCompilerControlFlagBitsAMD; +typedef VkFlags VkPipelineCompilerControlFlagsAMD; +typedef struct VkPipelineCompilerControlCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkPipelineCompilerControlFlagsAMD compilerControlFlags; +} VkPipelineCompilerControlCreateInfoAMD; + + + +#define VK_EXT_calibrated_timestamps 1 +#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 2 +#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps" + +typedef enum VkTimeDomainEXT { + VK_TIME_DOMAIN_DEVICE_EXT = 0, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = 1, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = 2, + VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = 3, + VK_TIME_DOMAIN_MAX_ENUM_EXT = 0x7FFFFFFF +} VkTimeDomainEXT; +typedef struct VkCalibratedTimestampInfoEXT { + VkStructureType sType; + const void* pNext; + VkTimeDomainEXT timeDomain; +} VkCalibratedTimestampInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains); +typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL 
vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainEXT* pTimeDomains); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT( + VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoEXT* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); +#endif + + +#define VK_AMD_shader_core_properties 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2 +#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" +typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { + VkStructureType sType; + void* pNext; + uint32_t shaderEngineCount; + uint32_t shaderArraysPerEngineCount; + uint32_t computeUnitsPerShaderArray; + uint32_t simdPerComputeUnit; + uint32_t wavefrontsPerSimd; + uint32_t wavefrontSize; + uint32_t sgprsPerSimd; + uint32_t minSgprAllocation; + uint32_t maxSgprAllocation; + uint32_t sgprAllocationGranularity; + uint32_t vgprsPerSimd; + uint32_t minVgprAllocation; + uint32_t maxVgprAllocation; + uint32_t vgprAllocationGranularity; +} VkPhysicalDeviceShaderCorePropertiesAMD; + + + +#define VK_AMD_memory_overallocation_behavior 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior" + +typedef enum VkMemoryOverallocationBehaviorAMD { + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF +} VkMemoryOverallocationBehaviorAMD; +typedef struct VkDeviceMemoryOverallocationCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkMemoryOverallocationBehaviorAMD overallocationBehavior; +} VkDeviceMemoryOverallocationCreateInfoAMD; + + + +#define VK_EXT_vertex_attribute_divisor 1 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" +typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; +} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +typedef struct VkVertexInputBindingDivisorDescriptionEXT { + uint32_t binding; + uint32_t divisor; +} VkVertexInputBindingDivisorDescriptionEXT; + +typedef struct VkPipelineVertexInputDivisorStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t vertexBindingDivisorCount; + const VkVertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors; +} VkPipelineVertexInputDivisorStateCreateInfoEXT; + +typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeInstanceRateDivisor; + VkBool32 vertexAttributeInstanceRateZeroDivisor; +} VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; + + + +#define VK_EXT_pipeline_creation_feedback 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback" + +typedef enum VkPipelineCreationFeedbackFlagBitsEXT { + VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT = 0x00000001, + VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT = 0x00000002, + VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT = 0x00000004, + VK_PIPELINE_CREATION_FEEDBACK_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} 
VkPipelineCreationFeedbackFlagBitsEXT; +typedef VkFlags VkPipelineCreationFeedbackFlagsEXT; +typedef struct VkPipelineCreationFeedbackEXT { + VkPipelineCreationFeedbackFlagsEXT flags; + uint64_t duration; +} VkPipelineCreationFeedbackEXT; + +typedef struct VkPipelineCreationFeedbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineCreationFeedbackEXT* pPipelineCreationFeedback; + uint32_t pipelineStageCreationFeedbackCount; + VkPipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks; +} VkPipelineCreationFeedbackCreateInfoEXT; + + + +#define VK_NV_shader_subgroup_partitioned 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" + + +#define VK_NV_compute_shader_derivatives 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" +typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 computeDerivativeGroupQuads; + VkBool32 computeDerivativeGroupLinear; +} VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; + + + +#define VK_NV_mesh_shader 1 +#define VK_NV_MESH_SHADER_SPEC_VERSION 1 +#define VK_NV_MESH_SHADER_EXTENSION_NAME "VK_NV_mesh_shader" +typedef struct VkPhysicalDeviceMeshShaderFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 taskShader; + VkBool32 meshShader; +} VkPhysicalDeviceMeshShaderFeaturesNV; + +typedef struct VkPhysicalDeviceMeshShaderPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxDrawMeshTasksCount; + uint32_t maxTaskWorkGroupInvocations; + uint32_t maxTaskWorkGroupSize[3]; + uint32_t maxTaskTotalMemorySize; + uint32_t maxTaskOutputCount; + uint32_t maxMeshWorkGroupInvocations; + uint32_t maxMeshWorkGroupSize[3]; + uint32_t maxMeshTotalMemorySize; + uint32_t maxMeshOutputVertices; + uint32_t maxMeshOutputPrimitives; + uint32_t maxMeshMultiviewViewCount; + uint32_t meshOutputPerVertexGranularity; + uint32_t meshOutputPerPrimitiveGranularity; +} VkPhysicalDeviceMeshShaderPropertiesNV; + +typedef struct VkDrawMeshTasksIndirectCommandNV { + uint32_t taskCount; + uint32_t firstTask; +} VkDrawMeshTasksIndirectCommandNV; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV( + VkCommandBuffer commandBuffer, + uint32_t taskCount, + uint32_t firstTask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +#define VK_NV_fragment_shader_barycentric 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 +#define 
VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric" +typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShaderBarycentric; +} VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV; + + + +#define VK_NV_shader_image_footprint 1 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint" +typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 imageFootprint; +} VkPhysicalDeviceShaderImageFootprintFeaturesNV; + + + +#define VK_NV_scissor_exclusive 1 +#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 1 +#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive" +typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t exclusiveScissorCount; + const VkRect2D* pExclusiveScissors; +} VkPipelineViewportExclusiveScissorStateCreateInfoNV; + +typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 exclusiveScissor; +} VkPhysicalDeviceExclusiveScissorFeaturesNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV( + VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D* pExclusiveScissors); +#endif + + +#define VK_NV_device_diagnostic_checkpoints 1 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints" +typedef struct VkQueueFamilyCheckpointPropertiesNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags checkpointExecutionStageMask; +} VkQueueFamilyCheckpointPropertiesNV; + +typedef struct VkCheckpointDataNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlagBits stage; + void* pCheckpointMarker; +} VkCheckpointDataNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( + VkCommandBuffer commandBuffer, + const void* pCheckpointMarker); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointDataNV* pCheckpointData); +#endif + + +#define VK_INTEL_shader_integer_functions2 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2" +typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { + VkStructureType sType; + void* pNext; + VkBool32 shaderIntegerFunctions2; +} VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + + +#define VK_INTEL_performance_query 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) +#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2 +#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query" + +typedef enum VkPerformanceConfigurationTypeINTEL { + 
VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0, + VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceConfigurationTypeINTEL; + +typedef enum VkQueryPoolSamplingModeINTEL { + VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0, + VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkQueryPoolSamplingModeINTEL; + +typedef enum VkPerformanceOverrideTypeINTEL { + VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0, + VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1, + VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceOverrideTypeINTEL; + +typedef enum VkPerformanceParameterTypeINTEL { + VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0, + VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1, + VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceParameterTypeINTEL; + +typedef enum VkPerformanceValueTypeINTEL { + VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL = 0, + VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL = 1, + VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2, + VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3, + VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4, + VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceValueTypeINTEL; +typedef union VkPerformanceValueDataINTEL { + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char* valueString; +} VkPerformanceValueDataINTEL; + +typedef struct VkPerformanceValueINTEL { + VkPerformanceValueTypeINTEL type; + VkPerformanceValueDataINTEL data; +} VkPerformanceValueINTEL; + +typedef struct VkInitializePerformanceApiInfoINTEL { + VkStructureType sType; + const void* pNext; + void* pUserData; +} VkInitializePerformanceApiInfoINTEL; + +typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL { + VkStructureType sType; + const void* pNext; + VkQueryPoolSamplingModeINTEL performanceCountersSampling; +} VkQueryPoolPerformanceQueryCreateInfoINTEL; + +typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL; + +typedef struct VkPerformanceMarkerInfoINTEL { + VkStructureType sType; + const void* pNext; + uint64_t marker; +} VkPerformanceMarkerInfoINTEL; + +typedef struct VkPerformanceStreamMarkerInfoINTEL { + VkStructureType sType; + const void* pNext; + uint32_t marker; +} VkPerformanceStreamMarkerInfoINTEL; + +typedef struct VkPerformanceOverrideInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceOverrideTypeINTEL type; + VkBool32 enable; + uint64_t parameter; +} VkPerformanceOverrideInfoINTEL; + +typedef struct VkPerformanceConfigurationAcquireInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceConfigurationTypeINTEL type; +} VkPerformanceConfigurationAcquireInfoINTEL; + +typedef VkResult (VKAPI_PTR *PFN_vkInitializePerformanceApiINTEL)(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); +typedef void (VKAPI_PTR *PFN_vkUninitializePerformanceApiINTEL)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceStreamMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceOverrideINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo); +typedef VkResult (VKAPI_PTR *PFN_vkAcquirePerformanceConfigurationINTEL)(VkDevice 
device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration); +typedef VkResult (VKAPI_PTR *PFN_vkReleasePerformanceConfigurationINTEL)(VkDevice device, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSetPerformanceConfigurationINTEL)(VkQueue queue, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkGetPerformanceParameterINTEL)(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkInitializePerformanceApiINTEL( + VkDevice device, + const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); + +VKAPI_ATTR void VKAPI_CALL vkUninitializePerformanceApiINTEL( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceMarkerInfoINTEL* pMarkerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceStreamMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceOverrideINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceOverrideInfoINTEL* pOverrideInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquirePerformanceConfigurationINTEL( + VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, + VkPerformanceConfigurationINTEL* pConfiguration); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleasePerformanceConfigurationINTEL( + VkDevice device, + VkPerformanceConfigurationINTEL configuration); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSetPerformanceConfigurationINTEL( + VkQueue queue, + VkPerformanceConfigurationINTEL configuration); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL( + VkDevice device, + VkPerformanceParameterTypeINTEL parameter, + VkPerformanceValueINTEL* pValue); +#endif + + +#define VK_EXT_pci_bus_info 1 +#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION 2 +#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info" +typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t pciDomain; + uint32_t pciBus; + uint32_t pciDevice; + uint32_t pciFunction; +} VkPhysicalDevicePCIBusInfoPropertiesEXT; + + + +#define VK_AMD_display_native_hdr 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr" +typedef struct VkDisplayNativeHdrSurfaceCapabilitiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 localDimmingSupport; +} VkDisplayNativeHdrSurfaceCapabilitiesAMD; + +typedef struct VkSwapchainDisplayNativeHdrCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkBool32 localDimmingEnable; +} VkSwapchainDisplayNativeHdrCreateInfoAMD; + +typedef void (VKAPI_PTR *PFN_vkSetLocalDimmingAMD)(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD( + VkDevice device, + VkSwapchainKHR swapChain, + VkBool32 localDimmingEnable); +#endif + + +#define VK_EXT_fragment_density_map 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map" +typedef struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMap; + VkBool32 fragmentDensityMapDynamic; + VkBool32 
fragmentDensityMapNonSubsampledImages; +} VkPhysicalDeviceFragmentDensityMapFeaturesEXT; + +typedef struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentDensityTexelSize; + VkExtent2D maxFragmentDensityTexelSize; + VkBool32 fragmentDensityInvocations; +} VkPhysicalDeviceFragmentDensityMapPropertiesEXT; + +typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkAttachmentReference fragmentDensityMapAttachment; +} VkRenderPassFragmentDensityMapCreateInfoEXT; + + + +#define VK_EXT_scalar_block_layout 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout" +typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLayoutFeaturesEXT; + + + +#define VK_GOOGLE_hlsl_functionality1 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" + + +#define VK_GOOGLE_decorate_string 1 +#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1 +#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string" + + +#define VK_EXT_subgroup_size_control 1 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control" +typedef struct VkPhysicalDeviceSubgroupSizeControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subgroupSizeControl; + VkBool32 computeFullSubgroups; +} VkPhysicalDeviceSubgroupSizeControlFeaturesEXT; + +typedef struct VkPhysicalDeviceSubgroupSizeControlPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t minSubgroupSize; + uint32_t maxSubgroupSize; + uint32_t maxComputeWorkgroupSubgroups; + VkShaderStageFlags requiredSubgroupSizeStages; +} VkPhysicalDeviceSubgroupSizeControlPropertiesEXT; + +typedef struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT { + VkStructureType sType; + void* pNext; + uint32_t requiredSubgroupSize; +} VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + + + +#define VK_AMD_shader_core_properties2 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2" + +typedef enum VkShaderCorePropertiesFlagBitsAMD { + VK_SHADER_CORE_PROPERTIES_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderCorePropertiesFlagBitsAMD; +typedef VkFlags VkShaderCorePropertiesFlagsAMD; +typedef struct VkPhysicalDeviceShaderCoreProperties2AMD { + VkStructureType sType; + void* pNext; + VkShaderCorePropertiesFlagsAMD shaderCoreFeatures; + uint32_t activeComputeUnitCount; +} VkPhysicalDeviceShaderCoreProperties2AMD; + + + +#define VK_AMD_device_coherent_memory 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory" +typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 deviceCoherentMemory; +} VkPhysicalDeviceCoherentMemoryFeaturesAMD; + + + +#define VK_EXT_shader_image_atomic_int64 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64" +typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderImageInt64Atomics; + VkBool32 sparseImageInt64Atomics; +} 
VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + + +#define VK_EXT_memory_budget 1 +#define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1 +#define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget" +typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize heapBudget[VK_MAX_MEMORY_HEAPS]; + VkDeviceSize heapUsage[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryBudgetPropertiesEXT; + + + +#define VK_EXT_memory_priority 1 +#define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1 +#define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority" +typedef struct VkPhysicalDeviceMemoryPriorityFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 memoryPriority; +} VkPhysicalDeviceMemoryPriorityFeaturesEXT; + +typedef struct VkMemoryPriorityAllocateInfoEXT { + VkStructureType sType; + const void* pNext; + float priority; +} VkMemoryPriorityAllocateInfoEXT; + + + +#define VK_NV_dedicated_allocation_image_aliasing 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing" +typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 dedicatedAllocationImageAliasing; +} VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + + +#define VK_EXT_buffer_device_address 1 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address" +typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; +} VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; + +typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT; + +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoEXT; + +typedef struct VkBufferDeviceAddressCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceAddress deviceAddress; +} VkBufferDeviceAddressCreateInfoEXT; + +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); +#endif + + +#define VK_EXT_tooling_info 1 +#define VK_EXT_TOOLING_INFO_SPEC_VERSION 1 +#define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info" + +typedef enum VkToolPurposeFlagBitsEXT { + VK_TOOL_PURPOSE_VALIDATION_BIT_EXT = 0x00000001, + VK_TOOL_PURPOSE_PROFILING_BIT_EXT = 0x00000002, + VK_TOOL_PURPOSE_TRACING_BIT_EXT = 0x00000004, + VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT = 0x00000008, + VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT = 0x00000010, + VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT = 0x00000020, + VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT = 0x00000040, + VK_TOOL_PURPOSE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkToolPurposeFlagBitsEXT; +typedef VkFlags VkToolPurposeFlagsEXT; +typedef struct VkPhysicalDeviceToolPropertiesEXT { + VkStructureType sType; + void* pNext; + char name[VK_MAX_EXTENSION_NAME_SIZE]; + char version[VK_MAX_EXTENSION_NAME_SIZE]; + VkToolPurposeFlagsEXT purposes; + char description[VK_MAX_DESCRIPTION_SIZE]; + char layer[VK_MAX_EXTENSION_NAME_SIZE]; +} VkPhysicalDeviceToolPropertiesEXT; + +typedef VkResult (VKAPI_PTR 
*PFN_vkGetPhysicalDeviceToolPropertiesEXT)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolPropertiesEXT* pToolProperties); +#endif + + +#define VK_EXT_separate_stencil_usage 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage" +typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT; + + + +#define VK_EXT_validation_features 1 +#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 4 +#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" + +typedef enum VkValidationFeatureEnableEXT { + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0, + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1, + VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2, + VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3, + VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4, + VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureEnableEXT; + +typedef enum VkValidationFeatureDisableEXT { + VK_VALIDATION_FEATURE_DISABLE_ALL_EXT = 0, + VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT = 1, + VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT = 2, + VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT = 3, + VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4, + VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5, + VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6, + VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureDisableEXT; +typedef struct VkValidationFeaturesEXT { + VkStructureType sType; + const void* pNext; + uint32_t enabledValidationFeatureCount; + const VkValidationFeatureEnableEXT* pEnabledValidationFeatures; + uint32_t disabledValidationFeatureCount; + const VkValidationFeatureDisableEXT* pDisabledValidationFeatures; +} VkValidationFeaturesEXT; + + + +#define VK_NV_cooperative_matrix 1 +#define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1 +#define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix" + +typedef enum VkComponentTypeNV { + VK_COMPONENT_TYPE_FLOAT16_NV = 0, + VK_COMPONENT_TYPE_FLOAT32_NV = 1, + VK_COMPONENT_TYPE_FLOAT64_NV = 2, + VK_COMPONENT_TYPE_SINT8_NV = 3, + VK_COMPONENT_TYPE_SINT16_NV = 4, + VK_COMPONENT_TYPE_SINT32_NV = 5, + VK_COMPONENT_TYPE_SINT64_NV = 6, + VK_COMPONENT_TYPE_UINT8_NV = 7, + VK_COMPONENT_TYPE_UINT16_NV = 8, + VK_COMPONENT_TYPE_UINT32_NV = 9, + VK_COMPONENT_TYPE_UINT64_NV = 10, + VK_COMPONENT_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkComponentTypeNV; + +typedef enum VkScopeNV { + VK_SCOPE_DEVICE_NV = 1, + VK_SCOPE_WORKGROUP_NV = 2, + VK_SCOPE_SUBGROUP_NV = 3, + VK_SCOPE_QUEUE_FAMILY_NV = 5, + VK_SCOPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkScopeNV; +typedef struct VkCooperativeMatrixPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t MSize; + uint32_t NSize; + uint32_t KSize; + VkComponentTypeNV AType; + VkComponentTypeNV BType; + VkComponentTypeNV CType; + VkComponentTypeNV DType; + VkScopeNV scope; +} VkCooperativeMatrixPropertiesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrix; + VkBool32 cooperativeMatrixRobustBufferAccess; +} VkPhysicalDeviceCooperativeMatrixFeaturesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesNV { + 
VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeMatrixSupportedStages; +} VkPhysicalDeviceCooperativeMatrixPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesNV* pProperties); +#endif + + +#define VK_NV_coverage_reduction_mode 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode" + +typedef enum VkCoverageReductionModeNV { + VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0, + VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1, + VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageReductionModeNV; +typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV; +typedef struct VkPhysicalDeviceCoverageReductionModeFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 coverageReductionMode; +} VkPhysicalDeviceCoverageReductionModeFeaturesNV; + +typedef struct VkPipelineCoverageReductionStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageReductionStateCreateFlagsNV flags; + VkCoverageReductionModeNV coverageReductionMode; +} VkPipelineCoverageReductionStateCreateInfoNV; + +typedef struct VkFramebufferMixedSamplesCombinationNV { + VkStructureType sType; + void* pNext; + VkCoverageReductionModeNV coverageReductionMode; + VkSampleCountFlagBits rasterizationSamples; + VkSampleCountFlags depthStencilSamples; + VkSampleCountFlags colorSamples; +} VkFramebufferMixedSamplesCombinationNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, + uint32_t* pCombinationCount, + VkFramebufferMixedSamplesCombinationNV* pCombinations); +#endif + + +#define VK_EXT_fragment_shader_interlock 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock" +typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShaderSampleInterlock; + VkBool32 fragmentShaderPixelInterlock; + VkBool32 fragmentShaderShadingRateInterlock; +} VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + + +#define VK_EXT_ycbcr_image_arrays 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays" +typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 ycbcrImageArrays; +} VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; + + + +#define VK_EXT_provoking_vertex 1 +#define VK_EXT_PROVOKING_VERTEX_SPEC_VERSION 1 +#define VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME "VK_EXT_provoking_vertex" + +typedef enum VkProvokingVertexModeEXT { + VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT = 0, + VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT = 1, + VK_PROVOKING_VERTEX_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkProvokingVertexModeEXT; +typedef struct 
VkPhysicalDeviceProvokingVertexFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexLast; + VkBool32 transformFeedbackPreservesProvokingVertex; +} VkPhysicalDeviceProvokingVertexFeaturesEXT; + +typedef struct VkPhysicalDeviceProvokingVertexPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexModePerPipeline; + VkBool32 transformFeedbackPreservesTriangleFanProvokingVertex; +} VkPhysicalDeviceProvokingVertexPropertiesEXT; + +typedef struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkProvokingVertexModeEXT provokingVertexMode; +} VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; + + + +#define VK_EXT_headless_surface 1 +#define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1 +#define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface" +typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT; +typedef struct VkHeadlessSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkHeadlessSurfaceCreateFlagsEXT flags; +} VkHeadlessSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateHeadlessSurfaceEXT)(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT( + VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +#define VK_EXT_line_rasterization 1 +#define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization" + +typedef enum VkLineRasterizationModeEXT { + VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = 0, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = 1, + VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = 2, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = 3, + VK_LINE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkLineRasterizationModeEXT; +typedef struct VkPhysicalDeviceLineRasterizationFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 rectangularLines; + VkBool32 bresenhamLines; + VkBool32 smoothLines; + VkBool32 stippledRectangularLines; + VkBool32 stippledBresenhamLines; + VkBool32 stippledSmoothLines; +} VkPhysicalDeviceLineRasterizationFeaturesEXT; + +typedef struct VkPhysicalDeviceLineRasterizationPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t lineSubPixelPrecisionBits; +} VkPhysicalDeviceLineRasterizationPropertiesEXT; + +typedef struct VkPipelineRasterizationLineStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkLineRasterizationModeEXT lineRasterizationMode; + VkBool32 stippledLineEnable; + uint32_t lineStippleFactor; + uint16_t lineStipplePattern; +} VkPipelineRasterizationLineStateCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); +#endif + + +#define VK_EXT_shader_atomic_float 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float" +typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat32Atomics; + VkBool32 
shaderBufferFloat32AtomicAdd; + VkBool32 shaderBufferFloat64Atomics; + VkBool32 shaderBufferFloat64AtomicAdd; + VkBool32 shaderSharedFloat32Atomics; + VkBool32 shaderSharedFloat32AtomicAdd; + VkBool32 shaderSharedFloat64Atomics; + VkBool32 shaderSharedFloat64AtomicAdd; + VkBool32 shaderImageFloat32Atomics; + VkBool32 shaderImageFloat32AtomicAdd; + VkBool32 sparseImageFloat32Atomics; + VkBool32 sparseImageFloat32AtomicAdd; +} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + + + +#define VK_EXT_host_query_reset 1 +#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1 +#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset" +typedef VkPhysicalDeviceHostQueryResetFeatures VkPhysicalDeviceHostQueryResetFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); +#endif + + +#define VK_EXT_index_type_uint8 1 +#define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1 +#define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8" +typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 indexTypeUint8; +} VkPhysicalDeviceIndexTypeUint8FeaturesEXT; + + + +#define VK_EXT_extended_dynamic_state 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state" +typedef struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState; +} VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCullModeEXT)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFaceEXT)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopologyEXT)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2EXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOpEXT)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOpEXT)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCullModeEXT( + VkCommandBuffer commandBuffer, + 
VkCullModeFlags cullMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFaceEXT( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopologyEXT( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2EXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOpEXT( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); +#endif + + +#define VK_EXT_shader_demote_to_helper_invocation 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation" +typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderDemoteToHelperInvocation; +} VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + + + +#define VK_NV_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) +#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 +#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands" + +typedef enum VkIndirectCommandsTokenTypeNV { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeNV; + +typedef enum VkIndirectStateFlagBitsNV { + VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001, + VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectStateFlagBitsNV; +typedef VkFlags VkIndirectStateFlagsNV; + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsNV { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} 
VkIndirectCommandsLayoutUsageFlagBitsNV; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxGraphicsShaderGroupCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsStreamCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsStreamStride; + uint32_t minSequencesCountBufferOffsetAlignment; + uint32_t minSequencesIndexBufferOffsetAlignment; + uint32_t minIndirectCommandsBufferOffsetAlignment; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +typedef struct VkGraphicsShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; +} VkGraphicsShaderGroupCreateInfoNV; + +typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t groupCount; + const VkGraphicsShaderGroupCreateInfoNV* pGroups; + uint32_t pipelineCount; + const VkPipeline* pPipelines; +} VkGraphicsPipelineShaderGroupsCreateInfoNV; + +typedef struct VkBindShaderGroupIndirectCommandNV { + uint32_t groupIndex; +} VkBindShaderGroupIndirectCommandNV; + +typedef struct VkBindIndexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandNV; + +typedef struct VkBindVertexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandNV; + +typedef struct VkSetStateFlagsIndirectCommandNV { + uint32_t data; +} VkSetStateFlagsIndirectCommandNV; + +typedef struct VkIndirectCommandsStreamNV { + VkBuffer buffer; + VkDeviceSize offset; +} VkIndirectCommandsStreamNV; + +typedef struct VkIndirectCommandsLayoutTokenNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeNV tokenType; + uint32_t stream; + uint32_t offset; + uint32_t vertexBindingUnit; + VkBool32 vertexDynamicStride; + VkPipelineLayout pushconstantPipelineLayout; + VkShaderStageFlags pushconstantShaderStageFlags; + uint32_t pushconstantOffset; + uint32_t pushconstantSize; + VkIndirectStateFlagsNV indirectStateFlags; + uint32_t indexTypeCount; + const VkIndexType* pIndexTypes; + const uint32_t* pIndexTypeValues; +} VkIndirectCommandsLayoutTokenNV; + +typedef struct VkIndirectCommandsLayoutCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsNV flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenNV* pTokens; + uint32_t streamCount; + const uint32_t* pStreamStrides; +} VkIndirectCommandsLayoutCreateInfoNV; + +typedef struct VkGeneratedCommandsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t streamCount; + const VkIndirectCommandsStreamNV* pStreams; + uint32_t sequencesCount; + VkBuffer preprocessBuffer; + VkDeviceSize preprocessOffset; + VkDeviceSize preprocessSize; + VkBuffer sequencesCountBuffer; + 
VkDeviceSize sequencesCountOffset; + VkBuffer sequencesIndexBuffer; + VkDeviceSize sequencesIndexOffset; +} VkGeneratedCommandsInfoNV; + +typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t maxSequencesCount; +} VkGeneratedCommandsMemoryRequirementsInfoNV; + +typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsNV( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV( + VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); +#endif + + +#define VK_NV_inherited_viewport_scissor 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME "VK_NV_inherited_viewport_scissor" +typedef struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 inheritedViewportScissor2D; +} VkPhysicalDeviceInheritedViewportScissorFeaturesNV; + +typedef struct VkCommandBufferInheritanceViewportScissorInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportScissor2D; + uint32_t viewportDepthCount; + const VkViewport* pViewportDepths; +} VkCommandBufferInheritanceViewportScissorInfoNV; + + + +#define VK_EXT_texel_buffer_alignment 1 +#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 +#define 
VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" +typedef struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 texelBufferAlignment; +} VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; + +typedef struct VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize storageTexelBufferOffsetAlignmentBytes; + VkBool32 storageTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize uniformTexelBufferOffsetAlignmentBytes; + VkBool32 uniformTexelBufferOffsetSingleTexelAlignment; +} VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT; + + + +#define VK_QCOM_render_pass_transform 1 +#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 2 +#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform" +typedef struct VkRenderPassTransformBeginInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkRenderPassTransformBeginInfoQCOM; + +typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; + VkRect2D renderArea; +} VkCommandBufferInheritanceRenderPassTransformInfoQCOM; + + + +#define VK_EXT_device_memory_report 1 +#define VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION 2 +#define VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME "VK_EXT_device_memory_report" + +typedef enum VkDeviceMemoryReportEventTypeEXT { + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT = 0, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT = 1, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT = 2, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT = 3, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT = 4, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceMemoryReportEventTypeEXT; +typedef VkFlags VkDeviceMemoryReportFlagsEXT; +typedef struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceMemoryReport; +} VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; + +typedef struct VkDeviceMemoryReportCallbackDataEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + VkDeviceMemoryReportEventTypeEXT type; + uint64_t memoryObjectId; + VkDeviceSize size; + VkObjectType objectType; + uint64_t objectHandle; + uint32_t heapIndex; +} VkDeviceMemoryReportCallbackDataEXT; + +typedef void (VKAPI_PTR *PFN_vkDeviceMemoryReportCallbackEXT)( + const VkDeviceMemoryReportCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDeviceDeviceMemoryReportCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback; + void* pUserData; +} VkDeviceDeviceMemoryReportCreateInfoEXT; + + + +#define VK_EXT_robustness2 1 +#define VK_EXT_ROBUSTNESS_2_SPEC_VERSION 1 +#define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2" +typedef struct VkPhysicalDeviceRobustness2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 robustBufferAccess2; + VkBool32 robustImageAccess2; + VkBool32 nullDescriptor; +} VkPhysicalDeviceRobustness2FeaturesEXT; + +typedef struct VkPhysicalDeviceRobustness2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize robustStorageBufferAccessSizeAlignment; + VkDeviceSize robustUniformBufferAccessSizeAlignment; +} VkPhysicalDeviceRobustness2PropertiesEXT; + + + +#define VK_EXT_custom_border_color 1 +#define 
VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12 +#define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color" +typedef struct VkSamplerCustomBorderColorCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkClearColorValue customBorderColor; + VkFormat format; +} VkSamplerCustomBorderColorCreateInfoEXT; + +typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxCustomBorderColorSamplers; +} VkPhysicalDeviceCustomBorderColorPropertiesEXT; + +typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 customBorderColors; + VkBool32 customBorderColorWithoutFormat; +} VkPhysicalDeviceCustomBorderColorFeaturesEXT; + + + +#define VK_GOOGLE_user_type 1 +#define VK_GOOGLE_USER_TYPE_SPEC_VERSION 1 +#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type" + + +#define VK_EXT_private_data 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPrivateDataSlotEXT) +#define VK_EXT_PRIVATE_DATA_SPEC_VERSION 1 +#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data" + +typedef enum VkPrivateDataSlotCreateFlagBitsEXT { + VK_PRIVATE_DATA_SLOT_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPrivateDataSlotCreateFlagBitsEXT; +typedef VkFlags VkPrivateDataSlotCreateFlagsEXT; +typedef struct VkPhysicalDevicePrivateDataFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 privateData; +} VkPhysicalDevicePrivateDataFeaturesEXT; + +typedef struct VkDevicePrivateDataCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t privateDataSlotRequestCount; +} VkDevicePrivateDataCreateInfoEXT; + +typedef struct VkPrivateDataSlotCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPrivateDataSlotCreateFlagsEXT flags; +} VkPrivateDataSlotCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlotEXT)(VkDevice device, const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot); +typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlotEXT)(VkDevice device, VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data); +typedef void (VKAPI_PTR *PFN_vkGetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlotEXT( + VkDevice device, + const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlotEXT* pPrivateDataSlot); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlotEXT( + VkDevice device, + VkPrivateDataSlotEXT privateDataSlot, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t data); + +VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t* pData); +#endif + + +#define VK_EXT_pipeline_creation_cache_control 1 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control" +typedef 
struct VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelineCreationCacheControl; +} VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT; + + + +#define VK_NV_device_diagnostics_config 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config" + +typedef enum VkDeviceDiagnosticsConfigFlagBitsNV { + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004, + VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkDeviceDiagnosticsConfigFlagBitsNV; +typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV; +typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 diagnosticsConfig; +} VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + +typedef struct VkDeviceDiagnosticsConfigCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceDiagnosticsConfigFlagsNV flags; +} VkDeviceDiagnosticsConfigCreateInfoNV; + + + +#define VK_QCOM_render_pass_store_ops 1 +#define VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION 2 +#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops" + + +#define VK_NV_fragment_shading_rate_enums 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums" + +typedef enum VkFragmentShadingRateTypeNV { + VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV = 0, + VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV = 1, + VK_FRAGMENT_SHADING_RATE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateTypeNV; + +typedef enum VkFragmentShadingRateNV { + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV = 0, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV = 1, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV = 4, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV = 5, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV = 6, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV = 10, + VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV = 11, + VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV = 12, + VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV = 13, + VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV = 14, + VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV = 15, + VK_FRAGMENT_SHADING_RATE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateNV; +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShadingRateEnums; + VkBool32 supersampleFragmentShadingRates; + VkBool32 noInvocationFragmentShadingRates; +} VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV { + VkStructureType sType; + void* pNext; + VkSampleCountFlagBits maxFragmentShadingRateInvocationCount; +} VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + +typedef struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkFragmentShadingRateTypeNV shadingRateType; + VkFragmentShadingRateNV shadingRate; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateEnumStateCreateInfoNV; + +typedef void (VKAPI_PTR 
*PFN_vkCmdSetFragmentShadingRateEnumNV)(VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateEnumNV( + VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif + + +#define VK_EXT_ycbcr_2plane_444_formats 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME "VK_EXT_ycbcr_2plane_444_formats" +typedef struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 ycbcr2plane444Formats; +} VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + + +#define VK_EXT_fragment_density_map2 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2" +typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMapDeferred; +} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; + +typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subsampledLoads; + VkBool32 subsampledCoarseReconstructionEarlyAccess; + uint32_t maxSubsampledArrayLayers; + uint32_t maxDescriptorSetSubsampledSamplers; +} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; + + + +#define VK_QCOM_rotated_copy_commands 1 +#define VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION 1 +#define VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME "VK_QCOM_rotated_copy_commands" +typedef struct VkCopyCommandTransformInfoQCOM { + VkStructureType sType; + const void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkCopyCommandTransformInfoQCOM; + + + +#define VK_EXT_image_robustness 1 +#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness" +typedef struct VkPhysicalDeviceImageRobustnessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 robustImageAccess; +} VkPhysicalDeviceImageRobustnessFeaturesEXT; + + + +#define VK_EXT_4444_formats 1 +#define VK_EXT_4444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats" +typedef struct VkPhysicalDevice4444FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 formatA4R4G4B4; + VkBool32 formatA4B4G4R4; +} VkPhysicalDevice4444FormatsFeaturesEXT; + + + +#define VK_NV_acquire_winrt_display 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME "VK_NV_acquire_winrt_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireWinrtDisplayNV)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetWinrtDisplayNV)(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + uint32_t deviceRelativeId, + VkDisplayKHR* pDisplay); +#endif + + +#define VK_VALVE_mutable_descriptor_type 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type" +typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE { + VkStructureType 
sType; + void* pNext; + VkBool32 mutableDescriptorType; +} VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + +typedef struct VkMutableDescriptorTypeListVALVE { + uint32_t descriptorTypeCount; + const VkDescriptorType* pDescriptorTypes; +} VkMutableDescriptorTypeListVALVE; + +typedef struct VkMutableDescriptorTypeCreateInfoVALVE { + VkStructureType sType; + const void* pNext; + uint32_t mutableDescriptorTypeListCount; + const VkMutableDescriptorTypeListVALVE* pMutableDescriptorTypeLists; +} VkMutableDescriptorTypeCreateInfoVALVE; + + + +#define VK_EXT_vertex_input_dynamic_state 1 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION 2 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_vertex_input_dynamic_state" +typedef struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexInputDynamicState; +} VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; + +typedef struct VkVertexInputBindingDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; + uint32_t divisor; +} VkVertexInputBindingDescription2EXT; + +typedef struct VkVertexInputAttributeDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription2EXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetVertexInputEXT)(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT( + VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); +#endif + + +#define VK_EXT_extended_dynamic_state2 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2" +typedef struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState2; + VkBool32 extendedDynamicState2LogicOp; + VkBool32 extendedDynamicState2PatchControlPoints; +} VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetPatchControlPointsEXT)(VkCommandBuffer commandBuffer, uint32_t patchControlPoints); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEXT)(VkCommandBuffer commandBuffer, VkLogicOp logicOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetPatchControlPointsEXT( + VkCommandBuffer commandBuffer, + uint32_t patchControlPoints); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBiasEnable); + 
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEXT( + VkCommandBuffer commandBuffer, + VkLogicOp logicOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable); +#endif + + +#define VK_EXT_color_write_enable 1 +#define VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION 1 +#define VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME "VK_EXT_color_write_enable" +typedef struct VkPhysicalDeviceColorWriteEnableFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 colorWriteEnable; +} VkPhysicalDeviceColorWriteEnableFeaturesEXT; + +typedef struct VkPipelineColorWriteCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentCount; + const VkBool32* pColorWriteEnables; +} VkPipelineColorWriteCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteEnableEXT)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteEnableEXT( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkBool32* pColorWriteEnables); +#endif + + +#define VK_KHR_acceleration_structure 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) +#define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 11 +#define VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_KHR_acceleration_structure" + +typedef enum VkBuildAccelerationStructureModeKHR { + VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR = 0, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR = 1, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureModeKHR; + +typedef enum VkAccelerationStructureBuildTypeKHR { + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureBuildTypeKHR; + +typedef enum VkAccelerationStructureCompatibilityKHR { + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCompatibilityKHR; + +typedef enum VkAccelerationStructureCreateFlagBitsKHR { + VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000001, + VK_ACCELERATION_STRUCTURE_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCreateFlagBitsKHR; +typedef VkFlags VkAccelerationStructureCreateFlagsKHR; +typedef union VkDeviceOrHostAddressKHR { + VkDeviceAddress deviceAddress; + void* hostAddress; +} VkDeviceOrHostAddressKHR; + +typedef union VkDeviceOrHostAddressConstKHR { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstKHR; + +typedef struct VkAccelerationStructureBuildRangeInfoKHR { + uint32_t primitiveCount; + uint32_t primitiveOffset; + uint32_t firstVertex; + uint32_t transformOffset; +} VkAccelerationStructureBuildRangeInfoKHR; + +typedef struct VkAccelerationStructureGeometryTrianglesDataKHR { + VkStructureType sType; + const void* pNext; + VkFormat vertexFormat; + VkDeviceOrHostAddressConstKHR vertexData; + VkDeviceSize vertexStride; + uint32_t maxVertex; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexData; + VkDeviceOrHostAddressConstKHR transformData; +} VkAccelerationStructureGeometryTrianglesDataKHR; + +typedef struct VkAccelerationStructureGeometryAabbsDataKHR { + 
VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR data; + VkDeviceSize stride; +} VkAccelerationStructureGeometryAabbsDataKHR; + +typedef struct VkAccelerationStructureGeometryInstancesDataKHR { + VkStructureType sType; + const void* pNext; + VkBool32 arrayOfPointers; + VkDeviceOrHostAddressConstKHR data; +} VkAccelerationStructureGeometryInstancesDataKHR; + +typedef union VkAccelerationStructureGeometryDataKHR { + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +} VkAccelerationStructureGeometryDataKHR; + +typedef struct VkAccelerationStructureGeometryKHR { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkAccelerationStructureGeometryDataKHR geometry; + VkGeometryFlagsKHR flags; +} VkAccelerationStructureGeometryKHR; + +typedef struct VkAccelerationStructureBuildGeometryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeKHR type; + VkBuildAccelerationStructureFlagsKHR flags; + VkBuildAccelerationStructureModeKHR mode; + VkAccelerationStructureKHR srcAccelerationStructure; + VkAccelerationStructureKHR dstAccelerationStructure; + uint32_t geometryCount; + const VkAccelerationStructureGeometryKHR* pGeometries; + const VkAccelerationStructureGeometryKHR* const* ppGeometries; + VkDeviceOrHostAddressKHR scratchData; +} VkAccelerationStructureBuildGeometryInfoKHR; + +typedef struct VkAccelerationStructureCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureCreateFlagsKHR createFlags; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; + VkAccelerationStructureTypeKHR type; + VkDeviceAddress deviceAddress; +} VkAccelerationStructureCreateInfoKHR; + +typedef struct VkWriteDescriptorSetAccelerationStructureKHR { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureKHR* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureKHR; + +typedef struct VkPhysicalDeviceAccelerationStructureFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 accelerationStructure; + VkBool32 accelerationStructureCaptureReplay; + VkBool32 accelerationStructureIndirectBuild; + VkBool32 accelerationStructureHostCommands; + VkBool32 descriptorBindingAccelerationStructureUpdateAfterBind; +} VkPhysicalDeviceAccelerationStructureFeaturesKHR; + +typedef struct VkPhysicalDeviceAccelerationStructurePropertiesKHR { + VkStructureType sType; + void* pNext; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxPrimitiveCount; + uint32_t maxPerStageDescriptorAccelerationStructures; + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures; + uint32_t maxDescriptorSetAccelerationStructures; + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures; + uint32_t minAccelerationStructureScratchOffsetAlignment; +} VkPhysicalDeviceAccelerationStructurePropertiesKHR; + +typedef struct VkAccelerationStructureDeviceAddressInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; +} VkAccelerationStructureDeviceAddressInfoKHR; + +typedef struct VkAccelerationStructureVersionInfoKHR { + VkStructureType sType; + const void* pNext; + const uint8_t* pVersionData; +} VkAccelerationStructureVersionInfoKHR; + +typedef struct VkCopyAccelerationStructureToMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + 
VkAccelerationStructureKHR src; + VkDeviceOrHostAddressKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureToMemoryInfoKHR; + +typedef struct VkCopyMemoryToAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyMemoryToAccelerationStructureInfoKHR; + +typedef struct VkCopyAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureInfoKHR; + +typedef struct VkAccelerationStructureBuildSizesInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize accelerationStructureSize; + VkDeviceSize updateScratchSize; + VkDeviceSize buildScratchSize; +} VkAccelerationStructureBuildSizesInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresIndirectKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const* ppMaxPrimitiveCounts); +typedef VkResult (VKAPI_PTR *PFN_vkBuildAccelerationStructuresKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureToMemoryKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkWriteAccelerationStructuresPropertiesKHR)(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureToMemoryKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkDeviceAddress (VKAPI_PTR 
*PFN_vkGetAccelerationStructureDeviceAddressKHR)(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)(VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureBuildSizesKHR)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureKHR( + VkDevice device, + const VkAccelerationStructureCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureKHR* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR( + VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresIndirectKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkDeviceAddress* pIndirectDeviceAddresses, + const uint32_t* pIndirectStrides, + const uint32_t* const* ppMaxPrimitiveCounts); + +VKAPI_ATTR VkResult VKAPI_CALL vkBuildAccelerationStructuresKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureToMemoryKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkWriteAccelerationStructuresPropertiesKHR( + VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureToMemoryKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR* 
pInfo); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetAccelerationStructureDeviceAddressKHR( + VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, + const VkAccelerationStructureVersionInfoKHR* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureBuildSizesKHR( + VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, + const uint32_t* pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); +#endif + + +#define VK_KHR_ray_tracing_pipeline 1 +#define VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME "VK_KHR_ray_tracing_pipeline" + +typedef enum VkShaderGroupShaderKHR { + VK_SHADER_GROUP_SHADER_GENERAL_KHR = 0, + VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR = 1, + VK_SHADER_GROUP_SHADER_ANY_HIT_KHR = 2, + VK_SHADER_GROUP_SHADER_INTERSECTION_KHR = 3, + VK_SHADER_GROUP_SHADER_MAX_ENUM_KHR = 0x7FFFFFFF +} VkShaderGroupShaderKHR; +typedef struct VkRayTracingShaderGroupCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; + const void* pShaderGroupCaptureReplayHandle; +} VkRayTracingShaderGroupCreateInfoKHR; + +typedef struct VkRayTracingPipelineInterfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxPipelineRayPayloadSize; + uint32_t maxPipelineRayHitAttributeSize; +} VkRayTracingPipelineInterfaceCreateInfoKHR; + +typedef struct VkRayTracingPipelineCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoKHR* pGroups; + uint32_t maxPipelineRayRecursionDepth; + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo; + const VkRayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoKHR; + +typedef struct VkPhysicalDeviceRayTracingPipelineFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingPipeline; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplay; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed; + VkBool32 rayTracingPipelineTraceRaysIndirect; + VkBool32 rayTraversalPrimitiveCulling; +} VkPhysicalDeviceRayTracingPipelineFeaturesKHR; + +typedef struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRayRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint32_t shaderGroupHandleCaptureReplaySize; + uint32_t maxRayDispatchInvocationCount; + uint32_t shaderGroupHandleAlignment; + uint32_t maxRayHitAttributeSize; +} VkPhysicalDeviceRayTracingPipelinePropertiesKHR; + +typedef struct 
VkStridedDeviceAddressRegionKHR { + VkDeviceAddress deviceAddress; + VkDeviceSize stride; + VkDeviceSize size; +} VkStridedDeviceAddressRegionKHR; + +typedef struct VkTraceRaysIndirectCommandKHR { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkTraceRaysIndirectCommandKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirectKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress); +typedef VkDeviceSize (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupStackSizeKHR)(VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader); +typedef void (VKAPI_PTR *PFN_vkCmdSetRayTracingPipelineStackSizeKHR)(VkCommandBuffer commandBuffer, uint32_t pipelineStackSize); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirectKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress); + +VKAPI_ATTR VkDeviceSize VKAPI_CALL vkGetRayTracingShaderGroupStackSizeKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRayTracingPipelineStackSizeKHR( + VkCommandBuffer commandBuffer, + uint32_t pipelineStackSize); +#endif + + 
+#define VK_KHR_ray_query 1 +#define VK_KHR_RAY_QUERY_SPEC_VERSION 1 +#define VK_KHR_RAY_QUERY_EXTENSION_NAME "VK_KHR_ray_query" +typedef struct VkPhysicalDeviceRayQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayQuery; +} VkPhysicalDeviceRayQueryFeaturesKHR; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_directfb.h b/vendor/swiftshader/include/vulkan/vulkan_directfb.h new file mode 100644 index 00000000..8eaac6e4 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_directfb.h @@ -0,0 +1,54 @@ +#ifndef VULKAN_DIRECTFB_H_ +#define VULKAN_DIRECTFB_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_EXT_directfb_surface 1 +#define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1 +#define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface" +typedef VkFlags VkDirectFBSurfaceCreateFlagsEXT; +typedef struct VkDirectFBSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDirectFBSurfaceCreateFlagsEXT flags; + IDirectFB* dfb; + IDirectFBSurface* surface; +} VkDirectFBSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDirectFBSurfaceEXT)(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT( + VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB* dfb); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_fuchsia.h b/vendor/swiftshader/include/vulkan/vulkan_fuchsia.h new file mode 100644 index 00000000..d5587157 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_fuchsia.h @@ -0,0 +1,121 @@ +#ifndef VULKAN_FUCHSIA_H_ +#define VULKAN_FUCHSIA_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_FUCHSIA_imagepipe_surface 1 +#define VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION 1 +#define VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME "VK_FUCHSIA_imagepipe_surface" +typedef VkFlags VkImagePipeSurfaceCreateFlagsFUCHSIA; +typedef struct VkImagePipeSurfaceCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkImagePipeSurfaceCreateFlagsFUCHSIA flags; + zx_handle_t imagePipeHandle; +} VkImagePipeSurfaceCreateInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateImagePipeSurfaceFUCHSIA)(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA( + VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +#define VK_FUCHSIA_external_memory 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory" +typedef struct VkImportMemoryZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + zx_handle_t handle; +} VkImportMemoryZirconHandleInfoFUCHSIA; + +typedef struct VkMemoryZirconHandlePropertiesFUCHSIA { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryZirconHandlePropertiesFUCHSIA; + +typedef struct VkMemoryGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandleFUCHSIA)(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA( + VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); +#endif + + +#define VK_FUCHSIA_external_semaphore 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore" +typedef struct VkImportSemaphoreZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + zx_handle_t zirconHandle; +} VkImportSemaphoreZirconHandleInfoFUCHSIA; + +typedef struct VkSemaphoreGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); +typedef VkResult (VKAPI_PTR 
*PFN_vkGetSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_fuchsia_extras.h b/vendor/swiftshader/include/vulkan/vulkan_fuchsia_extras.h new file mode 100644 index 00000000..c60a3cb5 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_fuchsia_extras.h @@ -0,0 +1,156 @@ +#ifndef VULKAN_FUCHSIA_EXTRAS_H_ +#define VULKAN_FUCHSIA_EXTRAS_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2019 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +// IMPORTANT: +// +// The following declarations are not part of the upstream Khronos headers yet +// but the Fuchsia platform currently relies on them to implement various +// extensions required by its compositor and graphics libraries. 
+ + +////////////////////////////////////////////////////////////////////////// +/// +/// The following will be part of vulkan_core.h +/// + +#define VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA ((VkStructureType)1001004000) +#define VK_STRUCTURE_TYPE_FUCHSIA_IMAGE_FORMAT_FUCHSIA ((VkStructureType)1001004001) +#define VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA ((VkStructureType)1001004004) +#define VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA ((VkStructureType)1001004005) +#define VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA ((VkStructureType)1001004006) +#define VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA ((VkStructureType)1001004007) +#define VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA ((VkStructureType)1001004008) +#define VK_STRUCTURE_TYPE_TEMP_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA ((VkStructureType)1001005000) +#define VK_STRUCTURE_TYPE_TEMP_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA ((VkStructureType)1001005001) +#define VK_STRUCTURE_TYPE_TEMP_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA ((VkStructureType)1001005002) +#define VK_STRUCTURE_TYPE_TEMP_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA ((VkStructureType)1001006000) +#define VK_STRUCTURE_TYPE_TEMP_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA ((VkStructureType)1001006001) + +#define VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA ((VkObjectType)1001004002) + +#define VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA ((VkExternalMemoryHandleTypeFlagBits)0x00100000) +#define VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TEMP_ZIRCON_EVENT_BIT_FUCHSIA ((VkExternalSemaphoreHandleTypeFlagBits) 0x00100000) + +#define VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT ((VkDebugReportObjectTypeEXT) 1001004003) + + +////////////////////////////////////////////////////////////////////////// +/// +/// The following will be part of vulkan_fuchsia.h +/// + +#define VK_FUCHSIA_buffer_collection 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferCollectionFUCHSIA) + +#define VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION 1 +#define VK_FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME "VK_FUCHSIA_buffer_collection" + +typedef struct VkBufferCollectionCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + zx_handle_t collectionToken; +} VkBufferCollectionCreateInfoFUCHSIA; + +typedef struct VkFuchsiaImageFormatFUCHSIA { + VkStructureType sType; + const void* pNext; + const void* imageFormat; + uint32_t imageFormatSize; +} VkFuchsiaImageFormatFUCHSIA; + +typedef struct VkImportMemoryBufferCollectionFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkImportMemoryBufferCollectionFUCHSIA; + +typedef struct VkBufferCollectionImageCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkBufferCollectionImageCreateInfoFUCHSIA; + +typedef struct VkBufferConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + const VkBufferCreateInfo* pBufferCreateInfo; + VkFormatFeatureFlags requiredFormatFeatures; + uint32_t minCount; +} VkBufferConstraintsInfoFUCHSIA; + +typedef struct VkBufferCollectionBufferCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkBufferCollectionBufferCreateInfoFUCHSIA; + +typedef struct VkBufferCollectionPropertiesFUCHSIA { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; + uint32_t count; +} VkBufferCollectionPropertiesFUCHSIA; + + 
+typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferCollectionFUCHSIA)(VkDevice device, const VkBufferCollectionCreateInfoFUCHSIA* pImportInfo, const VkAllocationCallbacks* pAllocator, VkBufferCollectionFUCHSIA* pCollection); +typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkImageCreateInfo* pImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferCollectionFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetBufferCollectionPropertiesFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, VkBufferCollectionPropertiesFUCHSIA* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferCollectionFUCHSIA( + VkDevice device, + const VkBufferCollectionCreateInfoFUCHSIA* pImportInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferCollectionFUCHSIA* pCollection); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkImageCreateInfo* pImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionBufferConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferCollectionFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferCollectionPropertiesFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA* pProperties); +#endif + +#ifdef __cplusplus +} +#endif + +#endif // VULKAN_FUCHSIA_EXTRAS_H_ diff --git a/vendor/swiftshader/include/vulkan/vulkan_ggp.h b/vendor/swiftshader/include/vulkan/vulkan_ggp.h new file mode 100644 index 00000000..9a6a582c --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_ggp.h @@ -0,0 +1,58 @@ +#ifndef VULKAN_GGP_H_ +#define VULKAN_GGP_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_GGP_stream_descriptor_surface 1 +#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION 1 +#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME "VK_GGP_stream_descriptor_surface" +typedef VkFlags VkStreamDescriptorSurfaceCreateFlagsGGP; +typedef struct VkStreamDescriptorSurfaceCreateInfoGGP { + VkStructureType sType; + const void* pNext; + VkStreamDescriptorSurfaceCreateFlagsGGP flags; + GgpStreamDescriptor streamDescriptor; +} VkStreamDescriptorSurfaceCreateInfoGGP; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateStreamDescriptorSurfaceGGP)(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateStreamDescriptorSurfaceGGP( + VkInstance instance, + const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +#define VK_GGP_frame_token 1 +#define VK_GGP_FRAME_TOKEN_SPEC_VERSION 1 +#define VK_GGP_FRAME_TOKEN_EXTENSION_NAME "VK_GGP_frame_token" +typedef struct VkPresentFrameTokenGGP { + VkStructureType sType; + const void* pNext; + GgpFrameToken frameToken; +} VkPresentFrameTokenGGP; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_ios.h b/vendor/swiftshader/include/vulkan/vulkan_ios.h new file mode 100644 index 00000000..6e7e6afe --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_ios.h @@ -0,0 +1,47 @@ +#ifndef VULKAN_IOS_H_ +#define VULKAN_IOS_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_MVK_ios_surface 1 +#define VK_MVK_IOS_SURFACE_SPEC_VERSION 3 +#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" +typedef VkFlags VkIOSSurfaceCreateFlagsMVK; +typedef struct VkIOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkIOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkIOSSurfaceCreateInfoMVK; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK( + VkInstance instance, + const VkIOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_macos.h b/vendor/swiftshader/include/vulkan/vulkan_macos.h new file mode 100644 index 00000000..c49b123d --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_macos.h @@ -0,0 +1,47 @@ +#ifndef VULKAN_MACOS_H_ +#define VULKAN_MACOS_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_MVK_macos_surface 1 +#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3 +#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" +typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; +typedef struct VkMacOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkMacOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkMacOSSurfaceCreateInfoMVK; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK( + VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_metal.h b/vendor/swiftshader/include/vulkan/vulkan_metal.h new file mode 100644 index 00000000..5cf4a703 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_metal.h @@ -0,0 +1,54 @@ +#ifndef VULKAN_METAL_H_ +#define VULKAN_METAL_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_EXT_metal_surface 1 + +#ifdef __OBJC__ +@class CAMetalLayer; +#else +typedef void CAMetalLayer; +#endif + +#define VK_EXT_METAL_SURFACE_SPEC_VERSION 1 +#define VK_EXT_METAL_SURFACE_EXTENSION_NAME "VK_EXT_metal_surface" +typedef VkFlags VkMetalSurfaceCreateFlagsEXT; +typedef struct VkMetalSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkMetalSurfaceCreateFlagsEXT flags; + const CAMetalLayer* pLayer; +} VkMetalSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMetalSurfaceEXT)(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT( + VkInstance instance, + const VkMetalSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_screen.h b/vendor/swiftshader/include/vulkan/vulkan_screen.h new file mode 100644 index 00000000..92ad9bfa --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_screen.h @@ -0,0 +1,54 @@ +#ifndef VULKAN_SCREEN_H_ +#define VULKAN_SCREEN_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_QNX_screen_surface 1 +#define VK_QNX_SCREEN_SURFACE_SPEC_VERSION 1 +#define VK_QNX_SCREEN_SURFACE_EXTENSION_NAME "VK_QNX_screen_surface" +typedef VkFlags VkScreenSurfaceCreateFlagsQNX; +typedef struct VkScreenSurfaceCreateInfoQNX { + VkStructureType sType; + const void* pNext; + VkScreenSurfaceCreateFlagsQNX flags; + struct _screen_context* context; + struct _screen_window* window; +} VkScreenSurfaceCreateInfoQNX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateScreenSurfaceQNX)(VkInstance instance, const VkScreenSurfaceCreateInfoQNX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct _screen_window* window); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateScreenSurfaceQNX( + VkInstance instance, + const VkScreenSurfaceCreateInfoQNX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceScreenPresentationSupportQNX( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window* window); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_vi.h b/vendor/swiftshader/include/vulkan/vulkan_vi.h new file mode 100644 index 00000000..9e0dcca2 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_vi.h @@ -0,0 +1,47 @@ +#ifndef VULKAN_VI_H_ +#define VULKAN_VI_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_NN_vi_surface 1 +#define VK_NN_VI_SURFACE_SPEC_VERSION 1 +#define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface" +typedef VkFlags VkViSurfaceCreateFlagsNN; +typedef struct VkViSurfaceCreateInfoNN { + VkStructureType sType; + const void* pNext; + VkViSurfaceCreateFlagsNN flags; + void* window; +} VkViSurfaceCreateInfoNN; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN( + VkInstance instance, + const VkViSurfaceCreateInfoNN* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_wayland.h b/vendor/swiftshader/include/vulkan/vulkan_wayland.h new file mode 100644 index 00000000..2a329be9 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_wayland.h @@ -0,0 +1,54 @@ +#ifndef VULKAN_WAYLAND_H_ +#define VULKAN_WAYLAND_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_KHR_wayland_surface 1 +#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface" +typedef VkFlags VkWaylandSurfaceCreateFlagsKHR; +typedef struct VkWaylandSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWaylandSurfaceCreateFlagsKHR flags; + struct wl_display* display; + struct wl_surface* surface; +} VkWaylandSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR( + VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display* display); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_win32.h b/vendor/swiftshader/include/vulkan/vulkan_win32.h new file mode 100644 index 00000000..1b680f0b --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_win32.h @@ -0,0 +1,315 @@ +#ifndef VULKAN_WIN32_H_ +#define VULKAN_WIN32_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_KHR_win32_surface 1 +#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface" +typedef VkFlags VkWin32SurfaceCreateFlagsKHR; +typedef struct VkWin32SurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWin32SurfaceCreateFlagsKHR flags; + HINSTANCE hinstance; + HWND hwnd; +} VkWin32SurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR( + VkInstance instance, + const VkWin32SurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex); +#endif + + +#define VK_KHR_external_memory_win32 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32" +typedef struct VkImportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportMemoryWin32HandleInfoKHR; + +typedef struct VkExportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportMemoryWin32HandleInfoKHR; + +typedef struct VkMemoryWin32HandlePropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryWin32HandlePropertiesKHR; + +typedef struct VkMemoryGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR( + VkDevice device, + const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); +#endif + + +#define VK_KHR_win32_keyed_mutex 1 +#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1 +#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex" +typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeouts; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoKHR; + + + +#define VK_KHR_external_semaphore_win32 1 +#define 
VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32" +typedef struct VkImportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportSemaphoreWin32HandleInfoKHR; + +typedef struct VkExportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportSemaphoreWin32HandleInfoKHR; + +typedef struct VkD3D12FenceSubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreValuesCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValuesCount; + const uint64_t* pSignalSemaphoreValues; +} VkD3D12FenceSubmitInfoKHR; + +typedef struct VkSemaphoreGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR( + VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR( + VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + + +#define VK_KHR_external_fence_win32 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32" +typedef struct VkImportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportFenceWin32HandleInfoKHR; + +typedef struct VkExportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportFenceWin32HandleInfoKHR; + +typedef struct VkFenceGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR( + VkDevice device, + const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR( + VkDevice device, + const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + + +#define VK_NV_external_memory_win32 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32" +typedef struct VkImportMemoryWin32HandleInfoNV { + VkStructureType 
sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleType; + HANDLE handle; +} VkImportMemoryWin32HandleInfoNV; + +typedef struct VkExportMemoryWin32HandleInfoNV { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; +} VkExportMemoryWin32HandleInfoNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV( + VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE* pHandle); +#endif + + +#define VK_NV_win32_keyed_mutex 1 +#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 2 +#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex" +typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeoutMilliseconds; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoNV; + + + +#define VK_EXT_full_screen_exclusive 1 +#define VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION 4 +#define VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME "VK_EXT_full_screen_exclusive" + +typedef enum VkFullScreenExclusiveEXT { + VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT = 0, + VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT = 1, + VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT = 2, + VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT = 3, + VK_FULL_SCREEN_EXCLUSIVE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkFullScreenExclusiveEXT; +typedef struct VkSurfaceFullScreenExclusiveInfoEXT { + VkStructureType sType; + void* pNext; + VkFullScreenExclusiveEXT fullScreenExclusive; +} VkSurfaceFullScreenExclusiveInfoEXT; + +typedef struct VkSurfaceCapabilitiesFullScreenExclusiveEXT { + VkStructureType sType; + void* pNext; + VkBool32 fullScreenExclusiveSupported; +} VkSurfaceCapabilitiesFullScreenExclusiveEXT; + +typedef struct VkSurfaceFullScreenExclusiveWin32InfoEXT { + VkStructureType sType; + const void* pNext; + HMONITOR hmonitor; +} VkSurfaceFullScreenExclusiveWin32InfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain); +typedef VkResult (VKAPI_PTR *PFN_vkReleaseFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModes2EXT)(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModes2EXT( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireFullScreenExclusiveModeEXT( + VkDevice device, + VkSwapchainKHR swapchain); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseFullScreenExclusiveModeEXT( + VkDevice device, + VkSwapchainKHR swapchain); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModes2EXT( + VkDevice device, + const 
VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR* pModes); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_xcb.h b/vendor/swiftshader/include/vulkan/vulkan_xcb.h new file mode 100644 index 00000000..5ba2ad85 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_xcb.h @@ -0,0 +1,55 @@ +#ifndef VULKAN_XCB_H_ +#define VULKAN_XCB_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_KHR_xcb_surface 1 +#define VK_KHR_XCB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface" +typedef VkFlags VkXcbSurfaceCreateFlagsKHR; +typedef struct VkXcbSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXcbSurfaceCreateFlagsKHR flags; + xcb_connection_t* connection; + xcb_window_t window; +} VkXcbSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR( + VkInstance instance, + const VkXcbSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t* connection, + xcb_visualid_t visual_id); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_xlib.h b/vendor/swiftshader/include/vulkan/vulkan_xlib.h new file mode 100644 index 00000000..75c75dc2 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_xlib.h @@ -0,0 +1,55 @@ +#ifndef VULKAN_XLIB_H_ +#define VULKAN_XLIB_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_KHR_xlib_surface 1 +#define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface" +typedef VkFlags VkXlibSurfaceCreateFlagsKHR; +typedef struct VkXlibSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXlibSurfaceCreateFlagsKHR flags; + Display* dpy; + Window window; +} VkXlibSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR( + VkInstance instance, + const VkXlibSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display* dpy, + VisualID visualID); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/include/vulkan/vulkan_xlib_xrandr.h b/vendor/swiftshader/include/vulkan/vulkan_xlib_xrandr.h new file mode 100644 index 00000000..fa274934 --- /dev/null +++ b/vendor/swiftshader/include/vulkan/vulkan_xlib_xrandr.h @@ -0,0 +1,45 @@ +#ifndef VULKAN_XLIB_XRANDR_H_ +#define VULKAN_XLIB_XRANDR_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_EXT_acquire_xlib_display 1 +#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT( + VkPhysicalDevice physicalDevice, + Display* dpy, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT( + VkPhysicalDevice physicalDevice, + Display* dpy, + RROutput rrOutput, + VkDisplayKHR* pDisplay); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/vendor/swiftshader/linux/x64/libEGL.so b/vendor/swiftshader/linux/x64/libEGL.so new file mode 100755 index 00000000..6bfbc2a7 --- /dev/null +++ b/vendor/swiftshader/linux/x64/libEGL.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bf32939eb7508ac4c129b775e775ad7e781f7271025c5c53643b81e76e8d896d +size 59544 diff --git a/vendor/swiftshader/linux/x64/libGLESv2.so b/vendor/swiftshader/linux/x64/libGLESv2.so new file mode 100755 index 00000000..eda84d58 --- /dev/null +++ b/vendor/swiftshader/linux/x64/libGLESv2.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:81411c37faee128ccddd468553ec8c0a1f451f53c5c15a732a0ab6f8d43ed520 +size 19293376 diff --git a/vendor/swiftshader/mac/arm64/libEGL.dylib b/vendor/swiftshader/mac/arm64/libEGL.dylib new file mode 100755 index 00000000..835d204b --- /dev/null +++ 
b/vendor/swiftshader/mac/arm64/libEGL.dylib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6e787fd91ff325db711cd3f4eca1e667e7a88f83e571c100c496ebb35c229ef0 +size 106841 diff --git a/vendor/swiftshader/mac/arm64/libGLESv2.dylib b/vendor/swiftshader/mac/arm64/libGLESv2.dylib new file mode 100755 index 00000000..2f3b9a61 --- /dev/null +++ b/vendor/swiftshader/mac/arm64/libGLESv2.dylib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3560c1919c50ba0d272bfbd3fd457d085c00258b5eb49efcf1b3c9c77cd25343 +size 31171340 diff --git a/vendor/swiftshader/mac/x64/libEGL.dylib b/vendor/swiftshader/mac/x64/libEGL.dylib new file mode 100755 index 00000000..84663c9b --- /dev/null +++ b/vendor/swiftshader/mac/x64/libEGL.dylib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1cd3134ee5598ae4ebd29e9596cdb159691aec90f23045da1a8772f4632bdb27 +size 106008 diff --git a/vendor/swiftshader/mac/x64/libGLESv2.dylib b/vendor/swiftshader/mac/x64/libGLESv2.dylib new file mode 100755 index 00000000..b011e89c --- /dev/null +++ b/vendor/swiftshader/mac/x64/libGLESv2.dylib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bda40fe826aac13efa834dcfbfaf8079547ce201c47562359a4248dc9868078f +size 30750008 diff --git a/vendor/swiftshader/win/x64/libEGL.dll b/vendor/swiftshader/win/x64/libEGL.dll new file mode 100644 index 00000000..22c0043c --- /dev/null +++ b/vendor/swiftshader/win/x64/libEGL.dll @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e9e00149c4ebd1c2032a663bbd13a0f727cbc853dca9a431f1b7f8c707902a4 +size 75264 diff --git a/vendor/swiftshader/win/x64/libEGL.lib b/vendor/swiftshader/win/x64/libEGL.lib new file mode 100644 index 00000000..547a1c0c --- /dev/null +++ b/vendor/swiftshader/win/x64/libEGL.lib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:640526c99e064217d2ddee8671f2ab82ceb70f4e6c2989ca26c35e02eae05440 +size 10794 diff --git a/vendor/swiftshader/win/x64/libGLESv2.dll b/vendor/swiftshader/win/x64/libGLESv2.dll new file mode 100644 index 00000000..3a30d00c --- /dev/null +++ b/vendor/swiftshader/win/x64/libGLESv2.dll @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aec272704fde4718a7ecfec109036d8bc5931f6ab9658a6a0d1bdb0f7aa4a6c2 +size 21512192 diff --git a/vendor/swiftshader/win/x64/libGLESv2.lib b/vendor/swiftshader/win/x64/libGLESv2.lib new file mode 100644 index 00000000..aa44d1dd --- /dev/null +++ b/vendor/swiftshader/win/x64/libGLESv2.lib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:799ae83656007cb207caf49a632c80be8c866e640a71bed7cc9d007ff88d6a05 +size 63206 diff --git a/vendor/swiftshader/win/x86/libEGL.dll b/vendor/swiftshader/win/x86/libEGL.dll new file mode 100644 index 00000000..821a5aa2 --- /dev/null +++ b/vendor/swiftshader/win/x86/libEGL.dll @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5b39b95f5efec4332a84cea539f4dafae82ea1d70185e21e52b5e506715ef669 +size 59904 diff --git a/vendor/swiftshader/win/x86/libEGL.lib b/vendor/swiftshader/win/x86/libEGL.lib new file mode 100644 index 00000000..4c1cfd17 --- /dev/null +++ b/vendor/swiftshader/win/x86/libEGL.lib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bb77ec1821d60d7fc83807a0c2c4d777af45baf7748e2a85c5fb587f0f2bb491 +size 11550 diff --git a/vendor/swiftshader/win/x86/libGLESv2.dll b/vendor/swiftshader/win/x86/libGLESv2.dll new file mode 100644 index 00000000..20f2a621 --- /dev/null +++ 
b/vendor/swiftshader/win/x86/libGLESv2.dll @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:626e97850cb7a39faff9eeddc85d573c992f204e58dd47099ca04f3ef781c4f0 +size 17480192 diff --git a/vendor/swiftshader/win/x86/libGLESv2.lib b/vendor/swiftshader/win/x86/libGLESv2.lib new file mode 100644 index 00000000..517a02d1 --- /dev/null +++ b/vendor/swiftshader/win/x86/libGLESv2.lib @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4b67a2743d224cc5f88a03d5670717795cbb92c37705ffee2f527dae6787944a +size 68332 diff --git a/web/.gitignore b/web/.gitignore new file mode 100644 index 00000000..0e234910 --- /dev/null +++ b/web/.gitignore @@ -0,0 +1,6 @@ +package-lock.json +lib +types +*.js.map +demo/*.js +src/wasm \ No newline at end of file diff --git a/web/demo/index.css b/web/demo/index.css new file mode 100644 index 00000000..07585d56 --- /dev/null +++ b/web/demo/index.css @@ -0,0 +1,21 @@ +.container { + display: flex; + position: relative; +} +.canvas { + position: relative; + z-index: 999999; +} +.tablecloth { + position: absolute; + z-index: 1; + top: 0; + left: 0; + background: rgb(180, 180, 180); + background-image: linear-gradient(rgba(255, 255, 255, 0.3) 1px, transparent 0), + linear-gradient(90deg, rgba(255, 255, 255, 0.3) 1px, transparent 0), linear-gradient(white 1px, transparent 0), + linear-gradient(90deg, white 1px, transparent 0); + background-size: 15px 15px, 15px 15px, 75px 75px, 75px 75px; + width: 640px; + height: 640px; +} \ No newline at end of file diff --git a/web/demo/index.html b/web/demo/index.html new file mode 100644 index 00000000..bab5b07c --- /dev/null +++ b/web/demo/index.html @@ -0,0 +1,22 @@ + + + + + + + Document + + + +
loading...
+ + + + diff --git a/web/demo/index.ts b/web/demo/index.ts new file mode 100644 index 00000000..6d80c02e --- /dev/null +++ b/web/demo/index.ts @@ -0,0 +1,12 @@ +import { TGFXInit, types } from '../src/main'; + +let TGFXModule: types.TGFX; + +window.onload = async () => { + TGFXModule = await TGFXInit({ locateFile: (file: string) => '../lib/' + file }); + console.log(`wasm loaded!`, TGFXModule); + document.getElementById('waiting')!.style.display = 'none'; + document.getElementById('container')!.style.display = 'block'; + TGFXModule.ScalerContext.isEmoji('测试'); + TGFXModule.ScalerContext.isEmoji('👍'); +}; diff --git a/web/package.json b/web/package.json new file mode 100644 index 00000000..91b8260b --- /dev/null +++ b/web/package.json @@ -0,0 +1,51 @@ +{ + "name": "tgfx", + "version": "4.0.0", + "description": "Tencent Graphics", + "main": "lib/tgfx.cjs.js", + "module": "lib/tgfx.esm.js", + "browser": "lib/tgfx.umd.js", + "typings": "types/tgfx.d.ts", + "scripts": { + "dev": "rollup -c ./script/rollup.config.dev.js -w", + "server": "http-server -p 8081 . -c-1 --cors", + "build": "rimraf lib/*.js lib/*.map types/* && tsc -p ./tsconfig.type.json && rollup -c ./script/rollup.config.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/Tencent/tgfx/tree/main/web" + }, + "devDependencies": { + "@babel/core": "~7.20.2", + "@babel/preset-env": "~7.20.2", + "@rollup/plugin-babel": "~6.0.2", + "@rollup/plugin-commonjs": "~23.0.2", + "@rollup/plugin-json": "~5.0.1", + "@rollup/plugin-node-resolve": "~15.0.1", + "@types/emscripten": "~1.39.6", + "@typescript-eslint/eslint-plugin": "~5.43.0", + "@typescript-eslint/parser": "~5.43.0", + "esbuild": "~0.15.14", + "eslint": "~8.27.0", + "eslint-config-alloy": "~4.7.0", + "http-server": "^14.1.1", + "rimraf": "~3.0.2", + "rollup": "~2.79.1", + "rollup-plugin-esbuild": "~4.10.3", + "rollup-plugin-terser": "~7.0.2", + "tslib": "~2.4.1", + "typedoc": "~0.23.21", + "typescript": "~4.9.3" + }, + "files": [ + "lib", + "types", + "src" + ], + "license": "Apache-2.0", + "contributors": [ + "zenoslin", + "lvpengwei" + ], + "author": "Tencent" +} diff --git a/web/script/build.sh b/web/script/build.sh new file mode 100755 index 00000000..898d7ee6 --- /dev/null +++ b/web/script/build.sh @@ -0,0 +1,68 @@ +#!/bin/bash -e + +SOURCE_DIR=../.. +BUILD_DIR=../../build + +echo "-----begin-----" + +if [ ! -d "../src/wasm" ]; then + mkdir ../src/wasm +fi + +RELEASE_CONF="-Oz -s" +CMAKE_BUILD_TYPE=Release + +if [[ $@ == *debug* ]]; then + CMAKE_BUILD_TYPE=Debug + RELEASE_CONF="-O0 -g3 -s SAFE_HEAP=1" +fi + +emcmake cmake -S $SOURCE_DIR -B $BUILD_DIR -G Ninja -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" + +cmake --build $BUILD_DIR --target tgfx + +emcc $RELEASE_CONF -std=c++17 \ -I$SOURCE_DIR/include/ \ -I$SOURCE_DIR/src/ \ -Wl,--whole-archive $BUILD_DIR/libtgfx.a \ --no-entry \ --bind \ -s WASM=1 \ -s ASYNCIFY \ -s USE_WEBGL2=1 \ -s "EXPORTED_RUNTIME_METHODS=['GL','Asyncify']" \ -s ALLOW_MEMORY_GROWTH=1 \ -s EXPORT_NAME="TGFXInit" \ -s ERROR_ON_UNDEFINED_SYMBOLS=0 \ -s MODULARIZE=1 \ -s NO_EXIT_RUNTIME=1 \ -s ENVIRONMENT="web,worker" \ -s EXPORT_ES6=1 \ -s USE_ES6_IMPORT_META=0 \ -o ../src/wasm/tgfx.js + +if test $? -eq 0; then + echo "~~~~~~~~~~~~~~~~~~~wasm build success~~~~~~~~~~~~~~~~~~" +else + echo "~~~~~~~~~~~~~~~~~~~wasm build failed~~~~~~~~~~~~~~~~~~~" + exit 1 +fi + +echo $USER_EM_VERSION >../../build/emscripten_version.txt +echo "-----add emscripten_version.txt-----" + +if [ !
-d "../lib" ]; then + mkdir ../lib +fi + +cp -f ../src/wasm/tgfx.wasm ../lib + +if [ ! -d "../node_modules" ]; then + npm install +fi + +if [[ ! $@ == *debug* ]]; then + npm run build +fi + +echo "-----end-----" diff --git a/web/script/rollup.config.dev.js b/web/script/rollup.config.dev.js new file mode 100644 index 00000000..2ac76496 --- /dev/null +++ b/web/script/rollup.config.dev.js @@ -0,0 +1,43 @@ +import esbuild from 'rollup-plugin-esbuild'; +import resolve from '@rollup/plugin-node-resolve'; +import commonJs from '@rollup/plugin-commonjs'; +import json from '@rollup/plugin-json'; + +const banner = `///////////////////////////////////////////////////////////////////////////////////////////////// +// +// Tencent is pleased to support the open source community by making tgfx available. +// +// Copyright (C) 2023 THL A29 Limited, a Tencent company. All rights reserved. +// +// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// https://opensource.org/licenses/BSD-3-Clause +// +// unless required by applicable law or agreed to in writing, software distributed under the +// license is distributed on an "as is" basis, without warranties or conditions of any kind, +// either express or implied. see the license for the specific language governing permissions +// and limitations under the license. +// +///////////////////////////////////////////////////////////////////////////////////////////////// +`; + +const plugins = [ + esbuild({ tsconfig: "tsconfig.json", minify: false }), + json(), + resolve(), + commonJs(), +]; + +export default [ + { + input: 'demo/index.ts', + output: { banner, file: 'demo/index.js', format: 'esm', sourcemap: true }, + plugins: plugins, + }, + { + input: 'src/main.ts', + output: { name: 'tgfx', banner, file: 'demo/tgfx.js', format: 'umd', exports: 'named', sourcemap: true }, + plugins: plugins, + }, +]; diff --git a/web/script/rollup.config.js b/web/script/rollup.config.js new file mode 100644 index 00000000..18c3d1e0 --- /dev/null +++ b/web/script/rollup.config.js @@ -0,0 +1,71 @@ +import resolve from '@rollup/plugin-node-resolve'; +import commonJs from '@rollup/plugin-commonjs'; +import json from '@rollup/plugin-json'; +import { terser } from 'rollup-plugin-terser'; +import esbuild from 'rollup-plugin-esbuild'; + +const umdConfig = { + input: 'src/main.ts', + output: [ + { + name: 'tgfx', + format: 'umd', + exports: 'named', + sourcemap: true, + file: 'lib/tgfx.js', + }, + ], + plugins: [ + esbuild({ + tsconfig: 'tsconfig.json', + minify: false, + }), + json(), + resolve(), + commonJs(), + ], +}; + +const umdMinConfig = { + input: 'src/main.ts', + output: [ + { + name: 'tgfx', + format: 'umd', + exports: 'named', + sourcemap: true, + file: 'lib/tgfx.min.js', + }, + ], + plugins: [ + esbuild({ + tsconfig: 'tsconfig.json', + minify: false, + }), + json(), + resolve(), + commonJs(), + terser(), + ], +}; + +export default [ + umdConfig, + umdMinConfig, + { + input: 'src/main.ts', + output: [ + { file: 'lib/tgfx.esm.js', format: 'esm', sourcemap: true }, + { file: 'lib/tgfx.cjs.js', format: 'cjs', exports: 'auto', sourcemap: true }, + ], + plugins: [ + esbuild({ + tsconfig: 'tsconfig.json', + minify: false + }), + json(), + resolve(), + commonJs() + ], + }, +]; diff --git a/web/src/binding.ts b/web/src/binding.ts new file mode 100644 index 00000000..f8cf0bdd --- /dev/null +++ b/web/src/binding.ts @@ -0,0 +1,15 @@ +import { TGFX } from './types' 
+import { setTGFXModule } from './tgfx-module' +import * as tgfx from './tgfx' +import { Matrix } from './core/matrix' +import { ScalerContext } from './core/scaler-context'; +import { WebMask } from './core/web-mask'; + +export const binding = (module: TGFX) => { + setTGFXModule(module) + module.module = module + module.ScalerContext = ScalerContext + module.WebMask = WebMask + module.Matrix = Matrix + module.tgfx = { ...tgfx }; +} diff --git a/web/src/constant.ts b/web/src/constant.ts new file mode 100644 index 00000000..32e4aff2 --- /dev/null +++ b/web/src/constant.ts @@ -0,0 +1 @@ +export const CANVAS_POOL_MAX_SIZE = 10; diff --git a/web/src/core/bitmap-image.ts b/web/src/core/bitmap-image.ts new file mode 100644 index 00000000..84ef1ed2 --- /dev/null +++ b/web/src/core/bitmap-image.ts @@ -0,0 +1,14 @@ +export class BitmapImage { + public bitmap: ImageBitmap | null; + + public constructor(bitmap: ImageBitmap | null) { + this.bitmap = bitmap; + } + + public setBitmap(bitmap: ImageBitmap) { + if (this.bitmap) { + this.bitmap.close(); + } + this.bitmap = bitmap; + } +} diff --git a/web/src/core/matrix.ts b/web/src/core/matrix.ts new file mode 100644 index 00000000..e04c7d18 --- /dev/null +++ b/web/src/core/matrix.ts @@ -0,0 +1,85 @@ +import { destroyVerify, wasmAwaitRewind } from '../utils/decorators'; +import { MatrixIndex } from '../types'; + +@destroyVerify +@wasmAwaitRewind +export class Matrix { + public wasmIns; + public isDestroyed = false; + + public constructor(wasmIns: any) { + this.wasmIns = wasmIns; + } + + /** + * scaleX; horizontal scale factor to store + */ + public get a(): number { + return this.wasmIns ? this.wasmIns._get(MatrixIndex.a) : 0; + } + public set a(value: number) { + this.wasmIns?._set(MatrixIndex.a, value); + } + /** + * skewY; vertical skew factor to store + */ + public get b(): number { + return this.wasmIns ? this.wasmIns._get(MatrixIndex.b) : 0; + } + public set b(value: number) { + this.wasmIns?._set(MatrixIndex.b, value); + } + /** + * skewX; horizontal skew factor to store + */ + public get c(): number { + return this.wasmIns ? this.wasmIns._get(MatrixIndex.c) : 0; + } + public set c(value: number) { + this.wasmIns?._set(MatrixIndex.c, value); + } + /** + * scaleY; vertical scale factor to store + */ + public get d(): number { + return this.wasmIns ? this.wasmIns._get(MatrixIndex.d) : 0; + } + public set d(value: number) { + this.wasmIns?._set(MatrixIndex.d, value); + } + /** + * transX; horizontal translation to store + */ + public get tx(): number { + return this.wasmIns ? this.wasmIns._get(MatrixIndex.tx) : 0; + } + public set tx(value: number) { + this.wasmIns?._set(MatrixIndex.tx, value); + } + /** + * transY; vertical translation to store + */ + public get ty(): number { + return this.wasmIns ? this.wasmIns._get(MatrixIndex.ty) : 0; + } + public set ty(value: number) { + this.wasmIns?._set(MatrixIndex.ty, value); + } + + /** + * Returns one matrix value. + */ + public get(index: MatrixIndex): number { + return this.wasmIns ? this.wasmIns._get(index) : 0; + } + /** + * Sets Matrix value. 
+ */ + public set(index: MatrixIndex, value: number) { + this.wasmIns?._set(index, value); + } + + public destroy() { + this.wasmIns.delete(); + } +} \ No newline at end of file diff --git a/web/src/core/scaler-context.ts b/web/src/core/scaler-context.ts new file mode 100644 index 00000000..2566a35d --- /dev/null +++ b/web/src/core/scaler-context.ts @@ -0,0 +1,166 @@ +import { measureText } from '../utils/measure-text'; +import { defaultFontNames, getFontFamilies } from '../utils/font-family'; +import { getCanvas2D } from '../utils/canvas'; + +import type { Rect } from '../types'; + +export class ScalerContext { + public static canvas: HTMLCanvasElement | OffscreenCanvas; + public static context: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D; + + public static setCanvas(canvas: HTMLCanvasElement | OffscreenCanvas) { + ScalerContext.canvas = canvas; + } + + public static setContext(context: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D) { + ScalerContext.context = context; + } + + public static isUnicodePropertyEscapeSupported(): boolean { + try { + // eslint-disable-next-line prefer-regex-literals + const regex = new RegExp("\\p{L}", "u"); + return true; + } catch (e) { + return false; + } + } + + public static isEmoji(text: string): boolean { + let emojiRegExp: RegExp; + if (this.isUnicodePropertyEscapeSupported()) { + emojiRegExp = /\p{Extended_Pictographic}|[#*0-9]\uFE0F?\u20E3|[\uD83C\uDDE6-\uD83C\uDDFF]/u; + } else { + emojiRegExp = /(\u00a9|\u00ae|[\u2000-\u3300]|\ud83c[\ud000-\udfff]|\ud83d[\ud000-\udfff]|\ud83e[\ud000-\udfff])/; + } + return emojiRegExp.test(text); + } + + private readonly fontName: string; + private readonly fontStyle: string; + private readonly size: number; + private readonly fauxBold: boolean; + private readonly fauxItalic: boolean; + + private fontBoundingBoxMap: { key: string; value: Rect }[] = []; + + public constructor(fontName: string, fontStyle: string, size: number, fauxBold = false, fauxItalic = false) { + this.fontName = fontName; + this.fontStyle = fontStyle; + this.size = size; + this.fauxBold = fauxBold; + this.fauxItalic = fauxItalic; + this.loadCanvas(); + } + + public fontString() { + const attributes = []; + // css font-style + if (this.fauxItalic) { + attributes.push('italic'); + } + // css font-weight + if (this.fauxBold) { + attributes.push('bold'); + } + // css font-size + attributes.push(`${this.size}px`); + // css font-family + const fallbackFontNames = defaultFontNames.concat(); + fallbackFontNames.unshift(...getFontFamilies(this.fontName, this.fontStyle)); + attributes.push(`${fallbackFontNames.join(',')}`); + return attributes.join(' '); + } + + public getTextAdvance(text: string) { + const { context } = ScalerContext; + context.font = this.fontString(); + return context.measureText(text).width; + } + + public getTextBounds(text: string) { + const { context } = ScalerContext; + context.font = this.fontString(); + const metrics = this.measureText(context, text); + + const bounds: Rect = { + left: Math.floor(-metrics.actualBoundingBoxLeft), + top: Math.floor(-metrics.actualBoundingBoxAscent), + right: Math.ceil(metrics.actualBoundingBoxRight), + bottom: Math.ceil(metrics.actualBoundingBoxDescent), + }; + return bounds; + } + + public generateFontMetrics() { + const { context } = ScalerContext; + context.font = this.fontString(); + const metrics = this.measureText(context, '中'); + const capHeight = metrics.actualBoundingBoxAscent; + const xMetrics = this.measureText(context, 'x'); + const xHeight = 
xMetrics.actualBoundingBoxAscent; + return { + ascent: -metrics.fontBoundingBoxAscent, + descent: metrics.fontBoundingBoxDescent, + xHeight, + capHeight, + }; + } + + public generateImage(text: string, bounds: Rect) { + const canvas = getCanvas2D(bounds.right - bounds.left, bounds.bottom - bounds.top); + const context = canvas.getContext('2d') as CanvasRenderingContext2D; + context.font = this.fontString(); + context.fillText(text, -bounds.left, -bounds.top); + return canvas; + } + + protected loadCanvas() { + if (!ScalerContext.canvas) { + ScalerContext.setCanvas(getCanvas2D(10, 10)); + // https://html.spec.whatwg.org/multipage/canvas.html#concept-canvas-will-read-frequently + ScalerContext.setContext( + (ScalerContext.canvas as HTMLCanvasElement | OffscreenCanvas).getContext('2d', { willReadFrequently: true }) as + | CanvasRenderingContext2D + | OffscreenCanvasRenderingContext2D, + ); + } + } + + private measureText(ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D, text: string): TextMetrics { + const metrics = ctx.measureText(text); + if (metrics?.actualBoundingBoxAscent) return metrics; + ctx.canvas.width = this.size * 1.5; + ctx.canvas.height = this.size * 1.5; + const pos = [0, this.size]; + ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height); + ctx.font = this.fontString(); + ctx.fillText(text, pos[0], pos[1]); + const imageData = ctx.getImageData(0, 0, ctx.canvas.width, ctx.canvas.height); + const { left, top, right, bottom } = measureText(imageData); + ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height); + + let fontMeasure: Rect; + const fontBoundingBox = this.fontBoundingBoxMap.find((item) => item.key === this.fontName); + if (fontBoundingBox) { + fontMeasure = fontBoundingBox.value; + } else { + ctx.font = this.fontString(); + ctx.fillText('测', pos[0], pos[1]); + const fontImageData = ctx.getImageData(0, 0, ctx.canvas.width, ctx.canvas.height); + fontMeasure = measureText(fontImageData); + this.fontBoundingBoxMap.push({ key: this.fontName, value: fontMeasure }); + ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height); + } + + return { + actualBoundingBoxAscent: pos[1] - top, + actualBoundingBoxRight: right - pos[0], + actualBoundingBoxDescent: bottom - pos[1], + actualBoundingBoxLeft: pos[0] - left, + fontBoundingBoxAscent: fontMeasure.bottom - fontMeasure.top, + fontBoundingBoxDescent: 0, + width: fontMeasure.right - fontMeasure.left, + }; + } +} diff --git a/web/src/core/web-mask.ts b/web/src/core/web-mask.ts new file mode 100644 index 00000000..185a1e73 --- /dev/null +++ b/web/src/core/web-mask.ts @@ -0,0 +1,104 @@ +import { TGFXModule } from '../tgfx-module'; +import { ctor, Point, Vector } from '../types'; +import { ScalerContext } from './scaler-context'; +import { Matrix } from './matrix'; + +export interface WebFont { + name: string; + style: string; + size: number; + bold: boolean; + italic: boolean; +} + +export class WebMask { + public static create(canvas: HTMLCanvasElement | OffscreenCanvas) { + return new WebMask(canvas); + } + + private static getLineCap(cap: ctor): CanvasLineCap { + switch (cap) { + case TGFXModule.TGFXLineCap.Round: + return 'round'; + case TGFXModule.TGFXLineCap.Square: + return 'square'; + default: + return 'butt'; + } + } + + private static getLineJoin(join: ctor): CanvasLineJoin { + switch (join) { + case TGFXModule.TGFXLineJoin.Round: + return 'round'; + case TGFXModule.TGFXLineJoin.Bevel: + return 'bevel'; + default: + return 'miter'; + } + } + + protected canvas: HTMLCanvasElement | OffscreenCanvas; + 
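/*
 * The ScalerContext above measures text with canvas 2D TextMetrics and, when a
 * browser does not report the actualBoundingBox* fields, falls back to drawing
 * the text and scanning the resulting pixels (see measure-text.ts); the per-font
 * bounding box is cached in fontBoundingBoxMap. A minimal usage sketch, assuming
 * a DOM or OffscreenCanvas environment; the font name and sample text are
 * illustrative:
 *
 *   const scaler = new ScalerContext('Arial', '', 24);
 *   const bounds = scaler.getTextBounds('Hello');        // { left, top, right, bottom } in px
 *   const advance = scaler.getTextAdvance('Hello');      // advance width in px
 *   const glyph = scaler.generateImage('Hello', bounds); // canvas with the rendered text
 */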
private context: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D; + + public constructor(canvas: HTMLCanvasElement | OffscreenCanvas) { + this.canvas = canvas; + this.context = this.canvas.getContext('2d') as CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D; + } + + public updateCanvas(canvas: HTMLCanvasElement | OffscreenCanvas) { + this.canvas = canvas; + } + + public fillPath(path: Path2D, fillType: ctor) { + this.context.setTransform(1, 0, 0, 1, 0, 0); + if ( + fillType === TGFXModule.TGFXPathFillType.InverseWinding || + fillType === TGFXModule.TGFXPathFillType.InverseEvenOdd + ) { + this.context.clip(path, fillType === TGFXModule.TGFXPathFillType.InverseEvenOdd ? 'evenodd' : 'nonzero'); + this.context.fillRect(0, 0, this.canvas.width, this.canvas.height); + } else { + this.context.fill(path, fillType === TGFXModule.TGFXPathFillType.EvenOdd ? 'evenodd' : 'nonzero'); + } + } + + public fillText(webFont: WebFont, texts: Vector, positions: Vector, matrixWasmIns: any) { + const scalerContext = new ScalerContext(webFont.name, webFont.style, webFont.size, webFont.bold, webFont.italic); + const matrix = new Matrix(matrixWasmIns); + this.context.setTransform(matrix.a, matrix.b, matrix.c, matrix.d, matrix.tx, matrix.ty); + this.context.font = scalerContext.fontString(); + for (let i = 0; i < texts.size(); i++) { + const position = positions.get(i); + this.context.fillText(texts.get(i), position.x, position.y); + } + } + + public strokeText( + webFont: WebFont, + stroke: { width: number; cap: ctor; join: ctor; miterLimit: number }, + texts: Vector, + positions: Vector, + matrixWasmIns: any, + ) { + if (stroke.width < 0.5) { + return; + } + const scalerContext = new ScalerContext(webFont.name, webFont.style, webFont.size, webFont.bold, webFont.italic); + const matrix = new Matrix(matrixWasmIns); + this.context.setTransform(matrix.a, matrix.b, matrix.c, matrix.d, matrix.tx, matrix.ty); + this.context.font = scalerContext.fontString(); + this.context.lineJoin = WebMask.getLineJoin(stroke.join); + this.context.miterLimit = stroke.miterLimit; + this.context.lineCap = WebMask.getLineCap(stroke.cap); + this.context.lineWidth = stroke.width; + for (let i = 0; i < texts.size(); i++) { + const position = positions.get(i); + this.context.strokeText(texts.get(i), position.x, position.y); + } + } + + public clear() { + this.context.clearRect(0, 0, this.canvas.width, this.canvas.height); + } +} diff --git a/web/src/main.ts b/web/src/main.ts new file mode 100644 index 00000000..35da1ee5 --- /dev/null +++ b/web/src/main.ts @@ -0,0 +1,23 @@ +import { binding } from './binding'; +import * as types from './types'; +import createTGFX from './wasm/tgfx'; + +export interface ModuleOption { + /** + * Link to wasm file. + */ + locateFile?: (file: 'tgfx.wasm') => string; +} + +const TGFXInit = (moduleOption: ModuleOption = {}): Promise => + createTGFX(moduleOption) + .then((module: types.TGFX) => { + binding(module); + return module; + }) + .catch((error: any) => { + console.error(error); + throw new Error('TGFXInit fail! 
Please check .wasm file path valid.'); + }); + +export { binding, TGFXInit, types }; diff --git a/web/src/tgfx-module.ts b/web/src/tgfx-module.ts new file mode 100644 index 00000000..a8ff123c --- /dev/null +++ b/web/src/tgfx-module.ts @@ -0,0 +1,9 @@ +import type { TGFX } from './types'; + +export let TGFXModule: TGFX; + +export const setTGFXModule = (module: TGFX) => { + TGFXModule = module; +}; + +export const getTGFXModule = () => TGFXModule; diff --git a/web/src/tgfx.ts b/web/src/tgfx.ts new file mode 100644 index 00000000..ab6e6745 --- /dev/null +++ b/web/src/tgfx.ts @@ -0,0 +1,106 @@ +import { getCanvas2D, releaseCanvas2D, isCanvas } from './utils/canvas'; +import { writeBufferToWasm } from './utils/buffer'; +import { BitmapImage } from './core/bitmap-image'; +import { isInstanceOf } from './utils/type-utils'; + +import type { EmscriptenGL, TGFX } from './types'; +import type { wx } from './wechat/interfaces'; + +declare const wx: wx; + +export const createImage = (source: string) => { + return new Promise((resolve) => { + const image = new Image(); + image.onload = function () { + resolve(image); + }; + image.onerror = function () { + console.error('image create from bytes error.'); + resolve(null); + }; + image.src = source; + }); +}; + +export const createImageFromBytes = (bytes: ArrayBuffer) => { + const blob = new Blob([bytes], { type: 'image/*' }); + return createImage(URL.createObjectURL(blob)); +}; + +export const readImagePixels = (module: TGFX, image: CanvasImageSource, width: number, height: number) => { + if (!image) { + return null; + } + const canvas = getCanvas2D(width, height); + const ctx = canvas.getContext('2d') as CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D | null; + if (!ctx) { + return null; + } + ctx.drawImage(image, 0, 0, width, height); + const { data } = ctx.getImageData(0, 0, width, height); + releaseCanvas2D(canvas); + if (data.length === 0) { + return null; + } + return writeBufferToWasm(module, data); +}; + +export const hasWebpSupport = () => { + try { + return document.createElement('canvas').toDataURL('image/webp', 0.5).indexOf('data:image/webp') === 0; + } catch (err) { + return false; + } +}; + +export const getSourceSize = (source: TexImageSource | OffscreenCanvas) => { + if (isInstanceOf(source, globalThis.HTMLVideoElement)) { + return { + width: (source as HTMLVideoElement).videoWidth, + height: (source as HTMLVideoElement).videoHeight, + }; + } + return { width: source.width, height: source.height }; +}; + +export const uploadToTexture = ( + GL: EmscriptenGL, + source: TexImageSource | OffscreenCanvas | BitmapImage, + textureID: number, + alphaOnly: boolean, +) => { + let renderSource = source instanceof BitmapImage ? 
source.bitmap : source; + if (!renderSource) return; + const gl = GL.currentContext?.GLctx as WebGLRenderingContext; + gl.bindTexture(gl.TEXTURE_2D, GL.textures[textureID]); + if (alphaOnly) { + gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, gl.ALPHA, gl.UNSIGNED_BYTE, renderSource); + } else { + gl.pixelStorei(gl.UNPACK_ALIGNMENT, 4); + gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, gl.RGBA, gl.UNSIGNED_BYTE, renderSource); + gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false); + } +}; + +export const isAndroidMiniprogram = () => { + if (typeof wx !== 'undefined' && wx.getSystemInfoSync) { + return wx.getSystemInfoSync().platform === 'android'; + } +}; + +export const releaseNativeImage = (source: TexImageSource | OffscreenCanvas) => { + if (isInstanceOf(source, globalThis.ImageBitmap)) { + (source as ImageBitmap).close(); + } else if (isCanvas(source)) { + releaseCanvas2D(source as OffscreenCanvas | HTMLCanvasElement); + } +}; + +export const getBytesFromPath = async (module: TGFX, path: string) => { + const buffer = await fetch(path).then((res) => res.arrayBuffer()); + return writeBufferToWasm(module, buffer); +}; + +export { getCanvas2D as createCanvas2D }; \ No newline at end of file diff --git a/web/src/types.ts b/web/src/types.ts new file mode 100644 index 00000000..25b407d9 --- /dev/null +++ b/web/src/types.ts @@ -0,0 +1,162 @@ +export declare class Vector { + private constructor(); + + /** + * Get item from Vector by index. + */ + public get(index: number): T; + + /** + * Push item into Vector. + */ + public push_back(value: T): void; + + /** + * Get item number in Vector. + */ + public size(): number; + + /** + * Delete Vector instance. + */ + public delete(): void; +} + + +export const enum MatrixIndex { + a, + c, + tx, + b, + d, + ty, +} + +export interface ctor { + value: number; +} + +export interface Point { + x: number; + y: number; +} + +export declare class Rect { + /** + * smaller x-axis bounds. + */ + public left: number; + /** + * smaller y-axis bounds. + */ + public top: number; + /** + * larger x-axis bounds. + */ + public right: number; + /** + * larger y-axis bounds. + */ + public bottom: number; + + private constructor(); +} + +export interface TGFXLineCap { + /** + * No stroke extension. + */ + Butt: ctor; + /** + * Adds circle + */ + Round: ctor; + /** + * Adds square + */ + Square: ctor; +} + +export interface TGFXLineJoin { + /** + * Extends to miter limit. + */ + Miter: ctor; + /** + * Adds circle. + */ + Round: ctor; + /** + * Connects outside edges. + */ + Bevel: ctor; +} + +export interface TGFXPathFillType { + /** + * Enclosed by a non-zero sum of contour directions. + */ + Winding: ctor; + /** + * Enclosed by an odd number of contours. + */ + EvenOdd: ctor; + /** + * Enclosed by a zero sum of contour directions. + */ + InverseWinding: ctor; + /** + * Enclosed by an even number of contours. 
+ */ + InverseEvenOdd: ctor; +} + + +export interface EmscriptenGLContext { + handle: number; + GLctx: WebGLRenderingContext; + attributes: EmscriptenGLContextAttributes; + initExtensionsDone: boolean; + version: number; +} + +export type EmscriptenGLContextAttributes = { majorVersion: number; minorVersion: number } & WebGLContextAttributes; + +export interface EmscriptenGL { + contexts: (EmscriptenGLContext | null)[]; + createContext: ( + canvas: HTMLCanvasElement | OffscreenCanvas, + webGLContextAttributes: EmscriptenGLContextAttributes, + ) => number; + currentContext?: EmscriptenGLContext; + deleteContext: (contextHandle: number) => void; + framebuffers: (WebGLFramebuffer | null)[]; + getContext: (contextHandle: number) => EmscriptenGLContext; + getNewId: (array: any[]) => number; + makeContextCurrent: (contextHandle: number) => boolean; + registerContext: (ctx: WebGLRenderingContext, webGLContextAttributes: EmscriptenGLContextAttributes) => number; + textures: (WebGLTexture | null)[]; +} + +export interface TGFX extends EmscriptenModule { + TGFXLineCap: TGFXLineCap; + TGFXLineJoin: TGFXLineJoin; + GL: EmscriptenGL; + TGFXPathFillType: TGFXPathFillType; + _Matrix: { + _MakeAll: ( + scaleX: number, + skewX: number, + transX: number, + skewY: number, + scaleY: number, + transY: number, + pers0: number, + pers1: number, + pers2: number, + ) => any; + _MakeScale: ((sx: number, sy: number) => any) & ((scale: number) => any); + _MakeTrans: (dx: number, dy: number) => any; + }; + [key: string]: any; +} \ No newline at end of file diff --git a/web/src/utils/buffer.ts b/web/src/utils/buffer.ts new file mode 100644 index 00000000..009121d0 --- /dev/null +++ b/web/src/utils/buffer.ts @@ -0,0 +1,21 @@ +export const writeBufferToWasm = (module: EmscriptenModule, data: ArrayLike | ArrayBufferLike) => { + const uint8Array = new Uint8Array(data); + const numBytes = uint8Array.byteLength; + const dataPtr = module._malloc(numBytes); + const dataOnHeap = new Uint8Array(module.HEAPU8.buffer, dataPtr, numBytes); + dataOnHeap.set(uint8Array); + return { byteOffset: dataPtr, length: numBytes, free: () => module._free(dataPtr) }; +}; + +export const readBufferFromWasm = ( + module: EmscriptenModule, + data: ArrayLike | ArrayBufferLike, + handle: (byteOffset: number, length: number) => boolean, +) => { + const uint8Array = new Uint8Array(data); + const dataPtr = module._malloc(uint8Array.byteLength); + if (!handle(dataPtr, uint8Array.byteLength)) return { data: null, free: () => module._free(dataPtr) }; + const dataOnHeap = new Uint8Array(module.HEAPU8.buffer, dataPtr, uint8Array.byteLength); + uint8Array.set(dataOnHeap); + return { data: uint8Array, free: () => module._free(dataPtr) }; +}; diff --git a/web/src/utils/canvas.ts b/web/src/utils/canvas.ts new file mode 100644 index 00000000..1d679e2a --- /dev/null +++ b/web/src/utils/canvas.ts @@ -0,0 +1,68 @@ +import { CANVAS_POOL_MAX_SIZE } from '../constant'; +import { isInstanceOf } from './type-utils'; +import { SAFARI_OR_IOS_WEBVIEW, WORKER } from './ua'; + +const canvasPool = new Array(); + +export const isOffscreenCanvas = (element: any) => isInstanceOf(element, globalThis.OffscreenCanvas); + +export const isCanvas = (element: any) => + isOffscreenCanvas(element) || isInstanceOf(element, globalThis.HTMLCanvasElement); + +export const getCanvas2D = (width: number, height: number) => { + let canvas = canvasPool.pop() || createCanvas2D(); + if (canvas !== null) { + canvas.width = width; + canvas.height = height; + } + return canvas; +}; + +export const 
releaseCanvas2D = (canvas: HTMLCanvasElement | OffscreenCanvas) => { + if (canvasPool.length < CANVAS_POOL_MAX_SIZE) { + canvasPool.push(canvas); + } +}; + +const createCanvas2D = () => { + /** + * Safari browser does not support OffscreenCanvas before version 16.4. + * After version 16.4, OffscreenCanvas is supported, but type checking errors still exist for WebGL interfaces on OffscreenCanvas. + * Therefore, HTMLCanvas Element is used uniformly in Safari. + */ + if (SAFARI_OR_IOS_WEBVIEW && !WORKER) { + return document.createElement('canvas'); + } + try { + const offscreenCanvas = new OffscreenCanvas(0, 0); + const context = offscreenCanvas.getContext('2d') as OffscreenCanvasRenderingContext2D; + if (typeof context.measureText === 'function') return offscreenCanvas; + return document.createElement('canvas'); + } catch (err) { + return document.createElement('canvas'); + } +}; + +export const calculateDisplaySize = (canvas: HTMLCanvasElement) => { + const styleDeclaration = globalThis.getComputedStyle(canvas, null); + const computedSize = { + width: Number(styleDeclaration.width.replace('px', '')), + height: Number(styleDeclaration.height.replace('px', '')), + }; + if (computedSize.width > 0 && computedSize.height > 0) { + return computedSize; + } else { + const styleSize = { + width: Number(canvas.style.width.replace('px', '')), + height: Number(canvas.style.height.replace('px', '')), + }; + if (styleSize.width > 0 && styleSize.height > 0) { + return styleSize; + } else { + return { + width: canvas.width, + height: canvas.height, + }; + } + } +}; diff --git a/web/src/utils/decorators.ts b/web/src/utils/decorators.ts new file mode 100644 index 00000000..26104f6e --- /dev/null +++ b/web/src/utils/decorators.ts @@ -0,0 +1,61 @@ +import { TGFXModule } from '../tgfx-module'; + +export function wasmAwaitRewind(constructor: any) { + const ignoreStaticFunctions = ['length', 'name', 'prototype', 'wasmAsyncMethods']; + let staticFunctions = Object.getOwnPropertyNames(constructor).filter( + (name) => ignoreStaticFunctions.indexOf(name) === -1, + ); + if (constructor.wasmAsyncMethods && constructor.wasmAsyncMethods.length > 0) { + staticFunctions = staticFunctions.filter((name) => constructor.wasmAsyncMethods.indexOf(name) === -1); + } + + let functions = Object.getOwnPropertyNames(constructor.prototype).filter( + (name) => name !== 'constructor' && typeof constructor.prototype[name] === 'function', + ); + if (constructor.prototype.wasmAsyncMethods && constructor.prototype.wasmAsyncMethods.length > 0) { + functions = functions.filter((name) => constructor.prototype.wasmAsyncMethods.indexOf(name) === -1); + } + + const proxyFn = (target: { [prop: string]: (...args: any[]) => any }, methodName: string) => { + const fn = target[methodName]; + target[methodName] = function (...args) { + if (TGFXModule.Asyncify.currData !== null) { + const currData = TGFXModule.Asyncify.currData; + TGFXModule.Asyncify.currData = null; + const ret = fn.call(this, ...args); + TGFXModule.Asyncify.currData = currData; + return ret; + } else { + return fn.call(this, ...args); + } + }; + }; + + staticFunctions.forEach((name) => proxyFn(constructor, name)); + functions.forEach((name) => proxyFn(constructor.prototype, name)); +} + +export function wasmAsyncMethod(target: any, propertyKey: string, descriptor: PropertyDescriptor) { + if (!target.wasmAsyncMethods) { + target.wasmAsyncMethods = []; + } + target.wasmAsyncMethods.push(propertyKey); +} + +export function destroyVerify(constructor: any) { + let functions = 
Object.getOwnPropertyNames(constructor.prototype).filter( + (name) => name !== 'constructor' && typeof constructor.prototype[name] === 'function', + ); + + const proxyFn = (target: { [prop: string]: any }, methodName: string) => { + const fn = target[methodName]; + target[methodName] = function (...args: any[]) { + if (this['isDestroyed']) { + console.error(`Don't call ${methodName} of the ${constructor.name} that is destroyed.`); + return; + } + return fn.call(this, ...args); + }; + }; + functions.forEach((name) => proxyFn(constructor.prototype, name)); +} diff --git a/web/src/utils/font-family.ts b/web/src/utils/font-family.ts new file mode 100644 index 00000000..c27f7aaa --- /dev/null +++ b/web/src/utils/font-family.ts @@ -0,0 +1,31 @@ +const fontNames = ['Arial', '"Courier New"', 'Georgia', '"Times New Roman"', '"Trebuchet MS"', 'Verdana']; + +export const defaultFontNames = (() => { + return ['emoji'].concat(...fontNames); +})(); + +export const getFontFamilies = (name: string, style = ''): string[] => { + if (!name) return []; + const nameChars = name.split(' '); + let names = []; + if (nameChars.length === 1) { + names.push(name); + } else { + names.push(nameChars.join('')); + names.push(nameChars.join(' ')); + } + const fontFamilies = names.reduce((pre: string[], cur: string) => { + if (!style) { + pre.push(`"${cur}"`); + } else { + pre.push(`"${cur} ${style}"`); + pre.push(`"${cur}-${style}"`); + } + return pre; + }, []); + // Fallback font when style is not found. + if (style !== '') { + fontFamilies.push(`"${name}"`); + } + return fontFamilies; +}; \ No newline at end of file diff --git a/web/src/utils/measure-text.ts b/web/src/utils/measure-text.ts new file mode 100644 index 00000000..54017fcc --- /dev/null +++ b/web/src/utils/measure-text.ts @@ -0,0 +1,52 @@ +export const measureText = (imageData: ImageData) => { + const imageDataInt32Array = new Int32Array(imageData.data.buffer); + let left = getLeftPixel(imageDataInt32Array, imageData.width, imageData.height); + let top = getTopPixel(imageDataInt32Array, imageData.width, imageData.height); + let right = getRightPixel(imageDataInt32Array, imageData.width, imageData.height); + let bottom = getBottomPixel(imageDataInt32Array, imageData.width, imageData.height); + return { left, top, right, bottom }; +}; + +export const getLeftPixel = (imageDataArray: Int32Array, width: number, height: number) => { + const verticalCount = imageDataArray.length / width; + const acrossCount = imageDataArray.length / height; + for (let i = 0; i < acrossCount; i++) { + for (let j = 0; j < verticalCount; j++) { + if (imageDataArray[i + j * width] !== 0) return i; + } + } + return acrossCount; +}; + +export const getTopPixel = (imageDataArray: Int32Array, width: number, height: number) => { + const verticalCount = imageDataArray.length / width; + const acrossCount = imageDataArray.length / height; + for (let i = 0; i < verticalCount; i++) { + for (let j = 0; j < acrossCount; j++) { + if (imageDataArray[i * width + j] !== 0) return i; + } + } + return verticalCount; +}; + +export const getRightPixel = (imageDataArray: Int32Array, width: number, height: number) => { + const verticalCount = imageDataArray.length / width; + const acrossCount = imageDataArray.length / height; + for (let i = acrossCount - 1; i > 0; i--) { + for (let j = verticalCount - 1; j > 0; j--) { + if (imageDataArray[i + width * j] !== 0) return i; + } + } + return 0; +}; + +export const getBottomPixel = (imageDataArray: Int32Array, width: number, height: number) => { + const 
verticalCount = imageDataArray.length / width; + const acrossCount = imageDataArray.length / height; + for (let i = verticalCount - 1; i > 0; i--) { + for (let j = acrossCount - 1; j > 0; j--) { + if (imageDataArray[i * width + j] !== 0) return i; + } + } + return 0; +}; \ No newline at end of file diff --git a/web/src/utils/type-utils.ts b/web/src/utils/type-utils.ts new file mode 100644 index 00000000..efebb4e4 --- /dev/null +++ b/web/src/utils/type-utils.ts @@ -0,0 +1 @@ +export const isInstanceOf = (value: any, type: any) => typeof type !== 'undefined' && value instanceof type; \ No newline at end of file diff --git a/web/src/utils/ua.ts b/web/src/utils/ua.ts new file mode 100644 index 00000000..5ff125ec --- /dev/null +++ b/web/src/utils/ua.ts @@ -0,0 +1,8 @@ +const nav = navigator?.userAgent || ''; +export const ANDROID = /android|adr/i.test(nav); +export const MOBILE = /(mobile)/i.test(nav) && ANDROID; +export const MACOS = !(/(mobile)/i.test(nav) || MOBILE) && /Mac OS X/i.test(nav); +export const IPHONE = /(iphone|ipad|ipod)/i.test(nav); +export const WECHAT = /MicroMessenger/i.test(nav); +export const SAFARI_OR_IOS_WEBVIEW = /^((?!chrome|android).)*safari/i.test(nav) || IPHONE; +export const WORKER = typeof (globalThis as any).importScripts === 'function'; diff --git a/web/src/wechat/interfaces.ts b/web/src/wechat/interfaces.ts new file mode 100644 index 00000000..498b3cf7 --- /dev/null +++ b/web/src/wechat/interfaces.ts @@ -0,0 +1,80 @@ +export interface wx { + env: { + USER_DATA_PATH: string; + }; + getFileSystemManager: () => FileSystemManager; + getFileInfo: (object: { filePath: string; success?: () => void; fail?: () => void; complete?: () => void }) => void; + createVideoDecoder: () => VideoDecoder; + getSystemInfoSync: () => SystemInfo; + createOffscreenCanvas: ( + object?: { type: 'webgl' | '2d' }, + width?: number, + height?: number, + compInst?: any, + ) => OffscreenCanvas; + getPerformance: () => Performance; +} + +export interface FileSystemManager { + accessSync: (path: string) => void; + mkdirSync: (path: string) => void; + writeFileSync: (path: string, data: string | ArrayBuffer, encoding: string) => void; + unlinkSync: (path: string) => void; + readdirSync: (path: string) => string[]; +} + +export interface VideoDecoder { + getFrameData: () => FrameDataOptions; + seek: ( + /** 跳转的解码位置,单位 ms */ + position: number, + ) => Promise; + start: (option: VideoDecoderStartOption) => Promise; + remove: () => Promise; + off: ( + /** 事件名 */ + eventName: string, + /** 事件触发时执行的回调函数 */ + callback: (...args: any[]) => any, + ) => void; + on: ( + /** 事件名 + * + * 参数 eventName 可选值: + * - 'start': 开始事件。返回 \{ width, height \}; + * - 'stop': 结束事件。; + * - 'seek': seek 完成事件。; + * - 'bufferchange': 缓冲区变化事件。; + * - 'ended': 解码结束事件。; */ + eventName: 'start' | 'stop' | 'seek' | 'bufferchange' | 'ended', + /** 事件触发时执行的回调函数 */ + callback: (...args: any[]) => any, + ) => void; +} +/** 视频帧数据,若取不到则返回 null。当缓冲区为空的时候可能暂停取不到数据。 */ +export interface FrameDataOptions { + /** 帧数据 */ + data: ArrayBuffer; + /** 帧数据高度 */ + height: number; + /** 帧原始 dts */ + pkDts: number; + /** 帧原始 pts */ + pkPts: number; + /** 帧数据宽度 */ + width: number; +} + +export interface VideoDecoderStartOption { + /** 需要解码的视频源文件。基础库 2.13.0 以下的版本只支持本地路径。 2.13.0 开始支持 http:// 和 https:// 协议的远程路径。 */ + source: string; + /** 解码模式。0:按 pts 解码;1:以最快速度解码 */ + mode?: number; +} + +export interface SystemInfo { + /** 客户端平台 */ + platform: 'ios' | 'android' | 'windows' | 'mac' | 'devtools'; + /** 设备像素比 */ + pixelRatio: number; +} diff --git 
a/web/tsconfig.json b/web/tsconfig.json new file mode 100644 index 00000000..de318580 --- /dev/null +++ b/web/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "lib": ["ES5", "ES6", "DOM", "DOM.Iterable"], + "target": "ES2017", + "allowJs": true, + "sourceMap": true, + "esModuleInterop": true, + "moduleResolution": "Node", + "experimentalDecorators": true, + "strict": true, + "outDir": "./lib", + "baseUrl": ".", + "paths": { + "@tgfx/*": ["./src/*"] + }, + "resolveJsonModule": true + }, + "include": ["src/**/*.ts"] +} \ No newline at end of file diff --git a/web/tsconfig.type.json b/web/tsconfig.type.json new file mode 100644 index 00000000..0ac7be3b --- /dev/null +++ b/web/tsconfig.type.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "lib": ["ES5", "ES6", "DOM", "DOM.Iterable", "WebWorker"], + "target": "ES2017", + "declaration": true, + "declarationDir": "./types", + "emitDeclarationOnly": true, + "removeComments": false, + "skipLibCheck": true, + "experimentalDecorators": true, + "moduleResolution": "Node", + "baseUrl": ".", + "paths": { + "@tgfx/*": ["./src/*"] + }, + "resolveJsonModule": true + }, + "include": ["src/tgfx.ts"] +}
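Taken together, tsconfig.json compiles the sources for bundling while tsconfig.type.json only emits declarations from src/tgfx.ts. A minimal end-to-end sketch of consuming the published package follows; it assumes `npm run build` has produced the bundles and tgfx.wasm under lib/ as configured above, and the wasm path, font name, and sample strings are illustrative:

```ts
import { TGFXInit, types } from 'tgfx';

async function main() {
  // Load the wasm binary from wherever the bundles were deployed (path is an assumption).
  const module: types.TGFX = await TGFXInit({ locateFile: (file) => './lib/' + file });

  // Static helper registered by binding(): emoji detection used for text handling.
  console.log(module.ScalerContext.isEmoji('👍'));  // true
  console.log(module.ScalerContext.isEmoji('abc')); // false

  // The same canvas-backed ScalerContext the C++ side uses for text measurement.
  const scaler = new module.ScalerContext('Arial', '', 24);
  console.log(scaler.getTextBounds('Hello'), scaler.getTextAdvance('Hello'));
}

main().catch(console.error);
```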